diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 4cfdc256418..c8d3d31a305 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -90,12 +90,6 @@ public class MapperQueryParser extends QueryParser { this.parseContext = parseContext; } - public MapperQueryParser(QueryParserSettings settings, QueryParseContext parseContext) { - super(settings.defaultField(), settings.defaultAnalyzer()); - this.parseContext = parseContext; - reset(settings); - } - public void reset(QueryParserSettings settings) { this.settings = settings; this.field = settings.defaultField(); diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java b/core/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java index e079e00303e..76e8b4fccd9 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java @@ -20,7 +20,6 @@ package org.apache.lucene.queryparser.classic; import com.carrotsearch.hppc.ObjectFloatHashMap; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MultiTermQuery; @@ -28,7 +27,6 @@ import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.unit.Fuzziness; import org.joda.time.DateTimeZone; -import java.util.Collection; import java.util.List; import java.util.Locale; @@ -69,16 +67,10 @@ public class QueryParserSettings { private DateTimeZone timeZone; List<String> fields = null; - Collection<String> queryTypes = null; ObjectFloatHashMap<String> boosts = null; float tieBreaker = 0.0f; boolean useDisMax = true; - public boolean isCacheable() { - // a hack for now :) to determine if a query string is cacheable - return !queryString.contains("now"); - } - public String queryString() { return queryString; } @@ -271,14 +263,6 @@ public class QueryParserSettings { this.fields = fields; } - public Collection<String> queryTypes() { - return queryTypes; - } - - public void queryTypes(Collection<String> queryTypes) { - this.queryTypes = queryTypes; - } - public ObjectFloatHashMap<String> boosts() { return boosts; } @@ -371,7 +355,6 @@ public class QueryParserSettings { if (useDisMax != that.useDisMax) return false; if (boosts != null ? !boosts.equals(that.boosts) : that.boosts != null) return false; if (fields != null ? !fields.equals(that.fields) : that.fields != null) return false; - if (queryTypes != null ? !queryTypes.equals(that.queryTypes) : that.queryTypes != null) return false; return true; } @@ -398,7 +381,6 @@ public class QueryParserSettings { result = 31 * result + (analyzeWildcard ? 1 : 0); result = 31 * result + (fields != null ? fields.hashCode() : 0); - result = 31 * result + (queryTypes != null ? queryTypes.hashCode() : 0); result = 31 * result + (boosts != null ? boosts.hashCode() : 0); result = 31 * result + (tieBreaker != +0.0f ? Float.floatToIntBits(tieBreaker) : 0); result = 31 * result + (useDisMax ? 
1 : 0); diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index a4b71626fa8..c613f617774 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -19,9 +19,7 @@ package org.elasticsearch.action; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; - import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsAction; @@ -85,7 +83,11 @@ import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.TransportFlushAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.TransportGetIndexAction; -import org.elasticsearch.action.admin.indices.mapping.get.*; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; +import org.elasticsearch.action.admin.indices.mapping.get.TransportGetFieldMappingsAction; +import org.elasticsearch.action.admin.indices.mapping.get.TransportGetFieldMappingsIndexAction; +import org.elasticsearch.action.admin.indices.mapping.get.TransportGetMappingsAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.admin.indices.open.OpenIndexAction; @@ -96,14 +98,14 @@ import org.elasticsearch.action.admin.indices.recovery.RecoveryAction; import org.elasticsearch.action.admin.indices.recovery.TransportRecoveryAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.TransportRefreshAction; -import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction; -import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsAction; import org.elasticsearch.action.admin.indices.segments.TransportIndicesSegmentsAction; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction; import org.elasticsearch.action.admin.indices.settings.get.TransportGetSettingsAction; import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; +import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction; +import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; @@ -139,7 +141,11 @@ import org.elasticsearch.action.explain.ExplainAction; import org.elasticsearch.action.explain.TransportExplainAction; import org.elasticsearch.action.fieldstats.FieldStatsAction; import org.elasticsearch.action.fieldstats.TransportFieldStatsTransportAction; -import org.elasticsearch.action.get.*; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.MultiGetAction; +import 
org.elasticsearch.action.get.TransportGetAction; +import org.elasticsearch.action.get.TransportMultiGetAction; +import org.elasticsearch.action.get.TransportShardMultiGetAction; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptAction; @@ -148,16 +154,38 @@ import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptAction; import org.elasticsearch.action.indexedscripts.get.TransportGetIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.TransportPutIndexedScriptAction; -import org.elasticsearch.action.percolate.*; -import org.elasticsearch.action.search.*; -import org.elasticsearch.action.search.type.*; +import org.elasticsearch.action.percolate.MultiPercolateAction; +import org.elasticsearch.action.percolate.PercolateAction; +import org.elasticsearch.action.percolate.TransportMultiPercolateAction; +import org.elasticsearch.action.percolate.TransportPercolateAction; +import org.elasticsearch.action.percolate.TransportShardMultiPercolateAction; +import org.elasticsearch.action.search.ClearScrollAction; +import org.elasticsearch.action.search.MultiSearchAction; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchScrollAction; +import org.elasticsearch.action.search.TransportClearScrollAction; +import org.elasticsearch.action.search.TransportMultiSearchAction; +import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.search.TransportSearchScrollAction; +import org.elasticsearch.action.search.type.TransportSearchDfsQueryAndFetchAction; +import org.elasticsearch.action.search.type.TransportSearchDfsQueryThenFetchAction; +import org.elasticsearch.action.search.type.TransportSearchQueryAndFetchAction; +import org.elasticsearch.action.search.type.TransportSearchQueryThenFetchAction; +import org.elasticsearch.action.search.type.TransportSearchScanAction; +import org.elasticsearch.action.search.type.TransportSearchScrollQueryAndFetchAction; +import org.elasticsearch.action.search.type.TransportSearchScrollQueryThenFetchAction; +import org.elasticsearch.action.search.type.TransportSearchScrollScanAction; import org.elasticsearch.action.suggest.SuggestAction; import org.elasticsearch.action.suggest.TransportSuggestAction; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.TransportAction; -import org.elasticsearch.action.termvectors.*; +import org.elasticsearch.action.termvectors.MultiTermVectorsAction; +import org.elasticsearch.action.termvectors.TermVectorsAction; +import org.elasticsearch.action.termvectors.TransportMultiTermVectorsAction; +import org.elasticsearch.action.termvectors.TransportShardMultiTermsVectorAction; +import org.elasticsearch.action.termvectors.TransportTermVectorsAction; import org.elasticsearch.action.termvectors.dfs.TransportDfsOnlyAction; import org.elasticsearch.action.update.TransportUpdateAction; import org.elasticsearch.action.update.UpdateAction; @@ -165,6 +193,7 @@ import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.common.inject.multibindings.Multibinder; +import java.util.ArrayList; import java.util.List; 
import java.util.Map; @@ -174,7 +203,7 @@ import java.util.Map; public class ActionModule extends AbstractModule { private final Map<String, ActionEntry> actions = Maps.newHashMap(); - private final List<Class<? extends ActionFilter>> actionFilters = Lists.newArrayList(); + private final List<Class<? extends ActionFilter>> actionFilters = new ArrayList<>(); static class ActionEntry<Request extends ActionRequest, Response extends ActionResponse> { public final GenericAction<Request, Response> action; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java index 7172c254dd8..755fb330f7b 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java @@ -31,16 +31,19 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.StatusToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.action.admin.cluster.health.ClusterIndexHealth.readClusterIndexHealth; /** @@ -152,7 +155,7 @@ public class ClusterHealthResponse extends ActionResponse implements Iterable<Cl * All the validation failures, including index level validation failures. 
*/ public List<String> getAllValidationFailures() { - List<String> allFailures = newArrayList(getValidationFailures()); + List<String> allFailures = new ArrayList<>(getValidationFailures()); for (ClusterIndexHealth indexHealth : indices.values()) { allFailures.addAll(indexHealth.getValidationFailures()); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index 7f2c1994197..59fd9138ef9 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; @@ -36,6 +35,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicReferenceArray; @@ -54,7 +54,7 @@ public class TransportNodesHotThreadsAction extends TransportNodesAction<NodesHo @Override protected NodesHotThreadsResponse newResponse(NodesHotThreadsRequest request, AtomicReferenceArray responses) { - final List<NodeHotThreads> nodes = Lists.newArrayList(); + final List<NodeHotThreads> nodes = new ArrayList<>(); for (int i = 0; i < responses.length(); i++) { Object resp = responses.get(i); if (resp instanceof NodeHotThreads) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index 90ad8663ec9..b3d1bd43739 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.cluster.node.stats; -import com.google.common.collect.Lists; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; @@ -35,6 +34,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicReferenceArray; @@ -56,7 +56,7 @@ public class TransportNodesStatsAction extends TransportNodesAction<NodesStatsRe @Override protected NodesStatsResponse newResponse(NodesStatsRequest nodesInfoRequest, AtomicReferenceArray responses) { - final List<NodeStats> nodeStats = Lists.newArrayList(); + final List<NodeStats> nodeStats = new ArrayList<>(); for (int i = 0; i < responses.length(); i++) { Object resp = responses.get(i); if (resp instanceof NodeStats) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java index d6a0cd52d04..3e8a122978c 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java @@ -21,8 +21,8 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.SnapshotsInProgress.State; +import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -32,11 +32,11 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.common.xcontent.XContentFactory; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Sets.newHashSet; /** @@ -109,7 +109,7 @@ public class SnapshotStatus implements ToXContent, Streamable { } for (String index : indices) { - List<SnapshotIndexShardStatus> shards = newArrayList(); + List<SnapshotIndexShardStatus> shards = new ArrayList<>(); for (SnapshotIndexShardStatus shard : this.shards) { if (shard.getIndex().equals(index)) { shards.add(shard); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index 8c535ec67de..25701a80039 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -20,12 +20,15 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.*; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -42,6 +45,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReferenceArray; @@ -82,8 +86,8 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction<Transpor @Override protected NodesSnapshotStatus newResponse(Request request, AtomicReferenceArray responses) { - final List<NodeSnapshotStatus> nodesList = Lists.newArrayList(); - final List<FailedNodeException> failures = Lists.newArrayList(); + final List<NodeSnapshotStatus> nodesList = new ArrayList<>(); + 
final List<FailedNodeException> failures = new ArrayList<>(); for (int i = 0; i < responses.length(); i++) { Object resp = responses.get(i); if (resp instanceof NodeSnapshotStatus) { // will also filter out null response for unallocated ones diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java index e8243bf5d50..c219d85f9d5 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java @@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.cluster.stats; import com.carrotsearch.hppc.ObjectObjectHashMap; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -67,10 +66,10 @@ public class ClusterStatsIndices implements ToXContent, Streamable { for (ClusterStatsNodeResponse r : nodeResponses) { for (org.elasticsearch.action.admin.indices.stats.ShardStats shardStats : r.shardsStats()) { - ShardStats indexShardStats = countsPerIndex.get(shardStats.getIndex()); + ShardStats indexShardStats = countsPerIndex.get(shardStats.getShardRouting().getIndex()); if (indexShardStats == null) { indexShardStats = new ShardStats(); - countsPerIndex.put(shardStats.getIndex(), indexShardStats); + countsPerIndex.put(shardStats.getShardRouting().getIndex(), indexShardStats); } indexShardStats.total++; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 82a29890b0d..26e78264534 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -106,7 +106,7 @@ public class TransportClusterStatsAction extends TransportNodesAction<ClusterSta for (IndexShard indexShard : indexService) { if (indexShard.routingEntry() != null && indexShard.routingEntry().active()) { // only report on fully started shards - shardsStats.add(new ShardStats(indexShard, indexShard.routingEntry(), SHARD_STATS_FLAGS)); + shardsStats.add(new ShardStats(indexShard, SHARD_STATS_FLAGS)); } } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index fd6819c7fc9..779f9a3a328 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -21,8 +21,6 @@ package org.elasticsearch.action.admin.indices.alias; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; - import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.AliasesRequest; import org.elasticsearch.action.CompositeIndicesRequest; @@ -54,7 +52,7 @@ import static org.elasticsearch.cluster.metadata.AliasAction.readAliasAction; */ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesRequest> implements 
CompositeIndicesRequest { - private List<AliasActions> allAliasActions = Lists.newArrayList(); + private List<AliasActions> allAliasActions = new ArrayList<>(); //indices options that require every specified index to exist, expand wildcards only to open indices and //don't allow that no indices are resolved from wildcard expressions diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 9d34c6f346e..3e7a03aa66a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.action.admin.indices.analyze; -import com.google.common.collect.Lists; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; @@ -39,7 +38,13 @@ import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.analysis.*; +import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.index.analysis.CharFilterFactoryFactory; +import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenFilterFactoryFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.index.analysis.TokenizerFactoryFactory; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.shard.ShardId; @@ -49,6 +54,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.List; /** @@ -210,7 +216,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe throw new IllegalArgumentException("failed to find analyzer"); } - List<AnalyzeResponse.AnalyzeToken> tokens = Lists.newArrayList(); + List<AnalyzeResponse.AnalyzeToken> tokens = new ArrayList<>(); TokenStream stream = null; int lastPosition = -1; int lastOffset = 0; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheRequest.java deleted file mode 100644 index 3f583b7efa4..00000000000 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheRequest.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.indices.cache.clear; - -import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; - -/** - * - */ -class ShardClearIndicesCacheRequest extends BroadcastShardRequest { - - private boolean queryCache = false; - private boolean fieldDataCache = false; - private boolean recycler; - private boolean requestCache = false; - - private String[] fields = null; - - ShardClearIndicesCacheRequest() { - } - - ShardClearIndicesCacheRequest(ShardId shardId, ClearIndicesCacheRequest request) { - super(shardId, request); - queryCache = request.queryCache(); - fieldDataCache = request.fieldDataCache(); - fields = request.fields(); - recycler = request.recycler(); - requestCache = request.requestCache(); - } - - public boolean queryCache() { - return queryCache; - } - - public boolean requestCache() { - return requestCache; - } - - public boolean fieldDataCache() { - return this.fieldDataCache; - } - - public boolean recycler() { - return this.recycler; - } - - public String[] fields() { - return this.fields; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - queryCache = in.readBoolean(); - fieldDataCache = in.readBoolean(); - recycler = in.readBoolean(); - fields = in.readStringArray(); - requestCache = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBoolean(queryCache); - out.writeBoolean(fieldDataCache); - out.writeBoolean(recycler); - out.writeStringArrayNullable(fields); - out.writeBoolean(requestCache); - } -} diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index e77b7009fb0..ba90ca19263 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -21,17 +21,16 @@ package org.elasticsearch.action.admin.indices.cache.clear; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; +import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; 
-import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; @@ -40,15 +39,14 @@ import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.io.IOException; import java.util.List; -import java.util.concurrent.atomic.AtomicReferenceArray; -import static com.google.common.collect.Lists.newArrayList; /** * Indices clear cache action. */ -public class TransportClearIndicesCacheAction extends TransportBroadcastAction<ClearIndicesCacheRequest, ClearIndicesCacheResponse, ShardClearIndicesCacheRequest, ShardClearIndicesCacheResponse> { +public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAction<ClearIndicesCacheRequest, ClearIndicesCacheResponse, TransportBroadcastByNodeAction.EmptyResult> { private final IndicesService indicesService; private final IndicesRequestCache indicesRequestCache; @@ -59,48 +57,33 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastAction<C IndicesRequestCache indicesQueryCache, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { super(settings, ClearIndicesCacheAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - ClearIndicesCacheRequest.class, ShardClearIndicesCacheRequest.class, ThreadPool.Names.MANAGEMENT); + ClearIndicesCacheRequest.class, ThreadPool.Names.MANAGEMENT); this.indicesService = indicesService; this.indicesRequestCache = indicesQueryCache; } @Override - protected ClearIndicesCacheResponse newResponse(ClearIndicesCacheRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) { - int successfulShards = 0; - int failedShards = 0; - List<ShardOperationFailedException> shardFailures = null; - for (int i = 0; i < shardsResponses.length(); i++) { - Object shardResponse = shardsResponses.get(i); - if (shardResponse == null) { - // simply ignore non active shards - } else if (shardResponse instanceof BroadcastShardOperationFailedException) { - failedShards++; - if (shardFailures == null) { - shardFailures = newArrayList(); - } - shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); - } else { - successfulShards++; - } - } - return new ClearIndicesCacheResponse(shardsResponses.length(), successfulShards, failedShards, shardFailures); + protected EmptyResult readShardResult(StreamInput in) throws IOException { + return EmptyResult.readEmptyResultFrom(in); } @Override - protected ShardClearIndicesCacheRequest newShardRequest(int numShards, ShardRouting shard, ClearIndicesCacheRequest request) { - return new ShardClearIndicesCacheRequest(shard.shardId(), request); + protected ClearIndicesCacheResponse newResponse(ClearIndicesCacheRequest request, int totalShards, int successfulShards, int failedShards, List<EmptyResult> responses, List<ShardOperationFailedException> shardFailures) { + return new ClearIndicesCacheResponse(totalShards, successfulShards, failedShards, shardFailures); } @Override - protected ShardClearIndicesCacheResponse newShardResponse() { - return new 
ShardClearIndicesCacheResponse(); + protected ClearIndicesCacheRequest readRequestFrom(StreamInput in) throws IOException { + final ClearIndicesCacheRequest request = new ClearIndicesCacheRequest(); + request.readFrom(in); + return request; } @Override - protected ShardClearIndicesCacheResponse shardOperation(ShardClearIndicesCacheRequest request) { - IndexService service = indicesService.indexService(request.shardId().getIndex()); + protected EmptyResult shardOperation(ClearIndicesCacheRequest request, ShardRouting shardRouting) { + IndexService service = indicesService.indexService(shardRouting.getIndex()); if (service != null) { - IndexShard shard = service.shard(request.shardId().id()); + IndexShard shard = service.shard(shardRouting.id()); boolean clearedAtLeastOne = false; if (request.queryCache()) { clearedAtLeastOne = true; @@ -138,15 +121,15 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastAction<C } } } - return new ShardClearIndicesCacheResponse(request.shardId()); + return EmptyResult.INSTANCE; } /** * The refresh request works against *all* shards. */ @Override - protected GroupShardsIterator shards(ClusterState clusterState, ClearIndicesCacheRequest request, String[] concreteIndices) { - return clusterState.routingTable().allActiveShardsGrouped(concreteIndices, true); + protected ShardsIterator shards(ClusterState clusterState, ClearIndicesCacheRequest request, String[] concreteIndices) { + return clusterState.routingTable().allShards(concreteIndices); } @Override @@ -158,5 +141,4 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastAction<C protected ClusterBlockException checkRequestBlock(ClusterState state, ClearIndicesCacheRequest request, String[] concreteIndices) { return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, concreteIndices); } - } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java index c6c3c316f68..323a6cc2382 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java @@ -38,11 +38,10 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicReferenceArray; -import static com.google.common.collect.Lists.newArrayList; - /** * Flush Action. 
*/ @@ -71,7 +70,7 @@ public class TransportFlushAction extends TransportBroadcastAction<FlushRequest, } else if (shardResponse instanceof BroadcastShardOperationFailedException) { failedShards++; if (shardFailures == null) { - shardFailures = newArrayList(); + shardFailures = new ArrayList<>(); } shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); } else { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index 8e51133d132..265d326f164 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.action.admin.indices.mapping.get; import com.google.common.base.Predicate; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData; import org.elasticsearch.action.support.ActionFilters; @@ -55,6 +54,8 @@ import java.io.IOException; import java.util.Collection; import java.util.Iterator; +import static org.elasticsearch.common.util.CollectionUtils.newLinkedList; + /** * Transport action used to retrieve the mappings related to fields that belong to a specific index */ @@ -178,7 +179,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc } else if (Regex.isSimpleMatchPattern(field)) { // go through the field mappers 3 times, to make sure we give preference to the resolve order: full name, index name, name. // also make sure we only store each mapper once. - Collection<FieldMapper> remainingFieldMappers = Lists.newLinkedList(allFieldMappers); + Collection<FieldMapper> remainingFieldMappers = newLinkedList(allFieldMappers); for (Iterator<FieldMapper> it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); if (Regex.simpleMatch(field, fieldMapper.fieldType().names().fullName())) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeRequest.java deleted file mode 100644 index 05aeabe21a6..00000000000 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeRequest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.indices.optimize; - - -import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; - -/** - * - */ -final class ShardOptimizeRequest extends BroadcastShardRequest { - - private OptimizeRequest request = new OptimizeRequest(); - - ShardOptimizeRequest() { - } - - ShardOptimizeRequest(ShardId shardId, OptimizeRequest request) { - super(shardId, request); - this.request = request; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - request.readFrom(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - request.writeTo(out); - } - - public OptimizeRequest optimizeRequest() { - return this.request; - } -} diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java index 6b5416985e7..c5fed3144c5 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java @@ -21,32 +21,29 @@ package org.elasticsearch.action.admin.indices.optimize; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; +import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.io.IOException; import java.util.List; -import java.util.concurrent.atomic.AtomicReferenceArray; - -import static com.google.common.collect.Lists.newArrayList; /** * Optimize index/indices action. 
*/ -public class TransportOptimizeAction extends TransportBroadcastAction<OptimizeRequest, OptimizeResponse, ShardOptimizeRequest, ShardOptimizeResponse> { +public class TransportOptimizeAction extends TransportBroadcastByNodeAction<OptimizeRequest, OptimizeResponse, TransportBroadcastByNodeAction.EmptyResult> { private final IndicesService indicesService; @@ -55,55 +52,40 @@ public class TransportOptimizeAction extends TransportBroadcastAction<OptimizeRe TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { super(settings, OptimizeAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - OptimizeRequest.class, ShardOptimizeRequest.class, ThreadPool.Names.OPTIMIZE); + OptimizeRequest.class, ThreadPool.Names.OPTIMIZE); this.indicesService = indicesService; } @Override - protected OptimizeResponse newResponse(OptimizeRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) { - int successfulShards = 0; - int failedShards = 0; - List<ShardOperationFailedException> shardFailures = null; - for (int i = 0; i < shardsResponses.length(); i++) { - Object shardResponse = shardsResponses.get(i); - if (shardResponse == null) { - // a non active shard, ignore... - } else if (shardResponse instanceof BroadcastShardOperationFailedException) { - failedShards++; - if (shardFailures == null) { - shardFailures = newArrayList(); - } - shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); - } else { - successfulShards++; - } - } - return new OptimizeResponse(shardsResponses.length(), successfulShards, failedShards, shardFailures); + protected EmptyResult readShardResult(StreamInput in) throws IOException { + return EmptyResult.readEmptyResultFrom(in); } @Override - protected ShardOptimizeRequest newShardRequest(int numShards, ShardRouting shard, OptimizeRequest request) { - return new ShardOptimizeRequest(shard.shardId(), request); + protected OptimizeResponse newResponse(OptimizeRequest request, int totalShards, int successfulShards, int failedShards, List<EmptyResult> responses, List<ShardOperationFailedException> shardFailures) { + return new OptimizeResponse(totalShards, successfulShards, failedShards, shardFailures); } @Override - protected ShardOptimizeResponse newShardResponse() { - return new ShardOptimizeResponse(); + protected OptimizeRequest readRequestFrom(StreamInput in) throws IOException { + final OptimizeRequest request = new OptimizeRequest(); + request.readFrom(in); + return request; } @Override - protected ShardOptimizeResponse shardOperation(ShardOptimizeRequest request) { - IndexShard indexShard = indicesService.indexServiceSafe(request.shardId().getIndex()).shardSafe(request.shardId().id()); - indexShard.optimize(request.optimizeRequest()); - return new ShardOptimizeResponse(request.shardId()); + protected EmptyResult shardOperation(OptimizeRequest request, ShardRouting shardRouting) { + IndexShard indexShard = indicesService.indexServiceSafe(shardRouting.shardId().getIndex()).shardSafe(shardRouting.shardId().id()); + indexShard.optimize(request); + return EmptyResult.INSTANCE; } /** * The refresh request works against *all* shards. 
*/ @Override - protected GroupShardsIterator shards(ClusterState clusterState, OptimizeRequest request, String[] concreteIndices) { - return clusterState.routingTable().allActiveShardsGrouped(concreteIndices, true); + protected ShardsIterator shards(ClusterState clusterState, OptimizeRequest request, String[] concreteIndices) { + return clusterState.routingTable().allShards(concreteIndices); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java index fea33688c14..0e0881d1729 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.indices.recovery.RecoveryState; import java.io.IOException; import java.util.ArrayList; @@ -38,7 +39,7 @@ import java.util.Map; public class RecoveryResponse extends BroadcastResponse implements ToXContent { private boolean detailed = false; - private Map<String, List<ShardRecoveryResponse>> shardResponses = new HashMap<>(); + private Map<String, List<RecoveryState>> shardRecoveryStates = new HashMap<>(); public RecoveryResponse() { } @@ -50,18 +51,18 @@ public class RecoveryResponse extends BroadcastResponse implements ToXContent { * @param successfulShards Count of shards successfully processed * @param failedShards Count of shards which failed to process * @param detailed Display detailed metrics - * @param shardResponses Map of indices to shard recovery information + * @param shardRecoveryStates Map of indices to shard recovery information * @param shardFailures List of failures processing shards */ public RecoveryResponse(int totalShards, int successfulShards, int failedShards, boolean detailed, - Map<String, List<ShardRecoveryResponse>> shardResponses, List<ShardOperationFailedException> shardFailures) { + Map<String, List<RecoveryState>> shardRecoveryStates, List<ShardOperationFailedException> shardFailures) { super(totalShards, successfulShards, failedShards, shardFailures); - this.shardResponses = shardResponses; + this.shardRecoveryStates = shardRecoveryStates; this.detailed = detailed; } public boolean hasRecoveries() { - return shardResponses.size() > 0; + return shardRecoveryStates.size() > 0; } public boolean detailed() { @@ -72,23 +73,23 @@ public class RecoveryResponse extends BroadcastResponse implements ToXContent { this.detailed = detailed; } - public Map<String, List<ShardRecoveryResponse>> shardResponses() { - return shardResponses; + public Map<String, List<RecoveryState>> shardRecoveryStates() { + return shardRecoveryStates; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (hasRecoveries()) { - for (String index : shardResponses.keySet()) { - List<ShardRecoveryResponse> responses = shardResponses.get(index); - if (responses == null || responses.size() == 0) { + for (String index : shardRecoveryStates.keySet()) { + List<RecoveryState> recoveryStates = shardRecoveryStates.get(index); + if (recoveryStates == null || recoveryStates.size() == 0) { continue; } builder.startObject(index); builder.startArray("shards"); - for 
(ShardRecoveryResponse recoveryResponse : responses) { + for (RecoveryState recoveryState : recoveryStates) { builder.startObject(); - recoveryResponse.toXContent(builder, params); + recoveryState.toXContent(builder, params); builder.endObject(); } builder.endArray(); @@ -101,12 +102,12 @@ public class RecoveryResponse extends BroadcastResponse implements ToXContent { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeVInt(shardResponses.size()); - for (Map.Entry<String, List<ShardRecoveryResponse>> entry : shardResponses.entrySet()) { + out.writeVInt(shardRecoveryStates.size()); + for (Map.Entry<String, List<RecoveryState>> entry : shardRecoveryStates.entrySet()) { out.writeString(entry.getKey()); out.writeVInt(entry.getValue().size()); - for (ShardRecoveryResponse recoveryResponse : entry.getValue()) { - recoveryResponse.writeTo(out); + for (RecoveryState recoveryState : entry.getValue()) { + recoveryState.writeTo(out); } } } @@ -118,11 +119,11 @@ public class RecoveryResponse extends BroadcastResponse implements ToXContent { for (int i = 0; i < size; i++) { String s = in.readString(); int listSize = in.readVInt(); - List<ShardRecoveryResponse> list = new ArrayList<>(listSize); + List<RecoveryState> list = new ArrayList<>(listSize); for (int j = 0; j < listSize; j++) { - list.add(ShardRecoveryResponse.readShardRecoveryResponse(in)); + list.add(RecoveryState.readRecoveryState(in)); } - shardResponses.put(s, list); + shardRecoveryStates.put(s, list); } } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/ShardRecoveryResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/ShardRecoveryResponse.java deleted file mode 100644 index a4104fbc449..00000000000 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/ShardRecoveryResponse.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.indices.recovery; - -import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.recovery.RecoveryState; - -import java.io.IOException; - -/** - * Information regarding the recovery state of a shard. 
- */ -public class ShardRecoveryResponse extends BroadcastShardResponse implements ToXContent { - - RecoveryState recoveryState; - - public ShardRecoveryResponse() { } - - /** - * Constructs shard recovery information for the given index and shard id. - * - * @param shardId Id of the shard - */ - ShardRecoveryResponse(ShardId shardId) { - super(shardId); - } - - /** - * Sets the recovery state information for the shard. - * - * @param recoveryState Recovery state - */ - public void recoveryState(RecoveryState recoveryState) { - this.recoveryState = recoveryState; - } - - /** - * Gets the recovery state information for the shard. Null if shard wasn't recovered / recovery didn't start yet. - * - * @return Recovery state - */ - @Nullable - public RecoveryState recoveryState() { - return recoveryState; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - recoveryState.toXContent(builder, params); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - recoveryState.writeTo(out); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - recoveryState = RecoveryState.readRecoveryState(in); - } - - /** - * Builds a new ShardRecoveryResponse from the give input stream. - * - * @param in Input stream - * @return A new ShardRecoveryResponse - * @throws IOException - */ - public static ShardRecoveryResponse readShardRecoveryResponse(StreamInput in) throws IOException { - ShardRecoveryResponse response = new ShardRecoveryResponse(); - response.readFrom(in); - return response; - } -} diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java index cee59c7eb2a..86817fd1cd9 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java @@ -19,40 +19,37 @@ package org.elasticsearch.action.admin.indices.recovery; +import com.google.common.collect.Maps; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; -import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; +import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.ShardId; import 
org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.indices.recovery.RecoveryState;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.atomic.AtomicReferenceArray;
 
 /**
  * Transport action for shard recovery operation. This transport action does not actually
  * perform shard recovery, it only reports on recoveries (both active and complete).
  */
-public class TransportRecoveryAction extends TransportBroadcastAction<RecoveryRequest, RecoveryResponse, TransportRecoveryAction.ShardRecoveryRequest, ShardRecoveryResponse> {
+public class TransportRecoveryAction extends TransportBroadcastByNodeAction<RecoveryRequest, RecoveryResponse, RecoveryState> {
 
     private final IndicesService indicesService;
 
@@ -61,84 +58,55 @@ public class TransportRecoveryAction extends TransportBroadcastAction<RecoveryRe
                                    TransportService transportService, IndicesService indicesService, ActionFilters actionFilters,
                                    IndexNameExpressionResolver indexNameExpressionResolver) {
         super(settings, RecoveryAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
-                RecoveryRequest.class, ShardRecoveryRequest.class, ThreadPool.Names.MANAGEMENT);
+                RecoveryRequest.class, ThreadPool.Names.MANAGEMENT);
         this.indicesService = indicesService;
     }
 
     @Override
-    protected RecoveryResponse newResponse(RecoveryRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
+    protected RecoveryState readShardResult(StreamInput in) throws IOException {
+        return RecoveryState.readRecoveryState(in);
+    }
 
-        int successfulShards = 0;
-        int failedShards = 0;
-        List<ShardOperationFailedException> shardFailures = null;
-        Map<String, List<ShardRecoveryResponse>> shardResponses = new HashMap<>();
-        for (int i = 0; i < shardsResponses.length(); i++) {
-            Object shardResponse = shardsResponses.get(i);
-            if (shardResponse == null) {
-                // simply ignore non active shards
-            } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
-                failedShards++;
-                if (shardFailures == null) {
-                    shardFailures = new ArrayList<>();
+    @Override
+    protected RecoveryResponse newResponse(RecoveryRequest request, int totalShards, int successfulShards, int failedShards, List<RecoveryState> responses, List<ShardOperationFailedException> shardFailures) {
+        Map<String, List<RecoveryState>> shardResponses = Maps.newHashMap();
+        for (RecoveryState recoveryState : responses) {
+            if (recoveryState == null) {
+                continue;
+            }
+            String indexName = recoveryState.getShardId().getIndex();
+            if (!shardResponses.containsKey(indexName)) {
+                shardResponses.put(indexName, new ArrayList<RecoveryState>());
+            }
+            if (request.activeOnly()) {
+                if (recoveryState.getStage() != RecoveryState.Stage.DONE) {
+                    shardResponses.get(indexName).add(recoveryState);
                 }
-                shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
             } else {
-                ShardRecoveryResponse recoveryResponse = (ShardRecoveryResponse) shardResponse;
-                successfulShards++;
-
-                if (recoveryResponse.recoveryState() == null) {
-                    // recovery not yet started
-                    continue;
-                }
-
-                String indexName = recoveryResponse.getIndex();
-                List<ShardRecoveryResponse> responses = shardResponses.get(indexName);
-
-                if (responses == null) {
-                    responses = new ArrayList<>();
-                    shardResponses.put(indexName, responses);
-                }
-
-                if (request.activeOnly()) {
-                    if (recoveryResponse.recoveryState().getStage() != RecoveryState.Stage.DONE) {
-                        responses.add(recoveryResponse);
-                    }
-                } else {
-                    responses.add(recoveryResponse);
-                }
+                shardResponses.get(indexName).add(recoveryState);
             }
         }
-
-        return new RecoveryResponse(shardsResponses.length(), successfulShards,
-                failedShards, request.detailed(), shardResponses, shardFailures);
+        return new RecoveryResponse(totalShards, successfulShards, failedShards, request.detailed(), shardResponses, shardFailures);
     }
 
     @Override
-    protected ShardRecoveryRequest newShardRequest(int numShards, ShardRouting shard, RecoveryRequest request) {
-        return new ShardRecoveryRequest(shard.shardId(), request);
+    protected RecoveryRequest readRequestFrom(StreamInput in) throws IOException {
+        final RecoveryRequest recoveryRequest = new RecoveryRequest();
+        recoveryRequest.readFrom(in);
+        return recoveryRequest;
     }
 
     @Override
-    protected ShardRecoveryResponse newShardResponse() {
-        return new ShardRecoveryResponse();
+    protected RecoveryState shardOperation(RecoveryRequest request, ShardRouting shardRouting) {
+        IndexService indexService = indicesService.indexServiceSafe(shardRouting.shardId().getIndex());
+        IndexShard indexShard = indexService.shardSafe(shardRouting.shardId().id());
+        return indexShard.recoveryState();
     }
 
     @Override
-    protected ShardRecoveryResponse shardOperation(ShardRecoveryRequest request) {
-
-        IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
-        IndexShard indexShard = indexService.shardSafe(request.shardId().id());
-        ShardRecoveryResponse shardRecoveryResponse = new ShardRecoveryResponse(request.shardId());
-
-        RecoveryState state = indexShard.recoveryState();
-        shardRecoveryResponse.recoveryState(state);
-        return shardRecoveryResponse;
-    }
-
-    @Override
-    protected GroupShardsIterator shards(ClusterState state, RecoveryRequest request, String[] concreteIndices) {
-        return state.routingTable().allAssignedShardsGrouped(concreteIndices, true, true);
+    protected ShardsIterator shards(ClusterState state, RecoveryRequest request, String[] concreteIndices) {
+        return state.routingTable().allShardsIncludingRelocationTargets(concreteIndices);
     }
 
@@ -150,14 +118,4 @@ public class TransportRecoveryAction extends TransportBroadcastAction<RecoveryRe
     protected ClusterBlockException checkRequestBlock(ClusterState state, RecoveryRequest request, String[] concreteIndices) {
         return state.blocks().indicesBlockedException(ClusterBlockLevel.READ, concreteIndices);
     }
-
-    static class ShardRecoveryRequest extends BroadcastShardRequest {
-
-        ShardRecoveryRequest() {
-        }
-
-        ShardRecoveryRequest(ShardId shardId, RecoveryRequest request) {
-            super(shardId, request);
-        }
-    }
 }
\ No newline at end of file
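This is the first of several actions in this commit that migrate from TransportBroadcastAction to TransportBroadcastByNodeAction. The new base class fans the request out once per node rather than once per shard, which is why the per-shard wrapper request classes (such as ShardRecoveryRequest above) disappear. A rough sketch of the contract the subclasses now fill in, with signatures taken from this diff and everything else simplified (the real base class also carries transport handlers, per-node grouping, and the block checks):

    // Illustrative skeleton only, not the actual class body.
    public abstract class TransportBroadcastByNodeAction<Request, Response, ShardOperationResult> {
        // Deserialize one per-shard result returned from a data node.
        protected abstract ShardOperationResult readShardResult(StreamInput in) throws IOException;

        // Reduce all per-shard results into the final response.
        protected abstract Response newResponse(Request request, int totalShards, int successfulShards,
                                                int failedShards, List<ShardOperationResult> results,
                                                List<ShardOperationFailedException> shardFailures);

        // Deserialize the top-level request, which is sent once per node.
        protected abstract Request readRequestFrom(StreamInput in) throws IOException;

        // Execute the operation against a single shard on the local node.
        protected abstract ShardOperationResult shardOperation(Request request, ShardRouting shardRouting);

        // Enumerate the shards the request fans out to.
        protected abstract ShardsIterator shards(ClusterState clusterState, Request request, String[] concreteIndices);
    }

Note also that null per-shard results are skipped in the reduce step above, taking over the "simply ignore non active shards" behavior of the removed code.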
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java
index 32e9e509993..2eead86e202 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java
@@ -38,11 +38,10 @@ import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicReferenceArray;
 
-import static com.google.common.collect.Lists.newArrayList;
-
 /**
  * Refresh action.
  */
@@ -71,7 +70,7 @@ public class TransportRefreshAction extends TransportBroadcastAction<RefreshRequ
             } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
                 failedShards++;
                 if (shardFailures == null) {
-                    shardFailures = newArrayList();
+                    shardFailures = new ArrayList<>();
                 }
                 shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
             } else {
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndexSegments.java b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndexSegments.java
index 453a6f12b1a..52c31433591 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndexSegments.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndexSegments.java
@@ -19,9 +19,9 @@
 package org.elasticsearch.action.admin.indices.segments;
 
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -39,7 +39,7 @@ public class IndexSegments implements Iterable<IndexShardSegments> {
         for (ShardSegments shard : shards) {
             List<ShardSegments> lst = tmpIndexShards.get(shard.getShardRouting().id());
             if (lst == null) {
-                lst = Lists.newArrayList();
+                lst = new ArrayList<>();
                 tmpIndexShards.put(shard.getShardRouting().id(), lst);
             }
             lst.add(shard);
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java
index 6b0dc8697f3..a3e5f16ee1e 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java
@@ -19,13 +19,11 @@
 package org.elasticsearch.action.admin.indices.segments;
 
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import org.apache.lucene.util.Accountable;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.broadcast.BroadcastResponse;
-import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.ByteSizeValue;
@@ -35,6 +33,7 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
 import org.elasticsearch.index.engine.Segment;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -50,7 +49,7 @@ public class IndicesSegmentResponse extends BroadcastResponse implements ToXCont
 
     }
 
-    IndicesSegmentResponse(ShardSegments[] shards, ClusterState clusterState, int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures) {
+    IndicesSegmentResponse(ShardSegments[] shards, int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures) {
         super(totalShards, successfulShards, failedShards, shardFailures);
         this.shards = shards;
     }
@@ -63,11 +62,11 @@ public class IndicesSegmentResponse extends BroadcastResponse implements ToXCont
         Set<String> indices = Sets.newHashSet();
         for (ShardSegments shard : shards) {
-            indices.add(shard.getIndex());
+            indices.add(shard.getShardRouting().getIndex());
         }
 
         for (String index : indices) {
-            List<ShardSegments> shards = Lists.newArrayList();
+            List<ShardSegments> shards = new ArrayList<>();
             for (ShardSegments shard : this.shards) {
                 if (shard.getShardRouting().index().equals(index)) {
                     shards.add(shard);
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java
index 6e754a26210..4b3264fca40 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java
@@ -20,10 +20,10 @@
 package org.elasticsearch.action.admin.indices.segments;
 
 import com.google.common.collect.ImmutableList;
-import org.elasticsearch.action.support.broadcast.BroadcastShardResponse;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.index.engine.Segment;
 
 import java.io.IOException;
@@ -33,7 +33,7 @@ import java.util.List;
 
 import static org.elasticsearch.cluster.routing.ShardRouting.readShardRoutingEntry;
 
-public class ShardSegments extends BroadcastShardResponse implements Iterable<Segment> {
+public class ShardSegments implements Streamable, Iterable<Segment> {
 
     private ShardRouting shardRouting;
 
@@ -43,7 +43,6 @@ public class ShardSegments extends BroadcastShardResponse implements Iterable<Se
     }
 
     ShardSegments(ShardRouting shardRouting, List<Segment> segments) {
-        super(shardRouting.shardId());
         this.shardRouting = shardRouting;
         this.segments = segments;
     }
@@ -89,7 +88,6 @@ public class ShardSegments extends BroadcastShardResponse implements Iterable<Se
     @Override
     public void readFrom(StreamInput in) throws IOException {
-        super.readFrom(in);
         shardRouting = readShardRoutingEntry(in);
         int size = in.readVInt();
         if (size == 0) {
@@ -104,7 +102,6 @@ public class ShardSegments extends BroadcastShardResponse implements Iterable<Se
     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        super.writeTo(out);
         shardRouting.writeTo(out);
         out.writeVInt(segments.size());
         for (Segment segment : segments) {
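With BroadcastShardResponse gone, ShardSegments implements Streamable directly: a no-arg constructor plus a static reader (readShardSegments, used by readShardResult in the transport action below) that allocates an instance and calls readFrom on it. The invariant to keep in mind is that readFrom must consume fields in exactly the order writeTo produced them, since the stream carries no field names. A minimal illustration of the convention, using a hypothetical class that is not part of this commit:

    class Example implements Streamable {
        private String name;
        private long count;

        static Example readExample(StreamInput in) throws IOException {
            Example e = new Example();
            e.readFrom(in);
            return e;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            name = in.readString();   // must mirror writeTo's order exactly
            count = in.readVLong();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeString(name);
            out.writeVLong(count);
        }
    }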
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java
index ee7cc5a98a1..c03a1cecde0 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java
@@ -21,38 +21,30 @@
 package org.elasticsearch.action.admin.indices.segments;
 
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.DefaultShardOperationFailedException;
-import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
-import org.elasticsearch.action.support.broadcast.BroadcastShardRequest;
-import org.elasticsearch.action.support.broadcast.TransportBroadcastAction;
+import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardsIterator;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.shard.IndexShard;
-import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
 import java.io.IOException;
 import java.util.List;
-import java.util.concurrent.atomic.AtomicReferenceArray;
-
-import static com.google.common.collect.Lists.newArrayList;
 
 /**
  *
 */
-public class TransportIndicesSegmentsAction extends TransportBroadcastAction<IndicesSegmentsRequest, IndicesSegmentResponse, TransportIndicesSegmentsAction.IndexShardSegmentRequest, ShardSegments> {
+public class TransportIndicesSegmentsAction extends TransportBroadcastByNodeAction<IndicesSegmentsRequest, IndicesSegmentResponse, ShardSegments> {
 
     private final IndicesService indicesService;
 
@@ -60,7 +52,7 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastAction<Ind
     public TransportIndicesSegmentsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                           TransportService transportService, IndicesService indicesService, ActionFilters actionFilters,
                                           IndexNameExpressionResolver indexNameExpressionResolver) {
         super(settings, IndicesSegmentsAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
-                IndicesSegmentsRequest.class, TransportIndicesSegmentsAction.IndexShardSegmentRequest.class, ThreadPool.Names.MANAGEMENT);
+                IndicesSegmentsRequest.class, ThreadPool.Names.MANAGEMENT);
         this.indicesService = indicesService;
     }
 
@@ -68,8 +60,8 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastAction<Ind
      * Segments goes across *all* active shards.
      */
     @Override
-    protected GroupShardsIterator shards(ClusterState clusterState, IndicesSegmentsRequest request, String[] concreteIndices) {
-        return clusterState.routingTable().allActiveShardsGrouped(concreteIndices, true);
+    protected ShardsIterator shards(ClusterState clusterState, IndicesSegmentsRequest request, String[] concreteIndices) {
+        return clusterState.routingTable().allShards(concreteIndices);
     }
 
     @Override
@@ -83,68 +75,26 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastAction<Ind
     }
 
     @Override
-    protected IndicesSegmentResponse newResponse(IndicesSegmentsRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
-        int successfulShards = 0;
-        int failedShards = 0;
-        List<ShardOperationFailedException> shardFailures = null;
-        final List<ShardSegments> shards = newArrayList();
-        for (int i = 0; i < shardsResponses.length(); i++) {
-            Object shardResponse = shardsResponses.get(i);
-            if (shardResponse == null) {
-                // simply ignore non active shards
-            } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
-                failedShards++;
-                if (shardFailures == null) {
-                    shardFailures = newArrayList();
-                }
-                shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
-            } else {
-                shards.add((ShardSegments) shardResponse);
-                successfulShards++;
-            }
-        }
-        return new IndicesSegmentResponse(shards.toArray(new ShardSegments[shards.size()]), clusterState, shardsResponses.length(), successfulShards, failedShards, shardFailures);
+    protected ShardSegments readShardResult(StreamInput in) throws IOException {
+        return ShardSegments.readShardSegments(in);
     }
 
     @Override
-    protected IndexShardSegmentRequest newShardRequest(int numShards, ShardRouting shard, IndicesSegmentsRequest request) {
-        return new IndexShardSegmentRequest(shard.shardId(), request);
+    protected IndicesSegmentResponse newResponse(IndicesSegmentsRequest request, int totalShards, int successfulShards, int failedShards, List<ShardSegments> results, List<ShardOperationFailedException> shardFailures) {
+        return new IndicesSegmentResponse(results.toArray(new ShardSegments[results.size()]), totalShards, successfulShards, failedShards, shardFailures);
     }
 
     @Override
-    protected ShardSegments newShardResponse() {
-        return new ShardSegments();
+    protected IndicesSegmentsRequest readRequestFrom(StreamInput in) throws IOException {
+        final IndicesSegmentsRequest request = new IndicesSegmentsRequest();
+        request.readFrom(in);
+        return request;
     }
 
     @Override
-    protected ShardSegments shardOperation(IndexShardSegmentRequest request) {
-        IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
-        IndexShard indexShard = indexService.shardSafe(request.shardId().id());
-        return new ShardSegments(indexShard.routingEntry(), indexShard.engine().segments(request.verbose));
-    }
-
-    static class IndexShardSegmentRequest extends BroadcastShardRequest {
-        boolean verbose;
-
-        IndexShardSegmentRequest() {
-            verbose = false;
-        }
-
-        IndexShardSegmentRequest(ShardId shardId, IndicesSegmentsRequest request) {
-            super(shardId, request);
-            verbose = request.verbose();
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            super.writeTo(out);
-            out.writeBoolean(verbose);
-        }
-
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            super.readFrom(in);
-            verbose = in.readBoolean();
-        }
+    protected ShardSegments shardOperation(IndicesSegmentsRequest request, ShardRouting shardRouting) {
+        IndexService indexService = indicesService.indexServiceSafe(shardRouting.getIndex());
+        IndexShard indexShard = indexService.shardSafe(shardRouting.id());
+        return new ShardSegments(indexShard.routingEntry(), indexShard.engine().segments(request.verbose()));
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java
index 7cda8d0972a..6f701e4134c 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java
@@ -19,9 +19,9 @@
 package org.elasticsearch.action.admin.indices.stats;
 
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -57,7 +57,7 @@ public class IndexStats implements Iterable<IndexShardStats> {
         for (ShardStats shard : shards) {
             List<ShardStats> lst = tmpIndexShards.get(shard.getShardRouting().id());
             if (lst == null) {
-                lst = Lists.newArrayList();
+                lst = new ArrayList<>();
                 tmpIndexShards.put(shard.getShardRouting().id(), lst);
             }
             lst.add(shard);
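The Lists.newArrayList() removals that recur through this commit were a pre-Java-7 convenience: Guava's factory method inferred the element type before the diamond operator existed. With diamond inference, the plain JDK form is equivalent, which is why the swap is mechanical. Illustrative comparison only:

    List<ShardStats> before = Lists.newArrayList();   // Guava factory, pre-diamond idiom
    List<ShardStats> after = new ArrayList<>();       // equivalent JDK form used after this change
    // This commit maps the sized variant the same way (see GetIndexTemplatesResponse below):
    //   Lists.newArrayListWithExpectedSize(n)  ->  new ArrayList<>(n)
    // Guava's sizing heuristic differs slightly, but behavior is unchanged.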
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java
index 2d9bf1e78f9..885dddeea6a 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java
@@ -20,12 +20,10 @@
 package org.elasticsearch.action.admin.indices.stats;
 
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.broadcast.BroadcastResponse;
-import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -35,6 +33,7 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
 import org.elasticsearch.common.xcontent.XContentFactory;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -51,7 +50,7 @@ public class IndicesStatsResponse extends BroadcastResponse implements ToXConten
 
     }
 
-    IndicesStatsResponse(ShardStats[] shards, ClusterState clusterState, int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures) {
+    IndicesStatsResponse(ShardStats[] shards, int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures) {
         super(totalShards, successfulShards, failedShards, shardFailures);
         this.shards = shards;
     }
@@ -90,11 +89,11 @@ public class IndicesStatsResponse extends BroadcastResponse implements ToXConten
         Set<String> indices = Sets.newHashSet();
         for (ShardStats shard : shards) {
-            indices.add(shard.getIndex());
+            indices.add(shard.getShardRouting().getIndex());
         }
 
         for (String index : indices) {
-            List<ShardStats> shards = Lists.newArrayList();
+            List<ShardStats> shards = new ArrayList<>();
             for (ShardStats shard : this.shards) {
                 if (shard.getShardRouting().index().equals(index)) {
                     shards.add(shard);
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java
index 5f4dd98ddec..2ef6c4df41d 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java
@@ -19,11 +19,11 @@
 package org.elasticsearch.action.admin.indices.stats;
 
-import org.elasticsearch.action.support.broadcast.BroadcastShardResponse;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentBuilderString;
@@ -36,21 +36,23 @@ import static org.elasticsearch.cluster.routing.ShardRouting.readShardRoutingEnt
 
 /**
  */
-public class ShardStats extends BroadcastShardResponse implements ToXContent {
-
+public class ShardStats implements Streamable, ToXContent {
     private ShardRouting shardRouting;
-
-    CommonStats commonStats;
-
+    private CommonStats commonStats;
     @Nullable
-    CommitStats commitStats;
+    private CommitStats commitStats;
+    private String dataPath;
+    private String statePath;
+    private boolean isCustomDataPath;
 
     ShardStats() {
     }
 
-    public ShardStats(IndexShard indexShard, ShardRouting shardRouting, CommonStatsFlags flags) {
-        super(indexShard.shardId());
-        this.shardRouting = shardRouting;
+    public ShardStats(IndexShard indexShard, CommonStatsFlags flags) {
+        this.shardRouting = indexShard.routingEntry();
+        this.dataPath = indexShard.shardPath().getRootDataPath().toString();
+        this.statePath = indexShard.shardPath().getRootStatePath().toString();
+        this.isCustomDataPath = indexShard.shardPath().isCustomDataPath();
         this.commonStats = new CommonStats(indexShard, flags);
         this.commitStats = indexShard.commitStats();
     }
@@ -70,6 +72,18 @@ public class ShardStats extends BroadcastShardResponse implements ToXContent {
         return this.commitStats;
     }
 
+    public String getDataPath() {
+        return dataPath;
+    }
+
+    public String getStatePath() {
+        return statePath;
+    }
+
+    public boolean isCustomDataPath() {
+        return isCustomDataPath;
+    }
+
     public static ShardStats readShardStats(StreamInput in) throws IOException {
         ShardStats stats = new ShardStats();
         stats.readFrom(in);
@@ -78,18 +92,22 @@ public class ShardStats extends BroadcastShardResponse implements ToXContent {
 
     @Override
     public void readFrom(StreamInput in) throws IOException {
-        super.readFrom(in);
         shardRouting = readShardRoutingEntry(in);
         commonStats = CommonStats.readCommonStats(in);
         commitStats = CommitStats.readOptionalCommitStatsFrom(in);
+        statePath = in.readString();
+        dataPath = in.readString();
+        isCustomDataPath = in.readBoolean();
     }
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        super.writeTo(out);
         shardRouting.writeTo(out);
         commonStats.writeTo(out);
         out.writeOptionalStreamable(commitStats);
+        out.writeString(statePath);
+        out.writeString(dataPath);
+        out.writeBoolean(isCustomDataPath);
     }
 
     @Override
@@ -105,15 +123,23 @@ public class ShardStats extends BroadcastShardResponse implements ToXContent {
         if (commitStats != null) {
             commitStats.toXContent(builder, params);
         }
+        builder.startObject(Fields.SHARD_PATH);
+        builder.field(Fields.STATE_PATH, statePath);
+        builder.field(Fields.DATA_PATH, dataPath);
+        builder.field(Fields.IS_CUSTOM_DATA_PATH, isCustomDataPath);
+        builder.endObject();
         return builder;
     }
 
     static final class Fields {
         static final XContentBuilderString ROUTING = new XContentBuilderString("routing");
         static final XContentBuilderString STATE = new XContentBuilderString("state");
+        static final XContentBuilderString STATE_PATH = new XContentBuilderString("state_path");
+        static final XContentBuilderString DATA_PATH = new XContentBuilderString("data_path");
+        static final XContentBuilderString IS_CUSTOM_DATA_PATH = new XContentBuilderString("is_custom_data_path");
+        static final XContentBuilderString SHARD_PATH = new XContentBuilderString("shard_path");
         static final XContentBuilderString PRIMARY = new XContentBuilderString("primary");
         static final XContentBuilderString NODE = new XContentBuilderString("node");
         static final XContentBuilderString RELOCATING_NODE = new XContentBuilderString("relocating_node");
     }
-
 }
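ShardStats now derives its routing entry and the three new path fields from the IndexShard itself rather than accepting a separate ShardRouting, which removes the possibility of pairing a shard's stats with someone else's routing entry. The call-site change, visible in TransportIndicesStatsAction below:

    // before: routing supplied separately by the caller
    new ShardStats(indexShard, indexShard.routingEntry(), flags);
    // after: routing and shard paths are read off the IndexShard itself
    new ShardStats(indexShard, flags);

The new fields surface in the stats output under a shard_path object keyed by state_path, data_path, and is_custom_data_path, per the Fields constants above.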
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java
index b298cdff75b..bf38f4dc7c0 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java
@@ -19,27 +19,21 @@
 package org.elasticsearch.action.admin.indices.stats;
 
-import com.google.common.collect.Lists;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.DefaultShardOperationFailedException;
-import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
-import org.elasticsearch.action.support.broadcast.BroadcastShardRequest;
-import org.elasticsearch.action.support.broadcast.TransportBroadcastAction;
+import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardsIterator;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.shard.IndexShard;
-import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.shard.ShardNotFoundException;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -47,13 +41,10 @@ import org.elasticsearch.transport.TransportService;
 
 import java.io.IOException;
 import java.util.List;
-import java.util.concurrent.atomic.AtomicReferenceArray;
-
-import static com.google.common.collect.Lists.newArrayList;
 
 /**
  */
-public class TransportIndicesStatsAction extends TransportBroadcastAction<IndicesStatsRequest, IndicesStatsResponse, TransportIndicesStatsAction.IndexShardStatsRequest, ShardStats> {
+public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction<IndicesStatsRequest, IndicesStatsResponse, ShardStats> {
 
     private final IndicesService indicesService;
 
@@ -62,7 +53,7 @@ public class TransportIndicesStatsAction extends TransportBroadcastAction<Indice
                                        TransportService transportService, IndicesService indicesService, ActionFilters actionFilters,
                                        IndexNameExpressionResolver indexNameExpressionResolver) {
         super(settings, IndicesStatsAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
-                IndicesStatsRequest.class, IndexShardStatsRequest.class, ThreadPool.Names.MANAGEMENT);
+                IndicesStatsRequest.class, ThreadPool.Names.MANAGEMENT);
         this.indicesService = indicesService;
     }
 
@@ -70,8 +61,8 @@ public class TransportIndicesStatsAction extends TransportBroadcastAction<Indice
      * Status goes across *all* shards.
      */
     @Override
-    protected GroupShardsIterator shards(ClusterState clusterState, IndicesStatsRequest request, String[] concreteIndices) {
-        return clusterState.routingTable().allAssignedShardsGrouped(concreteIndices, true);
+    protected ShardsIterator shards(ClusterState clusterState, IndicesStatsRequest request, String[] concreteIndices) {
+        return clusterState.routingTable().allShards(concreteIndices);
     }
 
     @Override
@@ -84,45 +75,27 @@ public class TransportIndicesStatsAction extends TransportBroadcastAction<Indice
         return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, concreteIndices);
     }
 
-    @Override
-    protected IndicesStatsResponse newResponse(IndicesStatsRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
-        int successfulShards = 0;
-        int failedShards = 0;
-        List<ShardOperationFailedException> shardFailures = null;
-        final List<ShardStats> shards = Lists.newArrayList();
-        for (int i = 0; i < shardsResponses.length(); i++) {
-            Object shardResponse = shardsResponses.get(i);
-            if (shardResponse == null) {
-                // simply ignore non active shards
-            } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
-                failedShards++;
-                if (shardFailures == null) {
-                    shardFailures = newArrayList();
-                }
-                shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
-            } else {
-                shards.add((ShardStats) shardResponse);
-                successfulShards++;
-            }
-        }
-        return new IndicesStatsResponse(shards.toArray(new ShardStats[shards.size()]), clusterState, shardsResponses.length(), successfulShards, failedShards, shardFailures);
+    protected ShardStats readShardResult(StreamInput in) throws IOException {
+        return ShardStats.readShardStats(in);
     }
 
     @Override
-    protected IndexShardStatsRequest newShardRequest(int numShards, ShardRouting shard, IndicesStatsRequest request) {
-        return new IndexShardStatsRequest(shard.shardId(), request);
+    protected IndicesStatsResponse newResponse(IndicesStatsRequest request, int totalShards, int successfulShards, int failedShards, List<ShardStats> responses, List<ShardOperationFailedException> shardFailures) {
+        return new IndicesStatsResponse(responses.toArray(new ShardStats[responses.size()]), totalShards, successfulShards, failedShards, shardFailures);
    }
 
     @Override
-    protected ShardStats newShardResponse() {
-        return new ShardStats();
+    protected IndicesStatsRequest readRequestFrom(StreamInput in) throws IOException {
+        IndicesStatsRequest request = new IndicesStatsRequest();
+        request.readFrom(in);
+        return request;
     }
 
     @Override
-    protected ShardStats shardOperation(IndexShardStatsRequest request) {
-        IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
-        IndexShard indexShard = indexService.shardSafe(request.shardId().id());
+    protected ShardStats shardOperation(IndicesStatsRequest request, ShardRouting shardRouting) {
+        IndexService indexService = indicesService.indexServiceSafe(shardRouting.shardId().getIndex());
+        IndexShard indexShard = indexService.shardSafe(shardRouting.shardId().id());
         // if we don't have the routing entry yet, we need it stats wise, we treat it as if the shard is not ready yet
         if (indexShard.routingEntry() == null) {
             throw new ShardNotFoundException(indexShard.shardId());
@@ -130,92 +103,65 @@ public class TransportIndicesStatsAction extends TransportBroadcastAction<Indice
 
         CommonStatsFlags flags = new CommonStatsFlags().clear();
 
-        if (request.request.docs()) {
+        if (request.docs()) {
             flags.set(CommonStatsFlags.Flag.Docs);
         }
-        if (request.request.store()) {
+        if (request.store()) {
             flags.set(CommonStatsFlags.Flag.Store);
         }
-        if (request.request.indexing()) {
+        if (request.indexing()) {
             flags.set(CommonStatsFlags.Flag.Indexing);
-            flags.types(request.request.types());
+            flags.types(request.types());
         }
-        if (request.request.get()) {
+        if (request.get()) {
             flags.set(CommonStatsFlags.Flag.Get);
         }
-        if (request.request.search()) {
+        if (request.search()) {
             flags.set(CommonStatsFlags.Flag.Search);
-            flags.groups(request.request.groups());
+            flags.groups(request.groups());
         }
-        if (request.request.merge()) {
+        if (request.merge()) {
             flags.set(CommonStatsFlags.Flag.Merge);
         }
-        if (request.request.refresh()) {
+        if (request.refresh()) {
             flags.set(CommonStatsFlags.Flag.Refresh);
         }
-        if (request.request.flush()) {
+        if (request.flush()) {
             flags.set(CommonStatsFlags.Flag.Flush);
         }
-        if (request.request.warmer()) {
+        if (request.warmer()) {
             flags.set(CommonStatsFlags.Flag.Warmer);
         }
-        if (request.request.queryCache()) {
+        if (request.queryCache()) {
             flags.set(CommonStatsFlags.Flag.QueryCache);
         }
-        if (request.request.fieldData()) {
+        if (request.fieldData()) {
             flags.set(CommonStatsFlags.Flag.FieldData);
-            flags.fieldDataFields(request.request.fieldDataFields());
+            flags.fieldDataFields(request.fieldDataFields());
        }
-        if (request.request.percolate()) {
+        if (request.percolate()) {
             flags.set(CommonStatsFlags.Flag.Percolate);
         }
-        if (request.request.segments()) {
+        if (request.segments()) {
             flags.set(CommonStatsFlags.Flag.Segments);
         }
-        if (request.request.completion()) {
+        if (request.completion()) {
             flags.set(CommonStatsFlags.Flag.Completion);
-            flags.completionDataFields(request.request.completionFields());
+            flags.completionDataFields(request.completionFields());
        }
-        if (request.request.translog()) {
+        if (request.translog()) {
             flags.set(CommonStatsFlags.Flag.Translog);
         }
-        if (request.request.suggest()) {
+        if (request.suggest()) {
             flags.set(CommonStatsFlags.Flag.Suggest);
         }
-        if (request.request.requestCache()) {
+        if (request.requestCache()) {
             flags.set(CommonStatsFlags.Flag.RequestCache);
         }
-        if (request.request.recovery()) {
+        if (request.recovery()) {
             flags.set(CommonStatsFlags.Flag.Recovery);
         }
 
-        return new ShardStats(indexShard, indexShard.routingEntry(), flags);
-    }
-
-    static class IndexShardStatsRequest extends BroadcastShardRequest {
-
-        // TODO if there are many indices, the request might hold a large indices array..., we don't really need to serialize it
-        IndicesStatsRequest request;
-
-        IndexShardStatsRequest() {
-        }
-
-        IndexShardStatsRequest(ShardId shardId, IndicesStatsRequest request) {
-            super(shardId, request);
-            this.request = request;
-        }
-
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            super.readFrom(in);
-            request = new IndicesStatsRequest();
-            request.readFrom(in);
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            super.writeTo(out);
-            request.writeTo(out);
-        }
+        return new ShardStats(indexShard, flags);
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java
index 2ce6d8d2c1a..dfe99852dce 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java
@@ -18,13 +18,13 @@
 */
 package org.elasticsearch.action.admin.indices.template.get;
 
-import com.google.common.collect.Lists;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 
 /**
@@ -49,7 +49,7 @@ public class GetIndexTemplatesResponse extends ActionResponse {
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
         int size = in.readVInt();
-        indexTemplates = Lists.newArrayListWithExpectedSize(size);
+        indexTemplates = new ArrayList<>(size);
         for (int i = 0 ; i < size ; i++) {
             indexTemplates.add(0, IndexTemplateMetaData.Builder.readFrom(in));
         }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java
index f5f33c657a3..23f2399da39 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java
@@ -19,7 +19,6 @@
 package org.elasticsearch.action.admin.indices.template.get;
 
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import com.google.common.collect.Lists;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
@@ -35,6 +34,8 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 /**
@@ -69,9 +70,9 @@ public class TransportGetIndexTemplatesAction extends TransportMasterNodeReadAct
 
         // If we did not ask for a specific name, then we return all templates
         if (request.names().length == 0) {
-            results = Lists.newArrayList(state.metaData().templates().values().toArray(IndexTemplateMetaData.class));
+            results = Arrays.asList(state.metaData().templates().values().toArray(IndexTemplateMetaData.class));
         } else {
-            results = Lists.newArrayList();
+            results = new ArrayList<>();
         }
 
         for (String name : request.names()) {
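One subtlety in TransportGetIndexTemplatesAction: Arrays.asList returns a fixed-size view, so it is only safe in the request.names().length == 0 branch because the loop over request.names() that follows never executes there, and the list is never grown. The named branch keeps a growable ArrayList for exactly that reason. Illustration with hypothetical values:

    List<String> all = Arrays.asList("t1", "t2");
    // all.add("t3");   // would throw UnsupportedOperationException: fixed-size view
    List<String> growable = new ArrayList<>(Arrays.asList("t1", "t2"));
    growable.add("t3");  // fine: a real, resizable copy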
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexUpgradeStatus.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexUpgradeStatus.java
index 33a60328951..b85359cb696 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexUpgradeStatus.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexUpgradeStatus.java
@@ -19,9 +19,9 @@
 package org.elasticsearch.action.admin.indices.upgrade.get;
 
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -39,7 +39,7 @@ public class IndexUpgradeStatus implements Iterable<IndexShardUpgradeStatus> {
         for (ShardUpgradeStatus shard : shards) {
             List<ShardUpgradeStatus> lst = tmpIndexShards.get(shard.getShardRouting().id());
             if (lst == null) {
-                lst = Lists.newArrayList();
+                lst = new ArrayList<>();
                 tmpIndexShards.put(shard.getShardRouting().id(), lst);
             }
             lst.add(shard);
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java
index a169bc4f76d..b12a7d81465 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java
@@ -22,36 +22,31 @@
 package org.elasticsearch.action.admin.indices.upgrade.get;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.DefaultShardOperationFailedException;
-import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
-import org.elasticsearch.action.support.broadcast.BroadcastShardRequest;
-import org.elasticsearch.action.support.broadcast.TransportBroadcastAction;
+import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardsIterator;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.Segment;
 import org.elasticsearch.index.shard.IndexShard;
-import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.io.IOException;
 import java.util.List;
-import java.util.concurrent.atomic.AtomicReferenceArray;
-
-import static com.google.common.collect.Lists.newArrayList;
 
 /**
  *
 */
-public class TransportUpgradeStatusAction extends TransportBroadcastAction<UpgradeStatusRequest, UpgradeStatusResponse, TransportUpgradeStatusAction.IndexShardUpgradeStatusRequest, ShardUpgradeStatus> {
+public class TransportUpgradeStatusAction extends TransportBroadcastByNodeAction<UpgradeStatusRequest, UpgradeStatusResponse, ShardUpgradeStatus> {
 
     private final IndicesService indicesService;
 
@@ -59,7 +54,7 @@ public class TransportUpgradeStatusAction extends TransportBroadcastAction<Upgra
     public TransportUpgradeStatusAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                         TransportService transportService, IndicesService indicesService, ActionFilters actionFilters,
                                         IndexNameExpressionResolver indexNameExpressionResolver) {
         super(settings, UpgradeStatusAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
-                UpgradeStatusRequest.class, IndexShardUpgradeStatusRequest.class, ThreadPool.Names.MANAGEMENT);
+                UpgradeStatusRequest.class, ThreadPool.Names.MANAGEMENT);
         this.indicesService = indicesService;
     }
 
@@ -67,8 +62,8 @@ public class TransportUpgradeStatusAction extends TransportBroadcastAction<Upgra
      * Getting upgrade stats from *all* active shards.
      */
     @Override
-    protected GroupShardsIterator shards(ClusterState clusterState, UpgradeStatusRequest request, String[] concreteIndices) {
-        return clusterState.routingTable().allActiveShardsGrouped(concreteIndices, true);
+    protected ShardsIterator shards(ClusterState clusterState, UpgradeStatusRequest request, String[] concreteIndices) {
+        return clusterState.routingTable().allShards(concreteIndices);
     }
 
     @Override
@@ -82,43 +77,26 @@ public class TransportUpgradeStatusAction extends TransportBroadcastAction<Upgra
     }
 
     @Override
-    protected UpgradeStatusResponse newResponse(UpgradeStatusRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
-        int successfulShards = 0;
-        int failedShards = 0;
-        List<ShardOperationFailedException> shardFailures = null;
-        final List<ShardUpgradeStatus> shards = newArrayList();
-        for (int i = 0; i < shardsResponses.length(); i++) {
-            Object shardResponse = shardsResponses.get(i);
-            if (shardResponse == null) {
-                // simply ignore non active shards
-            } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
-                failedShards++;
-                if (shardFailures == null) {
-                    shardFailures = newArrayList();
-                }
-                shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
-            } else {
-                shards.add((ShardUpgradeStatus) shardResponse);
-                successfulShards++;
-            }
-        }
-        return new UpgradeStatusResponse(shards.toArray(new ShardUpgradeStatus[shards.size()]), shardsResponses.length(), successfulShards, failedShards, shardFailures);
+    protected ShardUpgradeStatus readShardResult(StreamInput in) throws IOException {
+        return ShardUpgradeStatus.readShardUpgradeStatus(in);
     }
 
     @Override
-    protected IndexShardUpgradeStatusRequest newShardRequest(int numShards, ShardRouting shard, UpgradeStatusRequest request) {
-        return new IndexShardUpgradeStatusRequest(shard.shardId(), request);
+    protected UpgradeStatusResponse newResponse(UpgradeStatusRequest request, int totalShards, int successfulShards, int failedShards, List<ShardUpgradeStatus> responses, List<ShardOperationFailedException> shardFailures) {
+        return new UpgradeStatusResponse(responses.toArray(new ShardUpgradeStatus[responses.size()]), totalShards, successfulShards, failedShards, shardFailures);
     }
 
     @Override
-    protected ShardUpgradeStatus newShardResponse() {
-        return new ShardUpgradeStatus();
+    protected UpgradeStatusRequest readRequestFrom(StreamInput in) throws IOException {
+        UpgradeStatusRequest request = new UpgradeStatusRequest();
+        request.readFrom(in);
+        return request;
     }
 
     @Override
-    protected ShardUpgradeStatus shardOperation(IndexShardUpgradeStatusRequest request) {
-        IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
-        IndexShard indexShard = indexService.shardSafe(request.shardId().id());
+    protected ShardUpgradeStatus shardOperation(UpgradeStatusRequest request, ShardRouting shardRouting) {
+        IndexService indexService = indicesService.indexServiceSafe(shardRouting.shardId().getIndex());
+        IndexShard indexShard = indexService.shardSafe(shardRouting.shardId().id());
         List<Segment> segments = indexShard.engine().segments(false);
         long total_bytes = 0;
         long to_upgrade_bytes = 0;
@@ -137,16 +115,4 @@ public class TransportUpgradeStatusAction extends TransportBroadcastAction<Upgra
 
         return new ShardUpgradeStatus(indexShard.routingEntry(), total_bytes, to_upgrade_bytes, to_upgrade_bytes_ancient);
     }
-
-    static class IndexShardUpgradeStatusRequest extends BroadcastShardRequest {
-
-        IndexShardUpgradeStatusRequest() {
-
-        }
-
-        IndexShardUpgradeStatusRequest(ShardId shardId, UpgradeStatusRequest request) {
-            super(shardId, request);
-        }
-
-    }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequest.java
index a951924720d..de4ea72c663 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequest.java
@@ -21,10 +21,6 @@
 package org.elasticsearch.action.admin.indices.upgrade.get;
 
 import org.elasticsearch.action.support.broadcast.BroadcastRequest;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-
-import java.io.IOException;
 
 public class UpgradeStatusRequest extends BroadcastRequest<UpgradeStatusRequest> {
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
index 89520704049..82683625df8 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
@@ -19,7 +19,6 @@
 package org.elasticsearch.action.admin.indices.upgrade.get;
 
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import org.elasticsearch.action.ShardOperationFailedException;
@@ -31,19 +30,17 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentBuilderString;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
 public class UpgradeStatusResponse extends BroadcastResponse implements ToXContent {
-
-
     private ShardUpgradeStatus[] shards;
 
     private Map<String, IndexUpgradeStatus> indicesUpgradeStatus;
 
     UpgradeStatusResponse() {
-
     }
 
     UpgradeStatusResponse(ShardUpgradeStatus[] shards, int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures) {
@@ -63,7 +60,7 @@ public class UpgradeStatusResponse extends BroadcastResponse implements ToXConte
         }
 
         for (String index : indices) {
-            List<ShardUpgradeStatus> shards = Lists.newArrayList();
+            List<ShardUpgradeStatus> shards = new ArrayList<>();
             for (ShardUpgradeStatus shard : this.shards) {
                 if (shard.getShardRouting().index().equals(index)) {
                     shards.add(shard);
@@ -75,7 +72,6 @@ public class UpgradeStatusResponse extends BroadcastResponse implements ToXConte
         return indicesUpgradeStats;
     }
 
-
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
@@ -120,8 +116,6 @@ public class UpgradeStatusResponse extends BroadcastResponse implements ToXConte
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-
-
         builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, getTotalBytes());
         builder.byteSizeField(Fields.SIZE_TO_UPGRADE_IN_BYTES, Fields.SIZE_TO_UPGRADE, getToUpgradeBytes());
         builder.byteSizeField(Fields.SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, Fields.SIZE_TO_UPGRADE_ANCIENT, getToUpgradeBytesAncient());
@@ -163,10 +157,8 @@ public class UpgradeStatusResponse extends BroadcastResponse implements ToXConte
                 }
                 builder.endObject();
             }
-
             builder.endObject();
         }
-
         builder.endObject();
         }
         return builder;
@@ -186,6 +178,5 @@ public class UpgradeStatusResponse extends BroadcastResponse implements ToXConte
         static final XContentBuilderString SIZE_TO_UPGRADE_ANCIENT = new XContentBuilderString("size_to_upgrade_ancient");
         static final XContentBuilderString SIZE_TO_UPGRADE_IN_BYTES = new XContentBuilderString("size_to_upgrade_in_bytes");
         static final XContentBuilderString SIZE_TO_UPGRADE_ANCIENT_IN_BYTES = new XContentBuilderString("size_to_upgrade_ancient_in_bytes");
-
     }
-}
\ No newline at end of file
+}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java
index 39a736019f4..23d40a55cf0 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java
@@ -33,7 +33,10 @@ import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaData;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.GroupShardsIterator;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
@@ -42,12 +45,12 @@ import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicReferenceArray;
 
-import static com.google.common.collect.Lists.newArrayList;
 import static com.google.common.collect.Maps.newHashMap;
 import static com.google.common.collect.Sets.newHashSet;
 
@@ -84,7 +87,7 @@ public class TransportUpgradeAction extends TransportBroadcastAction<UpgradeRequ
             } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
                 failedShards++;
                 if (shardFailures == null) {
-                    shardFailures = newArrayList();
+                    shardFailures = new ArrayList<>();
                 }
                 shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
             } else {
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
index bed778bd4c9..6fefa0ddfd4 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
@@ -54,14 +54,13 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.atomic.AtomicReferenceArray;
 
-import static com.google.common.collect.Lists.newArrayList;
-
 /**
  *
 */
@@ -136,7 +135,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
             } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
                 failedShards++;
                 if (shardFailures == null) {
-                    shardFailures = newArrayList();
+                    shardFailures = new ArrayList<>();
                 }
                 shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
             } else {
@@ -144,7 +143,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
                 valid = valid && validateQueryResponse.isValid();
                 if (request.explain() || request.rewrite()) {
                     if (queryExplanations == null) {
-                        queryExplanations = newArrayList();
+                        queryExplanations = new ArrayList<>();
                     }
                     queryExplanations.add(new QueryExplanation(
                             validateQueryResponse.getIndex(),
@@ -197,7 +196,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
             valid = false;
             error = e.getMessage();
         } finally {
-            SearchContext.current().close();
+            searchContext.close();
             SearchContext.removeCurrent();
         }
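The validate-query change in the finally block is easy to miss: the code previously closed SearchContext.current(), i.e. whatever the thread-local happened to hold, whereas it now closes the searchContext instance created earlier in the method and then clears the thread-local. The shape of the pattern, with the creation line assumed for illustration since the diff elides it:

    SearchContext searchContext = createSearchContext(request);  // assumed helper, not from the diff
    SearchContext.setCurrent(searchContext);
    try {
        // parse and validate the query against this context
    } finally {
        searchContext.close();         // close the instance we created...
        SearchContext.removeCurrent(); // ...then clear the thread-local
    }

Closing the local reference keeps the cleanup correct even if the thread-local was swapped or never set, so the context created here cannot leak.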
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java
index 54a0c4d4276..8df668d1087 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java
@@ -18,7 +18,6 @@
 */
 package org.elasticsearch.action.admin.indices.warmer.delete;
 
-import com.google.common.collect.Lists;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
@@ -39,6 +38,7 @@ import org.elasticsearch.search.warmer.IndexWarmersMetaData;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
@@ -92,6 +92,14 @@ public class TransportDeleteWarmerAction extends TransportMasterNodeAction<Delet
                 MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
 
                 boolean globalFoundAtLeastOne = false;
+                boolean deleteAll = false;
+                for (int i=0; i<request.names().length; i++){
+                    if (request.names()[i].equals(MetaData.ALL)) {
+                        deleteAll = true;
+                        break;
+                    }
+                }
+
                 for (String index : concreteIndices) {
                     IndexMetaData indexMetaData = currentState.metaData().index(index);
                     if (indexMetaData == null) {
@@ -99,11 +107,11 @@ public class TransportDeleteWarmerAction extends TransportMasterNodeAction<Delet
                     }
                     IndexWarmersMetaData warmers = indexMetaData.custom(IndexWarmersMetaData.TYPE);
                     if (warmers != null) {
-                        List<IndexWarmersMetaData.Entry> entries = Lists.newArrayList();
+                        List<IndexWarmersMetaData.Entry> entries = new ArrayList<>();
                         for (IndexWarmersMetaData.Entry entry : warmers.entries()) {
                             boolean keepWarmer = true;
                             for (String warmer : request.names()) {
-                                if (Regex.simpleMatch(warmer, entry.name()) || warmer.equals("_all")) {
+                                if (Regex.simpleMatch(warmer, entry.name()) || warmer.equals(MetaData.ALL)) {
                                     globalFoundAtLeastOne = true;
                                     keepWarmer = false;
                                     // don't add it...
@@ -123,7 +131,7 @@ public class TransportDeleteWarmerAction extends TransportMasterNodeAction<Delet
                     }
                 }
 
-                if (!globalFoundAtLeastOne) {
+                if (globalFoundAtLeastOne == false && deleteAll == false) {
                     throw new IndexWarmerMissingException(request.names());
                 }
 
@@ -137,11 +145,13 @@ public class TransportDeleteWarmerAction extends TransportMasterNodeAction<Delet
                         if (warmers != null) {
                             for (IndexWarmersMetaData.Entry entry : warmers.entries()) {
                                 for (String warmer : request.names()) {
-                                    if (Regex.simpleMatch(warmer, entry.name()) || warmer.equals("_all")) {
+                                    if (Regex.simpleMatch(warmer, entry.name()) || warmer.equals(MetaData.ALL)) {
                                         logger.info("[{}] delete warmer [{}]", index, entry.name());
                                     }
                                 }
                             }
+                        } else if(deleteAll){
+                            logger.debug("no warmers to delete on index [{}]", index);
                         }
                     }
                 }
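In the warmer deletion above, names are matched with Regex.simpleMatch, which performs simple '*' wildcard matching rather than full regular expressions, while _all is compared against the MetaData.ALL constant explicitly instead of being treated as a pattern. Roughly, and only as an illustration of the matching semantics:

    Regex.simpleMatch("warm*", "warmer_1");   // true: '*' matches any suffix
    Regex.simpleMatch("warm*", "cold_1");     // false
    "_all".equals(MetaData.ALL);              // the sentinel now tracked by the deleteAll flag

The new deleteAll flag is what lets a delete-warmer request for _all succeed with a debug log, instead of an IndexWarmerMissingException, on indices that have no warmers.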
extends IndicesRequest> subRequests() { - List<IndicesRequest> indicesRequests = Lists.newArrayList(); + List<IndicesRequest> indicesRequests = new ArrayList<>(); for (ActionRequest request : requests) { assert request instanceof IndicesRequest; indicesRequests.add((IndicesRequest) request); diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 07427acfd82..fd78241db6b 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.bulk; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; @@ -57,7 +56,13 @@ import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; /** @@ -251,7 +256,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul ShardId shardId = clusterService.operationRouting().indexShards(clusterState, concreteIndex, indexRequest.type(), indexRequest.id(), indexRequest.routing()).shardId(); List<BulkItemRequest> list = requestsByShard.get(shardId); if (list == null) { - list = Lists.newArrayList(); + list = new ArrayList<>(); requestsByShard.put(shardId, list); } list.add(new BulkItemRequest(i, request)); @@ -265,7 +270,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul for (ShardIterator shardIt : groupShards) { List<BulkItemRequest> list = requestsByShard.get(shardIt.shardId()); if (list == null) { - list = Lists.newArrayList(); + list = new ArrayList<>(); requestsByShard.put(shardIt.shardId(), list); } list.add(new BulkItemRequest(i, new DeleteRequest(deleteRequest))); @@ -274,7 +279,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul ShardId shardId = clusterService.operationRouting().deleteShards(clusterState, concreteIndex, deleteRequest.type(), deleteRequest.id(), deleteRequest.routing()).shardId(); List<BulkItemRequest> list = requestsByShard.get(shardId); if (list == null) { - list = Lists.newArrayList(); + list = new ArrayList<>(); requestsByShard.put(shardId, list); } list.add(new BulkItemRequest(i, request)); @@ -292,7 +297,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul ShardId shardId = clusterService.operationRouting().indexShards(clusterState, concreteIndex, updateRequest.type(), updateRequest.id(), updateRequest.routing()).shardId(); List<BulkItemRequest> list = requestsByShard.get(shardId); if (list == null) { - list = Lists.newArrayList(); + list = new ArrayList<>(); requestsByShard.put(shardId, list); } list.add(new BulkItemRequest(i, request)); diff --git a/core/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java b/core/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java index 9d51904827a..d9c89e78328 100644 --- a/core/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java @@ -54,13 +54,13 @@ import org.elasticsearch.search.query.QueryPhaseExecutionException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReferenceArray; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.action.exists.ExistsRequest.DEFAULT_MIN_SCORE; public class TransportExistsAction extends TransportBroadcastAction<ExistsRequest, ExistsResponse, ShardExistsRequest, ShardExistsResponse> { @@ -131,7 +131,7 @@ public class TransportExistsAction extends TransportBroadcastAction<ExistsReques } else if (shardResponse instanceof BroadcastShardOperationFailedException) { failedShards++; if (shardFailures == null) { - shardFailures = newArrayList(); + shardFailures = new ArrayList<>(); } shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); } else { @@ -174,9 +174,8 @@ public class TransportExistsAction extends TransportBroadcastAction<ExistsReques } context.preProcess(); try { - Lucene.EarlyTerminatingCollector existsCollector = Lucene.createExistsCollector(); - Lucene.exists(context.searcher(), context.query(), existsCollector); - return new ShardExistsResponse(request.shardId(), existsCollector.exists()); + boolean exists = Lucene.exists(context, context.query(), Lucene.createExistsCollector()); + return new ShardExistsResponse(request.shardId(), exists); } catch (Exception e) { throw new QueryPhaseExecutionException(context, "failed to execute exists", e); } diff --git a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequest.java b/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequest.java index 2778e03d8bc..74537379d1d 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequest.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.action.percolate; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -35,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -50,7 +50,7 @@ public class MultiPercolateRequest extends ActionRequest<MultiPercolateRequest> private String[] indices; private String documentType; private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpenAndForbidClosed(); - private List<PercolateRequest> requests = Lists.newArrayList(); + private List<PercolateRequest> requests = new ArrayList<>(); /** * Embeds a percolate request to this multi percolate request @@ -163,7 +163,7 @@ public class MultiPercolateRequest extends ActionRequest<MultiPercolateRequest> @Override public List<? 
extends IndicesRequest> subRequests() { - List<IndicesRequest> indicesRequests = Lists.newArrayList(); + List<IndicesRequest> indicesRequests = new ArrayList<>(); for (PercolateRequest percolateRequest : this.requests) { indicesRequests.addAll(percolateRequest.subRequests()); } diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java index 9c9a3859585..47f39cef0bd 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.action.percolate; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; @@ -35,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -79,7 +79,7 @@ public class PercolateRequest extends BroadcastRequest<PercolateRequest> impleme @Override public List<? extends IndicesRequest> subRequests() { - List<IndicesRequest> requests = Lists.newArrayList(); + List<IndicesRequest> requests = new ArrayList<>(); requests.add(this); if (getRequest != null) { requests.add(getRequest); diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java index af8aa43837e..967d5a0a2b7 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java @@ -19,8 +19,6 @@ package org.elasticsearch.action.percolate; -import com.google.common.collect.Lists; - import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.client.Requests; @@ -38,6 +36,7 @@ import org.elasticsearch.search.sort.ScoreSortBuilder; import org.elasticsearch.search.sort.SortBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -99,7 +98,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes { */ public PercolateSourceBuilder addSort(SortBuilder sort) { if (sorts == null) { - sorts = Lists.newArrayList(); + sorts = new ArrayList<>(); } sorts.add(sort); return this; @@ -127,7 +126,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes { */ public PercolateSourceBuilder addAggregation(AbstractAggregationBuilder aggregationBuilder) { if (aggregations == null) { - aggregations = Lists.newArrayList(); + aggregations = new ArrayList<>(); } aggregations.add(aggregationBuilder); return this; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java index bb160f84962..2194975161a 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java @@ -43,13 +43,12 @@ import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicReferenceArray; -import static com.google.common.collect.Lists.newArrayList; - /** * */ @@ -126,7 +125,7 @@ public class TransportPercolateAction extends TransportBroadcastAction<Percolate } else if (shardResponse instanceof BroadcastShardOperationFailedException) { failedShards++; if (shardFailures == null) { - shardFailures = newArrayList(); + shardFailures = new ArrayList<>(); } shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); } else { @@ -135,7 +134,7 @@ public class TransportPercolateAction extends TransportBroadcastAction<Percolate if (!percolateShardResponse.isEmpty()) { if (shardResults == null) { percolatorTypeId = percolateShardResponse.percolatorTypeId(); - shardResults = newArrayList(); + shardResults = new ArrayList<>(); } shardResults.add(percolateShardResponse); } diff --git a/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java b/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java index 4ffd5e29ceb..17343e86912 100644 --- a/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java @@ -25,10 +25,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.action.ValidateActions.addValidationError; /** @@ -47,7 +47,7 @@ public class ClearScrollRequest extends ActionRequest<ClearScrollRequest> { public void addScrollId(String scrollId) { if (scrollIds == null) { - scrollIds = newArrayList(); + scrollIds = new ArrayList<>(); } scrollIds.add(scrollId); } diff --git a/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java b/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java index 06e7d03b98d..df2e2424e71 100644 --- a/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java @@ -19,8 +19,6 @@ package org.elasticsearch.action.search; -import com.google.common.collect.Lists; - import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; @@ -41,16 +39,17 @@ +import java.util.ArrayList; import java.util.List; import java.util.Map; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringArrayValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue;
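Review note: the list-construction hunks in this patch are one mechanical migration, Guava's `Lists.newArrayList()` to the plain JDK constructor, which the Java 7 diamond operator makes equally terse. A runnable sketch of the before/after, including the lazy-initialization pattern `ClearScrollRequest#addScrollId` uses (class name hypothetical):

```java
import java.util.ArrayList;
import java.util.List;

public class NewArrayListMigration {
    // Before (Guava): scrollIds = Lists.newArrayList();
    // After (JDK):    scrollIds = new ArrayList<>();
    private List<String> scrollIds; // initialized lazily, as in ClearScrollRequest

    public void addScrollId(String scrollId) {
        if (scrollIds == null) {
            scrollIds = new ArrayList<>();
        }
        scrollIds.add(scrollId);
    }

    public static void main(String[] args) {
        NewArrayListMigration request = new NewArrayListMigration();
        request.addScrollId("c2NhbjsxOzE6bm9kZV8x"); // hypothetical scroll id
        System.out.println(request.scrollIds);
    }
}
```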
/** * A multi search API request. */ public class MultiSearchRequest extends ActionRequest<MultiSearchRequest> implements CompositeIndicesRequest { - private List<SearchRequest> requests = Lists.newArrayList(); + private List<SearchRequest> requests = new ArrayList<>(); private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpenAndForbidClosed(); diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java index 78410a1cffc..8e49f45760e 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.type.ScrollIdForNode; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterService; @@ -27,7 +28,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.CountDown; @@ -69,7 +69,7 @@ public class TransportClearScrollAction extends HandledTransportAction<ClearScro final DiscoveryNodes nodes; final CountDown expectedOps; final ClearScrollRequest request; - final List<Tuple<String, Long>[]> contexts = new ArrayList<>(); + final List<ScrollIdForNode[]> contexts = new ArrayList<>(); final ActionListener<ClearScrollResponse> listener; final AtomicReference<Throwable> expHolder; final AtomicInteger numberOfFreedSearchContexts = new AtomicInteger(0); @@ -81,7 +81,7 @@ public class TransportClearScrollAction extends HandledTransportAction<ClearScro expectedOps = nodes.size(); } else { for (String parsedScrollId : request.getScrollIds()) { - Tuple<String, Long>[] context = parseScrollId(parsedScrollId).getContext(); + ScrollIdForNode[] context = parseScrollId(parsedScrollId).getContext(); expectedOps += context.length; this.contexts.add(context); } @@ -114,15 +114,15 @@ public class TransportClearScrollAction extends HandledTransportAction<ClearScro }); } } else { - for (Tuple<String, Long>[] context : contexts) { - for (Tuple<String, Long> target : context) { - final DiscoveryNode node = nodes.get(target.v1()); + for (ScrollIdForNode[] context : contexts) { + for (ScrollIdForNode target : context) { + final DiscoveryNode node = nodes.get(target.getNode()); if (node == null) { onFreedContext(false); continue; } - searchServiceTransportAction.sendFreeContext(node, target.v2(), request, new ActionListener<SearchServiceTransportAction.SearchFreeContextResponse>() { + searchServiceTransportAction.sendFreeContext(node, target.getScrollId(), request, new ActionListener<SearchServiceTransportAction.SearchFreeContextResponse>() { @Override public void onResponse(SearchServiceTransportAction.SearchFreeContextResponse freed) { onFreedContext(freed.isFreed()); diff --git a/core/src/main/java/org/elasticsearch/action/search/type/ParsedScrollId.java b/core/src/main/java/org/elasticsearch/action/search/type/ParsedScrollId.java index 57d5a0365ca..222727df4c1 100644 ---
a/core/src/main/java/org/elasticsearch/action/search/type/ParsedScrollId.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/ParsedScrollId.java @@ -19,9 +19,6 @@ package org.elasticsearch.action.search.type; -import org.elasticsearch.Version; -import org.elasticsearch.common.collect.Tuple; - import java.util.Map; /** @@ -39,11 +36,11 @@ public class ParsedScrollId { private final String type; - private final Tuple<String, Long>[] context; + private final ScrollIdForNode[] context; private final Map<String, String> attributes; - public ParsedScrollId(String source, String type, Tuple<String, Long>[] context, Map<String, String> attributes) { + public ParsedScrollId(String source, String type, ScrollIdForNode[] context, Map<String, String> attributes) { this.source = source; this.type = type; this.context = context; @@ -58,7 +55,7 @@ public class ParsedScrollId { return type; } - public Tuple<String, Long>[] getContext() { + public ScrollIdForNode[] getContext() { return context; } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeResponse.java b/core/src/main/java/org/elasticsearch/action/search/type/ScrollIdForNode.java similarity index 68% rename from core/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeResponse.java rename to core/src/main/java/org/elasticsearch/action/search/type/ScrollIdForNode.java index 1c9dc4482d9..38c79c91519 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeResponse.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/ScrollIdForNode.java @@ -17,20 +17,22 @@ * under the License. */ -package org.elasticsearch.action.admin.indices.optimize; +package org.elasticsearch.action.search.type; -import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; -import org.elasticsearch.index.shard.ShardId; +public class ScrollIdForNode { + private final String node; + private final long scrollId; -/** - * - */ -class ShardOptimizeResponse extends BroadcastShardResponse { - - ShardOptimizeResponse() { + public ScrollIdForNode(String node, long scrollId) { + this.node = node; + this.scrollId = scrollId; } - ShardOptimizeResponse(ShardId shardId) { - super(shardId); + public String getNode() { + return node; } -} \ No newline at end of file + + public long getScrollId() { + return scrollId; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchHelper.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchHelper.java index 0f8da3a8ade..bb9e85810a2 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchHelper.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchHelper.java @@ -30,7 +30,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Base64; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.internal.InternalScrollSearchRequest; @@ -103,14 +102,14 @@ public abstract class TransportSearchHelper { throw new IllegalArgumentException("Malformed scrollId [" + scrollId + "]"); } - @SuppressWarnings({"unchecked"}) Tuple<String, Long>[] context = new Tuple[contextSize]; + ScrollIdForNode[] context = new ScrollIdForNode[contextSize]; for (int i = 0; i < 
contextSize; i++) { String element = elements[index++]; int sep = element.indexOf(':'); if (sep == -1) { throw new IllegalArgumentException("Malformed scrollId [" + scrollId + "]"); } - context[i] = new Tuple<>(element.substring(sep + 1), Long.parseLong(element.substring(0, sep))); + context[i] = new ScrollIdForNode(element.substring(sep + 1), Long.parseLong(element.substring(0, sep))); } Map<String, String> attributes; int attributesSize = Integer.parseInt(elements[index++]); diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java index 1683a6bbf6d..bb2c82d8831 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.search.*; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -115,15 +114,15 @@ public class TransportSearchScrollQueryAndFetchAction extends AbstractComponent return; } - Tuple<String, Long>[] context = scrollId.getContext(); + ScrollIdForNode[] context = scrollId.getContext(); for (int i = 0; i < context.length; i++) { - Tuple<String, Long> target = context[i]; - DiscoveryNode node = nodes.get(target.v1()); + ScrollIdForNode target = context[i]; + DiscoveryNode node = nodes.get(target.getNode()); if (node != null) { - executePhase(i, node, target.v2()); + executePhase(i, node, target.getScrollId()); } else { if (logger.isDebugEnabled()) { - logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.getSource() + "]"); + logger.debug("Node [" + target.getNode() + "] not available for scroll request [" + scrollId.getSource() + "]"); } successfulOps.decrementAndGet(); if (counter.decrementAndGet() == 0) { @@ -132,11 +131,11 @@ public class TransportSearchScrollQueryAndFetchAction extends AbstractComponent } } - for (Tuple<String, Long> target : scrollId.getContext()) { - DiscoveryNode node = nodes.get(target.v1()); + for (ScrollIdForNode target : scrollId.getContext()) { + DiscoveryNode node = nodes.get(target.getNode()); if (node == null) { if (logger.isDebugEnabled()) { - logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.getSource() + "]"); + logger.debug("Node [" + target.getNode() + "] not available for scroll request [" + scrollId.getSource() + "]"); } successfulOps.decrementAndGet(); if (counter.decrementAndGet() == 0) { diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java index 84d631e24c2..9c7742615c4 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.search.*; import org.elasticsearch.cluster.ClusterService; import 
org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -124,15 +123,15 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent } final AtomicInteger counter = new AtomicInteger(scrollId.getContext().length); - Tuple<String, Long>[] context = scrollId.getContext(); + ScrollIdForNode[] context = scrollId.getContext(); for (int i = 0; i < context.length; i++) { - Tuple<String, Long> target = context[i]; - DiscoveryNode node = nodes.get(target.v1()); + ScrollIdForNode target = context[i]; + DiscoveryNode node = nodes.get(target.getNode()); if (node != null) { - executeQueryPhase(i, counter, node, target.v2()); + executeQueryPhase(i, counter, node, target.getScrollId()); } else { if (logger.isDebugEnabled()) { - logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.getSource() + "]"); + logger.debug("Node [" + target.getNode() + "] not available for scroll request [" + scrollId.getSource() + "]"); } successfulOps.decrementAndGet(); if (counter.decrementAndGet() == 0) { diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollScanAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollScanAction.java index 0be39abba14..16ab26f7d46 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollScanAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollScanAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -125,15 +124,15 @@ public class TransportSearchScrollScanAction extends AbstractComponent { return; } - Tuple<String, Long>[] context = scrollId.getContext(); + ScrollIdForNode[] context = scrollId.getContext(); for (int i = 0; i < context.length; i++) { - Tuple<String, Long> target = context[i]; - DiscoveryNode node = nodes.get(target.v1()); + ScrollIdForNode target = context[i]; + DiscoveryNode node = nodes.get(target.getNode()); if (node != null) { - executePhase(i, node, target.v2()); + executePhase(i, node, target.getScrollId()); } else { if (logger.isDebugEnabled()) { - logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.getSource() + "]"); + logger.debug("Node [" + target.getNode() + "] not available for scroll request [" + scrollId.getSource() + "]"); } successfulOps.decrementAndGet(); if (counter.decrementAndGet() == 0) { @@ -142,11 +141,11 @@ public class TransportSearchScrollScanAction extends AbstractComponent { } } - for (Tuple<String, Long> target : scrollId.getContext()) { - DiscoveryNode node = nodes.get(target.v1()); + for (ScrollIdForNode target : scrollId.getContext()) { + DiscoveryNode node = nodes.get(target.getNode()); if (node == null) { if (logger.isDebugEnabled()) { - logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.getSource() + "]"); +
logger.debug("Node [" + target.getNode() + "] not available for scroll request [" + scrollId.getSource() + "]"); } successfulOps.decrementAndGet(); if (counter.decrementAndGet() == 0) { diff --git a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java index 2269856c072..c584c8856a5 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java @@ -48,14 +48,13 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicReferenceArray; -import static com.google.common.collect.Lists.newArrayList; - /** * Defines the transport of a suggestion request across the cluster */ @@ -115,7 +114,7 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ } else if (shardResponse instanceof BroadcastShardOperationFailedException) { failedShards++; if (shardFailures == null) { - shardFailures = newArrayList(); + shardFailures = new ArrayList<>(); } shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); } else { diff --git a/core/src/main/java/org/elasticsearch/action/support/AbstractListenableActionFuture.java b/core/src/main/java/org/elasticsearch/action/support/AbstractListenableActionFuture.java index 7d4497d4bd6..790689b4206 100644 --- a/core/src/main/java/org/elasticsearch/action/support/AbstractListenableActionFuture.java +++ b/core/src/main/java/org/elasticsearch/action/support/AbstractListenableActionFuture.java @@ -19,13 +19,13 @@ package org.elasticsearch.action.support; -import com.google.common.collect.Lists; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.threadpool.ThreadPool; +import java.util.ArrayList; import java.util.List; /** @@ -66,7 +66,7 @@ public abstract class AbstractListenableActionFuture<T, L> extends AdapterAction ((List) this.listeners).add(listener); } else { Object orig = listeners; - listeners = Lists.newArrayListWithCapacity(2); + listeners = new ArrayList<>(2); ((List) listeners).add(orig); ((List) listeners).add(listener); } diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index dcb6952dc35..2386a82650a 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -47,7 +47,6 @@ import java.util.concurrent.atomic.AtomicReferenceArray; public abstract class TransportBroadcastAction<Request extends BroadcastRequest, Response extends BroadcastResponse, ShardRequest extends BroadcastShardRequest, ShardResponse extends BroadcastShardResponse> extends HandledTransportAction<Request, Response> { - protected final ThreadPool threadPool; protected final ClusterService clusterService; protected final TransportService transportService; @@ 
-59,7 +58,6 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest, super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); this.clusterService = clusterService; this.transportService = transportService; - this.threadPool = threadPool; this.transportShardAction = actionName + "[s]"; transportService.registerRequestHandler(transportShardAction, shardRequest, shardExecutor, new ShardTransportHandler()); diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java new file mode 100644 index 00000000000..eee14a61123 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -0,0 +1,559 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.support.broadcast.node; + +import com.google.common.collect.Maps; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.TransportActions; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.NodeShouldNotConnectException; +import org.elasticsearch.transport.TransportChannel; 
+import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReferenceArray; + +/** + * Abstraction for transporting aggregated shard-level operations in a single request (NodeRequest) per-node + * and executing the shard-level operations serially on the receiving node. Each shard-level operation can produce a + * result (ShardOperationResult); these per-shard results are aggregated into a single per-node result + * (NodeResponse) to the coordinating node. These per-node results are aggregated into a single response (Response) + * to the client. + * + * @param <Request> the underlying client request + * @param <Response> the response to the client request + * @param <ShardOperationResult> per-shard operation results + */ +public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRequest, + Response extends BroadcastResponse, + ShardOperationResult extends Streamable> extends HandledTransportAction<Request, Response> { + + private final ClusterService clusterService; + private final TransportService transportService; + + final String transportNodeBroadcastAction; + + public TransportBroadcastByNodeAction( + Settings settings, + String actionName, + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Class<Request> request, + String executor) { + super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); + + this.clusterService = clusterService; + this.transportService = transportService; + + transportNodeBroadcastAction = actionName + "[n]"; + + transportService.registerRequestHandler(transportNodeBroadcastAction, new Callable<NodeRequest>() { + @Override + public NodeRequest call() throws Exception { + return new NodeRequest(); + } + }, executor, new BroadcastByNodeTransportRequestHandler()); + } + + private final Response newResponse(Request request, AtomicReferenceArray responses, List<NoShardAvailableActionException> unavailableShardExceptions, Map<String, List<ShardRouting>> nodes) { + int totalShards = 0; + int successfulShards = 0; + List<ShardOperationResult> broadcastByNodeResponses = new ArrayList<>(); + List<ShardOperationFailedException> exceptions = new ArrayList<>(); + for (int i = 0; i < responses.length(); i++) { + if (responses.get(i) instanceof FailedNodeException) { + FailedNodeException exception = (FailedNodeException) responses.get(i); + totalShards += nodes.get(exception.nodeId()).size(); + for (ShardRouting shard : nodes.get(exception.nodeId())) { + exceptions.add(new DefaultShardOperationFailedException(shard.getIndex(), shard.getId(), exception)); + } + } else { + NodeResponse response = (NodeResponse) responses.get(i); + broadcastByNodeResponses.addAll(response.results); + totalShards += response.getTotalShards(); + successfulShards += response.getSuccessfulShards(); + for (BroadcastShardOperationFailedException throwable : response.getExceptions()) { + if
(!TransportActions.isShardNotAvailableException(throwable)) { + exceptions.add(new DefaultShardOperationFailedException(throwable.getIndex(), throwable.getShardId().getId(), throwable)); + } + } + } + } + totalShards += unavailableShardExceptions.size(); + int failedShards = exceptions.size(); + return newResponse(request, totalShards, successfulShards, failedShards, broadcastByNodeResponses, exceptions); + } + + /** + * Deserialize a shard-level result from an input stream + * + * @param in input stream + * @return a deserialized shard-level result + * @throws IOException + */ + protected abstract ShardOperationResult readShardResult(StreamInput in) throws IOException; + + /** + * Creates a new response to the underlying request. + * + * @param request the underlying request + * @param totalShards the total number of shards considered for execution of the operation + * @param successfulShards the total number of shards for which execution of the operation was successful + * @param failedShards the total number of shards for which execution of the operation failed + * @param results the per-node aggregated shard-level results + * @param shardFailures the exceptions corresponding to shard operation failures + * @return the response + */ + protected abstract Response newResponse(Request request, int totalShards, int successfulShards, int failedShards, List<ShardOperationResult> results, List<ShardOperationFailedException> shardFailures); + + /** + * Deserialize a request from an input stream + * + * @param in input stream + * @return a deserialized request + * @throws IOException + */ + protected abstract Request readRequestFrom(StreamInput in) throws IOException; + + /** + * Executes the shard-level operation. This method is called once per shard serially on the receiving node. + * + * @param request the node-level request + * @param shardRouting the shard on which to execute the operation + * @return the result of the shard-level operation for the shard + */ + protected abstract ShardOperationResult shardOperation(Request request, ShardRouting shardRouting); + + /** + * Determines the shards on which this operation will be executed. The operation is executed once per shard. + * + * @param clusterState the cluster state + * @param request the underlying request + * @param concreteIndices the concrete indices on which to execute the operation + * @return the shards on which to execute the operation + */ + protected abstract ShardsIterator shards(ClusterState clusterState, Request request, String[] concreteIndices); + + /** + * Executes a global block check before polling the cluster state. + * + * @param state the cluster state + * @param request the underlying request + * @return a non-null exception if the operation is blocked + */ + protected abstract ClusterBlockException checkGlobalBlock(ClusterState state, Request request); +
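Review note: concrete subclasses typically implement the two block checks by delegating to the cluster-state block container; returning null means "not blocked". A hedged fragment of what such overrides might look like (not compilable standalone, it assumes the surrounding subclass, and the `ClusterBlockLevel` value is an illustrative assumption, not taken from this patch):

```java
// Hypothetical overrides for a read-only action extending TransportBroadcastByNodeAction.
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state, Request request) {
    // null when no global block applies at this level
    return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA);
}

@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, Request request, String[] concreteIndices) {
    // null when none of the resolved indices is blocked at this level
    return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, concreteIndices);
}
```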
+ /** + * Executes a global request-level check before polling the cluster state. + * + * @param state the cluster state + * @param request the underlying request + * @param concreteIndices the concrete indices on which to execute the operation + * @return a non-null exception if the operation is blocked + */ + protected abstract ClusterBlockException checkRequestBlock(ClusterState state, Request request, String[] concreteIndices); + + @Override + protected void doExecute(Request request, ActionListener<Response> listener) { + new AsyncAction(request, listener).start(); + } + + protected class AsyncAction { + private final Request request; + private final ActionListener<Response> listener; + private final ClusterState clusterState; + private final DiscoveryNodes nodes; + private final Map<String, List<ShardRouting>> nodeIds; + private final AtomicReferenceArray<Object> responses; + private final AtomicInteger counter = new AtomicInteger(); + private List<NoShardAvailableActionException> unavailableShardExceptions = new ArrayList<>(); + + protected AsyncAction(Request request, ActionListener<Response> listener) { + this.request = request; + this.listener = listener; + + clusterState = clusterService.state(); + nodes = clusterState.nodes(); + + ClusterBlockException globalBlockException = checkGlobalBlock(clusterState, request); + if (globalBlockException != null) { + throw globalBlockException; + } + + String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, request); + ClusterBlockException requestBlockException = checkRequestBlock(clusterState, request, concreteIndices); + if (requestBlockException != null) { + throw requestBlockException; + } + + logger.trace("resolving shards for [{}] based on cluster state version [{}]", actionName, clusterState.version()); + ShardsIterator shardIt = shards(clusterState, request, concreteIndices); + nodeIds = Maps.newHashMap(); + + for (ShardRouting shard : shardIt.asUnordered()) { + if (shard.assignedToNode()) { + String nodeId = shard.currentNodeId(); + if (!nodeIds.containsKey(nodeId)) { + nodeIds.put(nodeId, new ArrayList<ShardRouting>()); + } + nodeIds.get(nodeId).add(shard); + } else { + unavailableShardExceptions.add( + new NoShardAvailableActionException( + shard.shardId(), + " no shards available for shard " + shard.toString() + " while executing " + actionName + ) + ); + } + } + + responses = new AtomicReferenceArray<>(nodeIds.size()); + } + + public void start() { + if (nodeIds.size() == 0) { + try { + onCompletion(); + } catch (Throwable e) { + listener.onFailure(e); + } + } else { + int nodeIndex = -1; + for (Map.Entry<String, List<ShardRouting>> entry : nodeIds.entrySet()) { + nodeIndex++; + DiscoveryNode node = nodes.get(entry.getKey()); + sendNodeRequest(node, entry.getValue(), nodeIndex); + } + } + } + + private void sendNodeRequest(final DiscoveryNode node, List<ShardRouting> shards, final int nodeIndex) { + try { + NodeRequest nodeRequest = new NodeRequest(node.getId(), request, shards); + transportService.sendRequest(node, transportNodeBroadcastAction, nodeRequest, new BaseTransportResponseHandler<NodeResponse>() { + @Override + public NodeResponse newInstance() { + return new NodeResponse(); + } + + @Override + public void handleResponse(NodeResponse response) { + onNodeResponse(node, nodeIndex, response); + } + + @Override + public void handleException(TransportException exp) { + onNodeFailure(node, nodeIndex, exp); + } + + @Override + public String executor() { + return ThreadPool.Names.SAME; + } + }); + } catch (Throwable e) { + onNodeFailure(node, nodeIndex, e); + } + } + +
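Review note: the two handlers that follow share one concurrency idiom; each node's slot in the `AtomicReferenceArray` is set at most once via compare-and-set, and completion fires only when the counter reaches the slot count. A self-contained model of that accumulation (all names hypothetical):

```java
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReferenceArray;

public class FanOutModel {
    private final AtomicReferenceArray<Object> responses;
    private final AtomicInteger counter = new AtomicInteger();

    FanOutModel(int nodeCount) {
        responses = new AtomicReferenceArray<>(nodeCount);
    }

    // Accepts either a response object or a failure; the CAS guards against
    // a double invocation for the same node index.
    void onNodeResult(int nodeIndex, Object resultOrFailure) {
        if (responses.compareAndSet(nodeIndex, null, resultOrFailure)) {
            if (counter.incrementAndGet() == responses.length()) {
                System.out.println("all nodes accounted for: " + responses);
            }
        }
    }

    public static void main(String[] args) {
        FanOutModel model = new FanOutModel(2);
        model.onNodeResult(0, "node-0-response");
        model.onNodeResult(0, "ignored duplicate");
        model.onNodeResult(1, new RuntimeException("node-1 failed"));
    }
}
```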
protected void onNodeResponse(DiscoveryNode node, int nodeIndex, NodeResponse response) { + logger.trace("received response for [{}] from node [{}]", actionName, node.id()); + + // this is defensive to protect against the possibility of double invocation + // the current implementation of TransportService#sendRequest guards against this + // but concurrency is hard, safety is important, and the small performance loss here does not matter + if (responses.compareAndSet(nodeIndex, null, response)) { + if (counter.incrementAndGet() == responses.length()) { + onCompletion(); + } + } + } + + protected void onNodeFailure(DiscoveryNode node, int nodeIndex, Throwable t) { + String nodeId = node.id(); + if (logger.isDebugEnabled() && !(t instanceof NodeShouldNotConnectException)) { + logger.debug("failed to execute [{}] on node [{}]", t, actionName, nodeId); + } + + // this is defensive to protect against the possibility of double invocation + // the current implementation of TransportService#sendRequest guards against this + // but concurrency is hard, safety is important, and the small performance loss here does not matter + if (responses.compareAndSet(nodeIndex, null, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t))) { + if (counter.incrementAndGet() == responses.length()) { + onCompletion(); + } + } + } + + protected void onCompletion() { + Response response = null; + try { + response = newResponse(request, responses, unavailableShardExceptions, nodeIds); + } catch (Throwable t) { + logger.debug("failed to combine responses from nodes", t); + listener.onFailure(t); + } + if (response != null) { + try { + listener.onResponse(response); + } catch (Throwable t) { + listener.onFailure(t); + } + } + } + } + + class BroadcastByNodeTransportRequestHandler implements TransportRequestHandler<NodeRequest> { + @Override + public void messageReceived(final NodeRequest request, TransportChannel channel) throws Exception { + List<ShardRouting> shards = request.getShards(); + final int totalShards = shards.size(); + logger.trace("[{}] executing operation on [{}] shards", actionName, totalShards); + final Object[] shardResultOrExceptions = new Object[totalShards]; + + int shardIndex = -1; + for (final ShardRouting shardRouting : shards) { + shardIndex++; + onShardOperation(request, shardResultOrExceptions, shardIndex, shardRouting); + } + + List<BroadcastShardOperationFailedException> accumulatedExceptions = new ArrayList<>(); + List<ShardOperationResult> results = new ArrayList<>(); + for (int i = 0; i < totalShards; i++) { + if (shardResultOrExceptions[i] instanceof BroadcastShardOperationFailedException) { + accumulatedExceptions.add((BroadcastShardOperationFailedException) shardResultOrExceptions[i]); + } else { + results.add((ShardOperationResult) shardResultOrExceptions[i]); + } + } + + channel.sendResponse(new NodeResponse(request.getNodeId(), totalShards, results, accumulatedExceptions)); + } + + private void onShardOperation(final NodeRequest request, final Object[] shardResults, final int shardIndex, final ShardRouting shardRouting) { + try { + logger.trace("[{}] executing operation for shard [{}]", actionName, shardRouting.shortSummary()); + ShardOperationResult result = shardOperation(request.indicesLevelRequest, shardRouting); + shardResults[shardIndex] = result; + logger.trace("[{}] completed operation for shard [{}]", actionName, shardRouting.shortSummary()); + } catch (Throwable t) { + BroadcastShardOperationFailedException e = new 
BroadcastShardOperationFailedException(shardRouting.shardId(), "operation " + actionName + " failed", t); + e.setIndex(shardRouting.getIndex()); + e.setShard(shardRouting.shardId()); + shardResults[shardIndex] = e; + logger.debug("[{}] failed to execute operation for shard [{}]", e, actionName, shardRouting.shortSummary()); + } + } + } + + protected class NodeRequest extends TransportRequest implements IndicesRequest { + private String nodeId; + + private List<ShardRouting> shards; + + protected Request indicesLevelRequest; + + protected NodeRequest() { + } + + public NodeRequest(String nodeId, Request request, List<ShardRouting> shards) { + super(request); + this.indicesLevelRequest = request; + this.shards = shards; + this.nodeId = nodeId; + } + + public List<ShardRouting> getShards() { + return shards; + } + + public String getNodeId() { + return nodeId; + } + + public String[] indices() { + return indicesLevelRequest.indices(); + } + + public IndicesOptions indicesOptions() { + return indicesLevelRequest.indicesOptions(); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + indicesLevelRequest = readRequestFrom(in); + int size = in.readVInt(); + shards = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + shards.add(ShardRouting.readShardRoutingEntry(in)); + } + nodeId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + indicesLevelRequest.writeTo(out); + int size = shards.size(); + out.writeVInt(size); + for (int i = 0; i < size; i++) { + shards.get(i).writeTo(out); + } + out.writeString(nodeId); + } + } + + class NodeResponse extends TransportResponse { + protected String nodeId; + protected int totalShards; + protected List<BroadcastShardOperationFailedException> exceptions; + protected List<ShardOperationResult> results; + + public NodeResponse() { + } + + public NodeResponse(String nodeId, + int totalShards, + List<ShardOperationResult> results, + List<BroadcastShardOperationFailedException> exceptions) { + this.nodeId = nodeId; + this.totalShards = totalShards; + this.results = results; + this.exceptions = exceptions; + } + + public String getNodeId() { + return nodeId; + } + + public int getTotalShards() { + return totalShards; + } + + public int getSuccessfulShards() { + return results.size(); + } + + public List<BroadcastShardOperationFailedException> getExceptions() { + return exceptions; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + nodeId = in.readString(); + totalShards = in.readVInt(); + int resultsSize = in.readVInt(); + results = new ArrayList<>(resultsSize); + for (; resultsSize > 0; resultsSize--) { + final ShardOperationResult result = in.readBoolean() ? 
readShardResult(in) : null; + results.add(result); + } + if (in.readBoolean()) { + int failureShards = in.readVInt(); + exceptions = new ArrayList<>(failureShards); + for (int i = 0; i < failureShards; i++) { + exceptions.add(new BroadcastShardOperationFailedException(in)); + } + } else { + exceptions = null; + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(nodeId); + out.writeVInt(totalShards); + out.writeVInt(results.size()); + for (ShardOperationResult result : results) { + out.writeOptionalStreamable(result); + } + out.writeBoolean(exceptions != null); + if (exceptions != null) { + int failureShards = exceptions.size(); + out.writeVInt(failureShards); + for (int i = 0; i < failureShards; i++) { + exceptions.get(i).writeTo(out); + } + } + } + } + + /** + * Can be used for implementations of {@link #shardOperation(BroadcastRequest, ShardRouting) shardOperation} for + * which there is no shard-level return value. + */ + public final static class EmptyResult implements Streamable { + public static EmptyResult INSTANCE = new EmptyResult(); + + private EmptyResult() { + } + + @Override + public void readFrom(StreamInput in) throws IOException { + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + } + + public static EmptyResult readEmptyResultFrom(StreamInput in) { + return INSTANCE; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 48a6ed2bc5a..50c5a3f2582 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -97,7 +97,7 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques listener.onFailure(blockException); return; } - logger.trace("can't execute due to a cluster block: [{}], retrying", blockException); + logger.trace("can't execute due to a cluster block, retrying", blockException); observer.waitForNextChange( new ClusterStateObserver.Listener() { @Override diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java index 39a3dae7569..93907cf0aa6 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java @@ -46,7 +46,6 @@ public abstract class ReplicationRequest<T extends ReplicationRequest> extends A protected TimeValue timeout = DEFAULT_TIMEOUT; protected String index; - private boolean threadedOperation = true; private WriteConsistencyLevel consistencyLevel = WriteConsistencyLevel.DEFAULT; private volatile boolean canHaveDuplicates = false; @@ -76,7 +75,6 @@ public abstract class ReplicationRequest<T extends ReplicationRequest> extends A super(originalRequest); this.timeout = request.timeout(); this.index = request.index(); - this.threadedOperation = request.operationThreaded(); this.consistencyLevel = request.consistencyLevel(); } @@ -91,23 +89,6 @@ public abstract class ReplicationRequest<T extends ReplicationRequest> extends A return canHaveDuplicates; } - /** - * Controls if the operation will be executed on a separate thread when executed locally. 
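Review note: with `operationThreaded` gone, the primary and replica paths below always fork onto the executor instead of sometimes running inline. A compact runnable model of the simplified control flow (plain JDK executor; names hypothetical, and it ignores the force-execution semantics of the real thread pool):

```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class AlwaysFork {
    interface Listener {
        void onSuccess();
        void onFailure(Throwable t);
    }

    // The operation is unconditionally submitted to the executor; outcomes are
    // reported through callbacks, never by running the work inline.
    static void perform(ExecutorService executor, final Runnable shardOperation, final Listener listener) {
        try {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        shardOperation.run();
                        listener.onSuccess();
                    } catch (Throwable t) {
                        listener.onFailure(t);
                    }
                }
            });
        } catch (Throwable t) {
            listener.onFailure(t); // e.g. rejected execution
        }
    }

    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        perform(executor, new Runnable() {
            @Override
            public void run() {
                System.out.println("shard operation on " + Thread.currentThread().getName());
            }
        }, new Listener() {
            @Override
            public void onSuccess() {
                System.out.println("acked");
            }

            @Override
            public void onFailure(Throwable t) {
                System.out.println("failed: " + t);
            }
        });
        executor.shutdown();
        executor.awaitTermination(1, TimeUnit.SECONDS);
    }
}
```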
- */ - public final boolean operationThreaded() { - return threadedOperation; - } - - /** - * Controls if the operation will be executed on a separate thread when executed locally. Defaults - * to <tt>true</tt> when running in embedded mode. - */ - @SuppressWarnings("unchecked") - public final T operationThreaded(boolean threadedOperation) { - this.threadedOperation = threadedOperation; - return (T) this; - } - /** * A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>. */ diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java index 7762eaee7dc..bafb33be6a7 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java @@ -35,16 +35,6 @@ public abstract class ReplicationRequestBuilder<Request extends ReplicationReque super(client, action, request); } - /** - * Controls if the operation will be executed on a separate thread when executed locally. Defaults - * to <tt>true</tt> when running in embedded mode. - */ - @SuppressWarnings("unchecked") - public final RequestBuilder setOperationThreaded(boolean threadedOperation) { - request.operationThreaded(threadedOperation); - return (RequestBuilder) this; - } - /** * A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>. */ diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index bc4094acc54..155c30756ca 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -211,8 +211,6 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ class OperationTransportHandler implements TransportRequestHandler<Request> { @Override public void messageReceived(final Request request, final TransportChannel channel) throws Exception { - // if we have a local operation, execute it on a thread since we don't spawn - request.operationThreaded(true); execute(request, new ActionListener<Response>() { @Override public void onResponse(Response result) { @@ -440,21 +438,17 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ protected void routeRequestOrPerformLocally(final ShardRouting primary, final ShardIterator shardsIt) { if (primary.currentNodeId().equals(observer.observedState().nodes().localNodeId())) { try { - if (internalRequest.request().operationThreaded()) { - threadPool.executor(executor).execute(new AbstractRunnable() { - @Override - public void onFailure(Throwable t) { - finishAsFailed(t); - } + threadPool.executor(executor).execute(new AbstractRunnable() { + @Override + public void onFailure(Throwable t) { + finishAsFailed(t); + } - @Override - protected void doRun() throws Exception { - performOnPrimary(primary, shardsIt); - } - }); - } else { - performOnPrimary(primary, shardsIt); - } + @Override + protected void doRun() throws Exception { + performOnPrimary(primary, shardsIt); + } + }); } catch (Throwable t) { finishAsFailed(t); } @@ -506,9 +500,6 @@ public abstract class TransportReplicationAction<Request 
extends ReplicationRequ finishAsFailed(failure); return; } - // make it threaded operation so we fork on the discovery listener thread - internalRequest.request().operationThreaded(true); - observer.waitForNextChange(new ClusterStateObserver.Listener() { @Override public void onNewClusterState(ClusterState state) { @@ -904,43 +895,33 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ }); } else { - if (replicaRequest.operationThreaded()) { - try { - threadPool.executor(executor).execute(new AbstractRunnable() { - @Override - protected void doRun() { - try { - shardOperationOnReplica(shard.shardId(), replicaRequest); - onReplicaSuccess(); - } catch (Throwable e) { - onReplicaFailure(nodeId, e); - failReplicaIfNeeded(shard.index(), shard.id(), e); - } + try { + threadPool.executor(executor).execute(new AbstractRunnable() { + @Override + protected void doRun() { + try { + shardOperationOnReplica(shard.shardId(), replicaRequest); + onReplicaSuccess(); + } catch (Throwable e) { + onReplicaFailure(nodeId, e); + failReplicaIfNeeded(shard.index(), shard.id(), e); } + } - // we must never reject on because of thread pool capacity on replicas - @Override - public boolean isForceExecution() { - return true; - } + // we must never reject because of thread pool capacity on replicas + @Override + public boolean isForceExecution() { + return true; + } - @Override - public void onFailure(Throwable t) { - onReplicaFailure(nodeId, t); - } - }); - } catch (Throwable e) { - failReplicaIfNeeded(shard.index(), shard.id(), e); - onReplicaFailure(nodeId, e); - } - } else { - try { - shardOperationOnReplica(shard.shardId(), replicaRequest); - onReplicaSuccess(); - } catch (Throwable e) { - failReplicaIfNeeded(shard.index(), shard.id(), e); - onReplicaFailure(nodeId, e); - } + @Override + public void onFailure(Throwable t) { + onReplicaFailure(nodeId, t); + } + }); + } catch (Throwable e) { + failReplicaIfNeeded(shard.index(), shard.id(), e); + onReplicaFailure(nodeId, e); } } } diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java index d3bf24de577..32e9d05e497 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java @@ -42,13 +42,12 @@ import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicReferenceArray; -import static com.google.common.collect.Lists.newArrayList; - /** * Get the dfs only with no fetch phase. This is for internal use only.
*/ @@ -115,7 +114,7 @@ public class TransportDfsOnlyAction extends TransportBroadcastAction<DfsOnlyRequ } else if (shardResponse instanceof BroadcastShardOperationFailedException) { failedShards++; if (shardFailures == null) { - shardFailures = newArrayList(); + shardFailures = new ArrayList<>(); } shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); } else { diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index c9401ca5392..fba2f23852e 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -131,7 +131,6 @@ public class UpdateHelper extends AbstractComponent { .routing(request.routing()) .parent(request.parent()) .consistencyLevel(request.consistencyLevel()); - indexRequest.operationThreaded(false); if (request.versionType() != VersionType.INTERNAL) { // in all but the internal versioning mode, we want to create the new document using the given version. indexRequest.version(request.version()).versionType(request.versionType()); @@ -227,13 +226,11 @@ public class UpdateHelper extends AbstractComponent { .consistencyLevel(request.consistencyLevel()) .timestamp(timestamp).ttl(ttl) .refresh(request.refresh()); - indexRequest.operationThreaded(false); return new Result(indexRequest, Operation.INDEX, updatedSourceAsMap, updateSourceContentType); } else if ("delete".equals(operation)) { DeleteRequest deleteRequest = Requests.deleteRequest(request.index()).type(request.type()).id(request.id()).routing(routing).parent(parent) .version(updateVersion).versionType(request.versionType()) .consistencyLevel(request.consistencyLevel()); - deleteRequest.operationThreaded(false); return new Result(deleteRequest, Operation.DELETE, updatedSourceAsMap, updateSourceContentType); } else if ("none".equals(operation)) { UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), false); diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index a5874dc7846..06df386828e 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -78,7 +78,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> private boolean scriptedUpsert = false; private boolean docAsUpsert = false; - private boolean detectNoop = false; + private boolean detectNoop = true; @Nullable private IndexRequest doc; @@ -243,7 +243,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> * The script to execute. Note, make sure not to send different script each * times and instead use script params if possible with the same * (automatically compiled) script. - * + * * @deprecated Use {@link #script(Script)} instead */ @Deprecated @@ -256,7 +256,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> * The script to execute. Note, make sure not to send different script each * times and instead use script params if possible with the same * (automatically compiled) script. 
- * + * @deprecated Use {@link #script(Script)} instead */ @Deprecated @@ -267,7 +267,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> /** * The language of the script to execute. - * + * @deprecated Use {@link #script(Script)} instead */ @Deprecated @@ -286,7 +286,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> /** * Add a script parameter. - * + * @deprecated Use {@link #script(Script)} instead */ @Deprecated @@ -311,7 +311,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> /** * Sets the script parameters to use with the script. - * + * @deprecated Use {@link #script(Script)} instead */ @Deprecated @@ -338,7 +338,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> * The script to execute. Note, make sure not to send different script each * times and instead use script params if possible with the same * (automatically compiled) script. - * + * @deprecated Use {@link #script(Script)} instead */ @Deprecated @@ -360,7 +360,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> * The script type * @param scriptParams * The script parameters - * + * @deprecated Use {@link #script(Script)} instead */ @Deprecated @@ -623,7 +623,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> } /** - * Should this update attempt to detect if it is a noop? + * Should this update attempt to detect if it is a noop? Defaults to true. * @return this for chaining */ public UpdateRequest detectNoop(boolean detectNoop) { @@ -631,6 +631,9 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> return this; } + /** + * Should this update attempt to detect if it is a noop? Defaults to true. + */ public boolean detectNoop() { return detectNoop; } @@ -699,16 +702,15 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> } this.docAsUpsert = shouldUpsertDoc; return this; } - + public boolean scriptedUpsert(){ return this.scriptedUpsert; } - + public UpdateRequest scriptedUpsert(boolean scriptedUpsert) { this.scriptedUpsert = scriptedUpsert; return this; } - @Override public void readFrom(StreamInput in) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java index d4d0ae18963..918d2d7b935 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java @@ -308,6 +308,7 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U /** * Sets whether to perform extra effort to detect noop updates via docAsUpsert. + * Defaults to true. */ public UpdateRequestBuilder setDetectNoop(boolean detectNoop) { request.detectNoop(detectNoop); @@ -322,4 +323,14 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U request.scriptedUpsert(scriptedUpsert); return this; } + + /** + * Set the new ttl of the document. Note that if detectNoop is true (the default) + * and the source of the document isn't changed then the ttl update won't take + * effect.
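+ * A hypothetical sketch of the interplay (index, type, id and the doc source are invented; note the doc must
+ * be set before the ttl, since this setter stores the ttl on the doc):
+ * <pre>
+ * client.prepareUpdate("index", "type", "1")
+ *         .setDoc("{\"field\":\"value\"}")  // if this matches the stored source, the update is a noop...
+ *         .setDetectNoop(false)             // ...so disable noop detection to force the ttl change through
+ *         .setTtl(3600000L)                 // ttl in milliseconds
+ *         .get();
+ * </pre>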
+ */ + public UpdateRequestBuilder setTtl(Long ttl) { + request.doc().ttl(ttl); + return this; + } } diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index 4ffc2abae2b..8e93ded7b61 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -21,7 +21,6 @@ package org.elasticsearch.client.transport; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; @@ -43,9 +42,20 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.FutureTransportResponseHandler; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ScheduledFuture; @@ -143,7 +153,7 @@ public class TransportClientNodesService extends AbstractComponent { if (closed) { throw new IllegalStateException("transport client is closed, can't add an address"); } - List<TransportAddress> filtered = Lists.newArrayListWithExpectedSize(transportAddresses.length); + List<TransportAddress> filtered = new ArrayList<>(transportAddresses.length); for (TransportAddress transportAddress : transportAddresses) { boolean found = false; for (DiscoveryNode otherNode : listedNodes) { diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java b/core/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java index 1ceb822abfd..209781d78f5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java @@ -21,11 +21,11 @@ package org.elasticsearch.cluster; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -93,7 +93,7 @@ public class ClusterChangedEvent { String index = cursor.value; if (!previousState.metaData().hasIndex(index)) { if (created == null) { - created = Lists.newArrayList(); + created = new ArrayList<>(); } created.add(index); } @@ -126,7 +126,7 @@ public class ClusterChangedEvent { String index = cursor.value; if (!state.metaData().hasIndex(index)) { if (deleted == null) { - deleted = Lists.newArrayList(); + deleted = new 
ArrayList<>(); } deleted.add(index); } diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterInfo.java b/core/src/main/java/org/elasticsearch/cluster/ClusterInfo.java index f21bc9f052e..31f8fa0616c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterInfo.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterInfo.java @@ -32,28 +32,53 @@ import java.util.Map; */ public class ClusterInfo { - private final Map<String, DiskUsage> usages; + private final Map<String, DiskUsage> leastAvailableSpaceUsage; + private final Map<String, DiskUsage> mostAvailableSpaceUsage; final Map<String, Long> shardSizes; public static final ClusterInfo EMPTY = new ClusterInfo(); - private ClusterInfo() { - this.usages = Collections.emptyMap(); - this.shardSizes = Collections.emptyMap(); + protected ClusterInfo() { + this(Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP); } - public ClusterInfo(Map<String, DiskUsage> usages, Map<String, Long> shardSizes) { - this.usages = usages; + /** + * Creates a new ClusterInfo instance. + * + * @param leastAvailableSpaceUsage a node id to disk usage mapping for the path that has the least available space on the node. + * @param mostAvailableSpaceUsage a node id to disk usage mapping for the path that has the most available space on the node. + * @param shardSizes a shard key to size in bytes mapping per shard. + * @see #shardIdentifierFromRouting + */ + public ClusterInfo(final Map<String, DiskUsage> leastAvailableSpaceUsage, final Map<String, DiskUsage> mostAvailableSpaceUsage, final Map<String, Long> shardSizes) { + this.leastAvailableSpaceUsage = leastAvailableSpaceUsage; this.shardSizes = shardSizes; + this.mostAvailableSpaceUsage = mostAvailableSpaceUsage; } - public Map<String, DiskUsage> getNodeDiskUsages() { - return this.usages; + /** + * Returns a node id to disk usage mapping for the path that has the least available space on the node. + */ + public Map<String, DiskUsage> getNodeLeastAvailableDiskUsages() { + return this.leastAvailableSpaceUsage; } + /** + * Returns a node id to disk usage mapping for the path that has the most available space on the node. + */ + public Map<String, DiskUsage> getNodeMostAvailableDiskUsages() { + return this.mostAvailableSpaceUsage; + } + + /** + * Returns the shard size for the given shard routing or <code>null</code> if that metric is not available. + */ public Long getShardSize(ShardRouting shardRouting) { return shardSizes.get(shardIdentifierFromRouting(shardRouting)); } + /** + * Returns the shard size for the given shard routing or <code>defaultValue</code> if that metric is not available. + */ public long getShardSize(ShardRouting shardRouting, long defaultValue) { Long shardSize = getShardSize(shardRouting); return shardSize == null ?
defaultValue : shardSize; diff --git a/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java b/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java index 4e912a34f97..60095de09a3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java +++ b/core/src/main/java/org/elasticsearch/cluster/DiffableUtils.java @@ -27,11 +27,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newHashMap; public final class DiffableUtils { @@ -232,13 +232,13 @@ public final class DiffableUtils { protected final Map<String, T> adds; protected MapDiff() { - deletes = newArrayList(); + deletes = new ArrayList<>(); diffs = newHashMap(); adds = newHashMap(); } protected MapDiff(StreamInput in, KeyedReader<T> reader) throws IOException { - deletes = newArrayList(); + deletes = new ArrayList<>(); diffs = newHashMap(); adds = newHashMap(); int deletesCount = in.readVInt(); diff --git a/core/src/main/java/org/elasticsearch/cluster/DiskUsage.java b/core/src/main/java/org/elasticsearch/cluster/DiskUsage.java index 92725b08831..e91adae9e34 100644 --- a/core/src/main/java/org/elasticsearch/cluster/DiskUsage.java +++ b/core/src/main/java/org/elasticsearch/cluster/DiskUsage.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; public class DiskUsage { final String nodeId; final String nodeName; + final String path; final long totalBytes; final long freeBytes; @@ -35,11 +36,12 @@ public class DiskUsage { * Create a new DiskUsage, if {@code totalBytes} is 0, {@link #getFreeDiskAsPercentage()} * will always return 100.0% free */ - public DiskUsage(String nodeId, String nodeName, long totalBytes, long freeBytes) { + public DiskUsage(String nodeId, String nodeName, String path, long totalBytes, long freeBytes) { this.nodeId = nodeId; this.nodeName = nodeName; this.freeBytes = freeBytes; this.totalBytes = totalBytes; + this.path = path; } public String getNodeId() { @@ -50,6 +52,10 @@ public class DiskUsage { return nodeName; } + public String getPath() { + return path; + } + public double getFreeDiskAsPercentage() { // We return 100.0% in order to fail "open", in that if we have invalid // numbers for the total bytes, it's as if we don't know disk usage.
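As an aside, a minimal sketch (not part of the patch) of how the path-aware DiskUsage and the two-map ClusterInfo above fit together; the node id, paths and byte counts are invented for illustration:

    import java.util.Collections;
    import java.util.Map;
    import org.elasticsearch.cluster.ClusterInfo;
    import org.elasticsearch.cluster.DiskUsage;

    public class DiskUsageSketch {
        public static void main(String[] args) {
            // One node with two data paths: /data/path1 is nearly full, /data/path2 is mostly free.
            DiskUsage least = new DiskUsage("node1", "node1", "/data/path1", 100, 10);
            DiskUsage most = new DiskUsage("node1", "node1", "/data/path2", 100, 80);
            Map<String, DiskUsage> leastMap = Collections.singletonMap("node1", least);
            Map<String, DiskUsage> mostMap = Collections.singletonMap("node1", most);
            ClusterInfo info = new ClusterInfo(leastMap, mostMap, Collections.<String, Long>emptyMap());
            // A conservative decider consults the least-available view per node: prints 10.0
            System.out.println(info.getNodeLeastAvailableDiskUsages().get("node1").getFreeDiskAsPercentage());
        }
    }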
@@ -77,7 +83,7 @@ public class DiskUsage { @Override public String toString() { - return "[" + nodeId + "][" + nodeName + "] free: " + new ByteSizeValue(getFreeBytes()) + + return "[" + nodeId + "][" + nodeName + "][" + path + "] free: " + new ByteSizeValue(getFreeBytes()) + "[" + Strings.format1Decimals(getFreeDiskAsPercentage(), "%") + "]"; } } diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 8be9465f7b2..2be8a36e9e5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; @@ -35,9 +34,9 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.node.settings.NodeSettingsService; @@ -47,7 +46,6 @@ import org.elasticsearch.transport.ReceiveTimeoutTransportException; import java.util.*; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; /** * InternalClusterInfoService provides the ClusterInfoService interface, @@ -67,7 +65,8 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu private volatile TimeValue updateFrequency; - private volatile Map<String, DiskUsage> usages; + private volatile Map<String, DiskUsage> leastAvailableSpaceUsages; + private volatile Map<String, DiskUsage> mostAvailableSpaceUsages; private volatile Map<String, Long> shardSizes; private volatile boolean isMaster = false; private volatile boolean enabled; @@ -84,7 +83,8 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu TransportIndicesStatsAction transportIndicesStatsAction, ClusterService clusterService, ThreadPool threadPool) { super(settings); - this.usages = Collections.emptyMap(); + this.leastAvailableSpaceUsages = Collections.emptyMap(); + this.mostAvailableSpaceUsages = Collections.emptyMap(); this.shardSizes = Collections.emptyMap(); this.transportNodesStatsAction = transportNodesStatsAction; this.transportIndicesStatsAction = transportIndicesStatsAction; @@ -200,9 +200,16 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu if (logger.isTraceEnabled()) { logger.trace("Removing node from cluster info: {}", removedNode.getId()); } - Map<String, DiskUsage> newUsages = new HashMap<>(usages); - newUsages.remove(removedNode.getId()); - usages = Collections.unmodifiableMap(newUsages); + if (leastAvailableSpaceUsages.containsKey(removedNode.getId())) { + Map<String, DiskUsage> newMaxUsages = new HashMap<>(leastAvailableSpaceUsages); + newMaxUsages.remove(removedNode.getId()); + 
leastAvailableSpaceUsages = Collections.unmodifiableMap(newMaxUsages); + } + if (mostAvailableSpaceUsages.containsKey(removedNode.getId())) { + Map<String, DiskUsage> newMinUsages = new HashMap<>(mostAvailableSpaceUsages); + newMinUsages.remove(removedNode.getId()); + mostAvailableSpaceUsages = Collections.unmodifiableMap(newMinUsages); + } } } } @@ -210,7 +217,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu @Override public ClusterInfo getClusterInfo() { - return new ClusterInfo(usages, shardSizes); + return new ClusterInfo(leastAvailableSpaceUsages, mostAvailableSpaceUsages, shardSizes); } @Override @@ -313,27 +320,11 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu CountDownLatch nodeLatch = updateNodeStats(new ActionListener<NodesStatsResponse>() { @Override public void onResponse(NodesStatsResponse nodeStatses) { - Map<String, DiskUsage> newUsages = new HashMap<>(); - for (NodeStats nodeStats : nodeStatses.getNodes()) { - if (nodeStats.getFs() == null) { - logger.warn("Unable to retrieve node FS stats for {}", nodeStats.getNode().name()); - } else { - long available = 0; - long total = 0; - - for (FsInfo.Path info : nodeStats.getFs()) { - available += info.getAvailable().bytes(); - total += info.getTotal().bytes(); - } - String nodeId = nodeStats.getNode().id(); - String nodeName = nodeStats.getNode().getName(); - if (logger.isTraceEnabled()) { - logger.trace("node: [{}], total disk: {}, available disk: {}", nodeId, total, available); - } - newUsages.put(nodeId, new DiskUsage(nodeId, nodeName, total, available)); - } - } - usages = Collections.unmodifiableMap(newUsages); + Map<String, DiskUsage> newLeastAvailableUsages = new HashMap<>(); + Map<String, DiskUsage> newMostAvailableUsages = new HashMap<>(); + fillDiskUsagePerNode(logger, nodeStatses.getNodes(), newLeastAvailableUsages, newMostAvailableUsages); + leastAvailableSpaceUsages = Collections.unmodifiableMap(newLeastAvailableUsages); + mostAvailableSpaceUsages = Collections.unmodifiableMap(newMostAvailableUsages); } @Override @@ -349,7 +340,8 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu logger.warn("Failed to execute NodeStatsAction for ClusterInfoUpdateJob", e); } // we empty the usages list, to be safe - we don't know what's going on.
- usages = Collections.emptyMap(); + leastAvailableSpaceUsages = Collections.emptyMap(); + mostAvailableSpaceUsages = Collections.emptyMap(); } } }); @@ -412,5 +404,34 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu } } + static void fillDiskUsagePerNode(ESLogger logger, NodeStats[] nodeStatsArray, Map<String, DiskUsage> newLeastAvailableUsages, Map<String, DiskUsage> newMostAvailableUsages) { + for (NodeStats nodeStats : nodeStatsArray) { + if (nodeStats.getFs() == null) { + logger.warn("Unable to retrieve node FS stats for {}", nodeStats.getNode().name()); + } else { + FsInfo.Path leastAvailablePath = null; + FsInfo.Path mostAvailablePath = null; + for (FsInfo.Path info : nodeStats.getFs()) { + if (leastAvailablePath == null) { + assert mostAvailablePath == null; + mostAvailablePath = leastAvailablePath = info; + } else if (leastAvailablePath.getAvailable().bytes() > info.getAvailable().bytes()) { + leastAvailablePath = info; + } else if (mostAvailablePath.getAvailable().bytes() < info.getAvailable().bytes()) { + mostAvailablePath = info; + } + } + String nodeId = nodeStats.getNode().id(); + String nodeName = nodeStats.getNode().getName(); + if (logger.isTraceEnabled()) { + logger.trace("node: [{}], most available: total disk: {}, available disk: {} / least available: total disk: {}, available disk: {}", nodeId, mostAvailablePath.getTotal(), mostAvailablePath.getAvailable(), leastAvailablePath.getTotal(), leastAvailablePath.getAvailable()); + } + newLeastAvailableUsages.put(nodeId, new DiskUsage(nodeId, nodeName, leastAvailablePath.getPath(), leastAvailablePath.getTotal().bytes(), leastAvailablePath.getAvailable().bytes())); + newMostAvailableUsages.put(nodeId, new DiskUsage(nodeId, nodeName, mostAvailablePath.getPath(), mostAvailablePath.getTotal().bytes(), mostAvailablePath.getAvailable().bytes())); + + } + } + } + } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 2f28dcaf77d..eb5b716017a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -41,10 +41,17 @@ import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.concurrent.Callable; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.filterEntries; import static com.google.common.collect.Maps.newHashMap; @@ -268,7 +275,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { if (filteringRequired) { // If filtering required - add it to the list of filters if (filteringAliases == null) { - filteringAliases = newArrayList(); + filteringAliases = new ArrayList<>(); } filteringAliases.add(alias); } else { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 32098e58c83..001bf424a2b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++
b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -23,7 +23,10 @@ import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.google.common.base.Predicate; -import com.google.common.collect.*; +import com.google.common.collect.Collections2; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.UnmodifiableIterator; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; @@ -45,7 +48,12 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.FromXContentBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.recovery.RecoverySettings; @@ -55,7 +63,18 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.warmer.IndexWarmersMetaData; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedMap; +import java.util.TreeMap; import static org.elasticsearch.common.settings.Settings.*; @@ -246,7 +265,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys()); for (String index : intersection) { IndexMetaData indexMetaData = indices.get(index); - List<AliasMetaData> filteredValues = Lists.newArrayList(); + List<AliasMetaData> filteredValues = new ArrayList<>(); for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) { AliasMetaData value = cursor.value; if (matchAllAliases || Regex.simpleMatch(aliases, value.alias())) { @@ -295,7 +314,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys()); for (String index : intersection) { IndexMetaData indexMetaData = indices.get(index); - List<AliasMetaData> filteredValues = Lists.newArrayList(); + List<AliasMetaData> filteredValues = new ArrayList<>(); for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) { AliasMetaData value = cursor.value; if (Regex.simpleMatch(aliases, value.alias())) { @@ -978,14 +997,14 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr // TODO: I think we can remove these arrays. it isn't worth the effort, for operations on all indices. 
// When doing an operation across all indices, most of the time is spent on actually going to all shards and // do the required operations, the bottleneck isn't resolving expressions into concrete indices. - List<String> allIndicesLst = Lists.newArrayList(); + List<String> allIndicesLst = new ArrayList<>(); for (ObjectCursor<IndexMetaData> cursor : indices.values()) { allIndicesLst.add(cursor.value.index()); } String[] allIndices = allIndicesLst.toArray(new String[allIndicesLst.size()]); - List<String> allOpenIndicesLst = Lists.newArrayList(); - List<String> allClosedIndicesLst = Lists.newArrayList(); + List<String> allOpenIndicesLst = new ArrayList<>(); + List<String> allClosedIndicesLst = new ArrayList<>(); for (ObjectCursor<IndexMetaData> cursor : indices.values()) { IndexMetaData indexMetaData = cursor.value; if (indexMetaData.state() == IndexMetaData.State.OPEN) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index ff878a4f721..bebb8e52f07 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.google.common.base.Charsets; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ElasticsearchException; @@ -80,7 +79,12 @@ import java.io.UnsupportedEncodingException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; @@ -248,7 +252,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { Map<String, AliasMetaData> templatesAliases = Maps.newHashMap(); - List<String> templateNames = Lists.newArrayList(); + List<String> templateNames = new ArrayList<>(); for (Map.Entry<String, String> entry : request.mappings().entrySet()) { mappings.put(entry.getKey(), parseMapping(entry.getValue())); @@ -494,7 +498,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { } private List<IndexTemplateMetaData> findTemplates(CreateIndexClusterStateUpdateRequest request, ClusterState state, IndexTemplateFilter indexTemplateFilter) throws IOException { - List<IndexTemplateMetaData> templates = Lists.newArrayList(); + List<IndexTemplateMetaData> templates = new ArrayList<>(); for (ObjectCursor<IndexTemplateMetaData> cursor : state.metaData().templates().values()) { IndexTemplateMetaData template = cursor.value; if (indexTemplateFilter.apply(request, template)) { @@ -527,7 +531,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { List<String> getIndexSettingsValidationErrors(Settings settings) { String customPath = settings.get(IndexMetaData.SETTING_DATA_PATH, null); - List<String> validationErrors = Lists.newArrayList(); + List<String> validationErrors = new ArrayList<>(); if (customPath != null && env.sharedDataFile() == null) { validationErrors.add("path.shared_data must be set in order to use custom data paths"); } else if 
(customPath != null) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index d5512f63172..787640db639 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest; @@ -34,10 +33,11 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.IndicesService; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -69,7 +69,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent { @Override public ClusterState execute(final ClusterState currentState) { - List<String> indicesToClose = Lists.newArrayList(); + List<String> indicesToClose = new ArrayList<>(); Map<String, IndexService> indices = Maps.newHashMap(); try { for (AliasAction aliasAction : request.actions()) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index 66ad39f9225..7e26c2ca743 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.elasticsearch.action.admin.indices.alias.Alias; @@ -39,6 +38,7 @@ import org.elasticsearch.indices.IndexTemplateAlreadyExistsException; import org.elasticsearch.indices.IndexTemplateMissingException; import org.elasticsearch.indices.InvalidIndexTemplateException; +import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; @@ -179,7 +179,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { } private void validate(PutRequest request) { - List<String> validationErrors = Lists.newArrayList(); + List<String> validationErrors = new ArrayList<>(); if (request.name.contains(" ")) { validationErrors.add("name must not contain a space"); } @@ -240,7 +240,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { String template; Settings settings = Settings.Builder.EMPTY_SETTINGS; Map<String, String> mappings = Maps.newHashMap(); - List<Alias> aliases = Lists.newArrayList(); + List<Alias> aliases = new ArrayList<>(); Map<String, IndexMetaData.Custom> customs = Maps.newHashMap(); TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java 
b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 04d52b0ea55..5839502832d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.elasticsearch.Version; @@ -47,7 +46,12 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InvalidTypeNameException; import org.elasticsearch.percolator.PercolatorService; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; import static com.google.common.collect.Maps.newHashMap; /** @@ -226,7 +230,7 @@ public class MetaDataMappingService extends AbstractComponent { if (task instanceof RefreshTask) { RefreshTask refreshTask = (RefreshTask) task; try { - List<String> updatedTypes = Lists.newArrayList(); + List<String> updatedTypes = new ArrayList<>(); for (String type : refreshTask.types) { if (processedRefreshes.contains(type)) { continue; @@ -342,7 +346,7 @@ public class MetaDataMappingService extends AbstractComponent { @Override public ClusterState execute(final ClusterState currentState) throws Exception { - List<String> indicesToClose = Lists.newArrayList(); + List<String> indicesToClose = new ArrayList<>(); try { for (String index : request.indices()) { if (!currentState.metaData().hasIndex(index)) { diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 1b95aed4636..eebd770707f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -35,12 +35,11 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.transport.TransportAddress; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; -import static com.google.common.collect.Lists.newArrayList; - /** * This class holds all {@link DiscoveryNode} in the cluster and provides convenience methods to * access, modify, and merge / diff discovery nodes. @@ -414,8 +413,8 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements * Returns the changes comparing these nodes to the provided nodes.
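 * A hypothetical usage sketch (the event variable is illustrative, as in a cluster state listener):
 * <pre>
 * DiscoveryNodes.Delta delta = event.state().nodes().delta(event.previousState().nodes());
 * for (DiscoveryNode node : delta.removedNodes()) {
 *     // react to nodes that left the cluster
 * }
 * </pre>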
*/ public Delta delta(DiscoveryNodes other) { - List<DiscoveryNode> removed = newArrayList(); - List<DiscoveryNode> added = newArrayList(); + List<DiscoveryNode> removed = new ArrayList<>(); + List<DiscoveryNode> added = new ArrayList<>(); for (DiscoveryNode node : other) { if (!this.nodeExists(node.id())) { removed.add(node); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java index 85a14e23008..e8311b5e294 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java @@ -25,7 +25,6 @@ import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.google.common.collect.ImmutableList; import com.google.common.collect.Sets; import com.google.common.collect.UnmodifiableIterator; - import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -42,8 +41,6 @@ import java.util.List; import java.util.Set; import java.util.concurrent.ThreadLocalRandom; -import static com.google.common.collect.Lists.*; - /** * The {@link IndexRoutingTable} represents routing information for a single * index. The routing table maintains a list of all shards in the index. A @@ -265,7 +262,7 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple * @return a {@link List} of shards that match one of the given {@link ShardRoutingState states} */ public List<ShardRouting> shardsWithState(ShardRoutingState state) { - List<ShardRouting> shards = newArrayList(); + List<ShardRouting> shards = new ArrayList<>(); for (IndexShardRoutingTable shardRoutingTable : this) { shards.addAll(shardRoutingTable.shardsWithState(state)); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index bc13e081563..5cc35aeacef 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -30,11 +30,15 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.ThreadLocalRandom; -import static com.google.common.collect.Lists.newArrayList; - /** * {@link IndexShardRoutingTable} encapsulates all instances of a single shard. * Each Elasticsearch index consists of multiple shards, each shard encapsulates @@ -554,7 +558,7 @@ public class IndexShardRoutingTable implements Iterable<ShardRouting> { } public List<ShardRouting> replicaShardsWithState(ShardRoutingState... 
states) { - List<ShardRouting> shards = newArrayList(); + List<ShardRouting> shards = new ArrayList<>(); for (ShardRouting shardEntry : replicas) { for (ShardRoutingState state : states) { if (shardEntry.state() == state) { @@ -569,7 +573,7 @@ public class IndexShardRoutingTable implements Iterable<ShardRouting> { if (state == ShardRoutingState.INITIALIZING) { return allInitializingShards; } - List<ShardRouting> shards = newArrayList(); + List<ShardRouting> shards = new ArrayList<>(); for (ShardRouting shardEntry : this) { if (shardEntry.state() == state) { shards.add(shardEntry); @@ -585,12 +589,12 @@ public class IndexShardRoutingTable implements Iterable<ShardRouting> { public Builder(IndexShardRoutingTable indexShard) { this.shardId = indexShard.shardId; - this.shards = newArrayList(indexShard.shards); + this.shards = new ArrayList<>(indexShard.shards); } public Builder(ShardId shardId) { this.shardId = shardId; - this.shards = newArrayList(); + this.shards = new ArrayList<>(); } public Builder addShard(ShardRouting shardEntry) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index 6db68524992..411a1ed6817 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.routing; -import com.google.common.collect.Lists; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -36,6 +35,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotFoundException; +import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Map; @@ -90,7 +90,7 @@ public class OperationRouting extends AbstractComponent { set.add(iterator); } } - return new GroupShardsIterator(Lists.newArrayList(set)); + return new GroupShardsIterator(new ArrayList<>(set)); } private static final Map<String, Set<String>> EMPTY_ROUTING = Collections.emptyMap(); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java index b96eece2398..43ad6aff1ec 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java @@ -27,8 +27,6 @@ import java.util.Collection; import java.util.Iterator; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; - /** * A {@link RoutingNode} represents a cluster node associated with a single {@link DiscoveryNode} including all shards * that are hosted on that nodes. Each {@link RoutingNode} has a unique node id that can be used to identify the node. @@ -118,7 +116,7 @@ public class RoutingNode implements Iterable<ShardRouting> { * @return List of shards */ public List<ShardRouting> shardsWithState(ShardRoutingState... 
states) { - List<ShardRouting> shards = newArrayList(); + List<ShardRouting> shards = new ArrayList<>(); for (ShardRouting shardEntry : this) { for (ShardRoutingState state : states) { if (shardEntry.state() == state) { @@ -136,7 +134,7 @@ public class RoutingNode implements Iterable<ShardRouting> { * @return a list of shards */ public List<ShardRouting> shardsWithState(String index, ShardRoutingState... states) { - List<ShardRouting> shards = newArrayList(); + List<ShardRouting> shards = new ArrayList<>(); for (ShardRouting shardEntry : this) { if (!shardEntry.index().equals(index)) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index 8c07429776a..0836909af8e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -22,19 +22,25 @@ package org.elasticsearch.cluster.routing; import com.carrotsearch.hppc.ObjectIntHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.google.common.base.Predicate; -import com.google.common.collect.*; +import com.google.common.collect.Iterables; +import com.google.common.collect.Iterators; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.index.shard.ShardId; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newHashMap; import static com.google.common.collect.Sets.newHashSet; @@ -97,7 +103,7 @@ public class RoutingNodes implements Iterable<RoutingNode> { if (shard.assignedToNode()) { List<ShardRouting> entries = nodesToShards.get(shard.currentNodeId()); if (entries == null) { - entries = newArrayList(); + entries = new ArrayList<>(); nodesToShards.put(shard.currentNodeId(), entries); } final ShardRouting sr = getRouting(shard, readOnly); @@ -107,7 +113,7 @@ public class RoutingNodes implements Iterable<RoutingNode> { entries = nodesToShards.get(shard.relocatingNodeId()); relocatingShards++; if (entries == null) { - entries = newArrayList(); + entries = new ArrayList<>(); nodesToShards.put(shard.relocatingNodeId(), entries); } // add the counterpart shard with relocatingNodeId reflecting the source from which @@ -285,7 +291,7 @@ public class RoutingNodes implements Iterable<RoutingNode> { } public List<ShardRouting> shards(Predicate<ShardRouting> predicate) { - List<ShardRouting> shards = newArrayList(); + List<ShardRouting> shards = new ArrayList<>(); for (RoutingNode routingNode : this) { for (ShardRouting shardRouting : routingNode) { if (predicate.apply(shardRouting)) { @@ -298,7 +304,7 @@ public class RoutingNodes implements Iterable<RoutingNode> { public List<ShardRouting> shardsWithState(ShardRoutingState... 
state) { // TODO these are used on tests only - move into utils class - List<ShardRouting> shards = newArrayList(); + List<ShardRouting> shards = new ArrayList<>(); for (RoutingNode routingNode : this) { shards.addAll(routingNode.shardsWithState(state)); } @@ -313,7 +319,7 @@ public class RoutingNodes implements Iterable<RoutingNode> { public List<ShardRouting> shardsWithState(String index, ShardRoutingState... state) { // TODO these are used on tests only - move into utils class - List<ShardRouting> shards = newArrayList(); + List<ShardRouting> shards = new ArrayList<>(); for (RoutingNode routingNode : this) { shards.addAll(routingNode.shardsWithState(index, state)); } @@ -452,7 +458,7 @@ public class RoutingNodes implements Iterable<RoutingNode> { } List<ShardRouting> shards = assignedShards.get(shard.shardId()); if (shards == null) { - shards = Lists.newArrayList(); + shards = new ArrayList<>(); assignedShards.put(shard.shardId(), shards); } assert assertInstanceNotInList(shard, shards); @@ -724,7 +730,7 @@ public class RoutingNodes implements Iterable<RoutingNode> { } // Assert that the active shard routing are identical. Set<Map.Entry<String, Integer>> entries = indicesAndShards.entrySet(); - final List<ShardRouting> shards = newArrayList(); + final List<ShardRouting> shards = new ArrayList<>(); for (Map.Entry<String, Integer> e : entries) { String index = e.getKey(); for (int i = 0; i < e.getValue(); i++) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index 449867b4fa9..1e3bd9614dd 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -20,14 +20,19 @@ package org.elasticsearch.cluster.routing; import com.carrotsearch.hppc.IntSet; -import com.google.common.collect.*; -import org.elasticsearch.cluster.*; +import com.google.common.base.Predicate; +import com.google.common.base.Predicates; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.UnmodifiableIterator; +import org.elasticsearch.cluster.Diff; +import org.elasticsearch.cluster.Diffable; +import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.shard.ShardId; import java.io.IOException; import java.util.ArrayList; @@ -35,7 +40,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newHashMap; /** @@ -107,7 +111,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi } public List<ShardRouting> shardsWithState(ShardRoutingState state) { - List<ShardRouting> shards = newArrayList(); + List<ShardRouting> shards = new ArrayList<>(); for (IndexRoutingTable indexRoutingTable : this) { shards.addAll(indexRoutingTable.shardsWithState(state)); } @@ -120,7 +124,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi * @return All the shards */ public List<ShardRouting> allShards() { - List<ShardRouting> shards = Lists.newArrayList(); + List<ShardRouting> shards = new 
ArrayList<>(); String[] indices = indicesRouting.keySet().toArray(new String[indicesRouting.keySet().size()]); for (String index : indices) { List<ShardRouting> allShardsIndex = allShards(index); @@ -137,7 +141,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi * @throws IndexNotFoundException If the index passed does not exists */ public List<ShardRouting> allShards(String index) { - List<ShardRouting> shards = Lists.newArrayList(); + List<ShardRouting> shards = new ArrayList<>(); IndexRoutingTable indexRoutingTable = index(index); if (indexRoutingTable == null) { throw new IndexNotFoundException(index); @@ -162,28 +166,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi * iterator contains a single ShardRouting pointing at the relocating target */ public GroupShardsIterator allActiveShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets) { - // use list here since we need to maintain identity across shards - ArrayList<ShardIterator> set = new ArrayList<>(); - for (String index : indices) { - IndexRoutingTable indexRoutingTable = index(index); - if (indexRoutingTable == null) { - continue; - // we simply ignore indices that don't exists (make sense for operations that use it currently) - } - for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { - for (ShardRouting shardRouting : indexShardRoutingTable) { - if (shardRouting.active()) { - set.add(shardRouting.shardsIt()); - if (includeRelocationTargets && shardRouting.relocating()) { - set.add(new PlainShardIterator(shardRouting.shardId(), Collections.singletonList(shardRouting.buildTargetRelocatingShard()))); - } - } else if (includeEmpty) { // we need this for counting properly, just make it an empty one - set.add(new PlainShardIterator(shardRouting.shardId(), Collections.<ShardRouting>emptyList())); - } - } - } - } - return new GroupShardsIterator(set); + return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, includeRelocationTargets, ACTIVE_PREDICATE); } public GroupShardsIterator allAssignedShardsGrouped(String[] indices, boolean includeEmpty) { @@ -198,6 +181,25 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi * iterator contains a single ShardRouting pointing at the relocating target */ public GroupShardsIterator allAssignedShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets) { + return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, includeRelocationTargets, ASSIGNED_PREDICATE); + } + + private static Predicate<ShardRouting> ACTIVE_PREDICATE = new Predicate<ShardRouting>() { + @Override + public boolean apply(ShardRouting shardRouting) { + return shardRouting.active(); + } + }; + + private static Predicate<ShardRouting> ASSIGNED_PREDICATE = new Predicate<ShardRouting>() { + @Override + public boolean apply(ShardRouting shardRouting) { + return shardRouting.assignedToNode(); + } + }; + + // TODO: replace with JDK 8 native java.util.function.Predicate + private GroupShardsIterator allSatisfyingPredicateShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets, Predicate<ShardRouting> predicate) { // use list here since we need to maintain identity across shards ArrayList<ShardIterator> set = new ArrayList<>(); for (String index : indices) { @@ -208,7 +210,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi } for (IndexShardRoutingTable indexShardRoutingTable : 
indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { - if (shardRouting.assignedToNode()) { + if (predicate.apply(shardRouting)) { set.add(shardRouting.shardsIt()); if (includeRelocationTargets && shardRouting.relocating()) { set.add(new PlainShardIterator(shardRouting.shardId(), Collections.singletonList(shardRouting.buildTargetRelocatingShard()))); @@ -222,6 +224,38 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi return new GroupShardsIterator(set); } + public ShardsIterator allShards(String[] indices) { + return allShardsSatisfyingPredicate(indices, Predicates.<ShardRouting>alwaysTrue(), false); + } + + public ShardsIterator allShardsIncludingRelocationTargets(String[] indices) { + return allShardsSatisfyingPredicate(indices, Predicates.<ShardRouting>alwaysTrue(), true); + } + + // TODO: replace with JDK 8 native java.util.function.Predicate + private ShardsIterator allShardsSatisfyingPredicate(String[] indices, Predicate<ShardRouting> predicate, boolean includeRelocationTargets) { + // use list here since we need to maintain identity across shards + List<ShardRouting> shards = new ArrayList<>(); + for (String index : indices) { + IndexRoutingTable indexRoutingTable = index(index); + if (indexRoutingTable == null) { + continue; + // we simply ignore indices that don't exist (makes sense for operations that use it currently) + } + for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { + for (ShardRouting shardRouting : indexShardRoutingTable) { + if (predicate.apply(shardRouting)) { + shards.add(shardRouting); + if (includeRelocationTargets && shardRouting.relocating()) { + shards.add(shardRouting.buildTargetRelocatingShard()); + } + } + } + } + } + return new PlainShardsIterator(shards); + } + /** * All the *active* primary shards for the provided indices grouped (each group is a single element, consisting * of the primary shard). 
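The grouped-shard methods now funnel into a single predicate-driven walk, with ACTIVE_PREDICATE and ASSIGNED_PREDICATE as the only variation, and the new flat allShards variants reuse the same pattern via Predicates.alwaysTrue(). A minimal sketch of the JDK 8 shape the TODO points at, assuming the same fields and iterator types on RoutingTable; the anonymous Guava predicates collapse into method references and apply() becomes test():

    // Hypothetical JDK 8 form of the helper above; not part of this patch.
    // Assumes: import java.util.function.Predicate;
    private ShardsIterator allShardsSatisfyingPredicate(String[] indices, Predicate<ShardRouting> predicate, boolean includeRelocationTargets) {
        List<ShardRouting> shards = new ArrayList<>();
        for (String index : indices) {
            IndexRoutingTable indexRoutingTable = index(index);
            if (indexRoutingTable == null) {
                continue; // indices that don't exist are simply ignored
            }
            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                for (ShardRouting shardRouting : indexShardRoutingTable) {
                    if (predicate.test(shardRouting)) { // java.util.function.Predicate uses test(), not apply()
                        shards.add(shardRouting);
                        if (includeRelocationTargets && shardRouting.relocating()) {
                            shards.add(shardRouting.buildTargetRelocatingShard());
                        }
                    }
                }
            }
        }
        return new PlainShardsIterator(shards);
    }

    // Callers would pass method references instead of the static predicate constants:
    //   allShardsSatisfyingPredicate(indices, ShardRouting::active, includeRelocationTargets);
    //   allShardsSatisfyingPredicate(indices, ShardRouting::assignedToNode, includeRelocationTargets);
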
This is handy for components that expect to get group iterators, but still want in some diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTableValidation.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTableValidation.java index bd1e283f436..acd207f8ad5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTableValidation.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTableValidation.java @@ -21,17 +21,15 @@ package org.elasticsearch.cluster.routing; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; -import static com.google.common.collect.Lists.newArrayListWithCapacity; import static com.google.common.collect.Maps.newHashMap; /** @@ -57,7 +55,7 @@ public class RoutingTableValidation implements Streamable { if (failures().isEmpty() && indicesFailures().isEmpty()) { return ImmutableList.of(); } - List<String> allFailures = newArrayList(failures()); + List<String> allFailures = new ArrayList<>(failures()); for (Map.Entry<String, List<String>> entry : indicesFailures().entrySet()) { for (String failure : entry.getValue()) { allFailures.add("Index [" + entry.getKey() + "]: " + failure); @@ -94,7 +92,7 @@ public class RoutingTableValidation implements Streamable { public void addFailure(String failure) { valid = false; if (failures == null) { - failures = newArrayList(); + failures = new ArrayList<>(); } failures.add(failure); } @@ -106,7 +104,7 @@ public class RoutingTableValidation implements Streamable { } List<String> indexFailures = indicesFailures.get(index); if (indexFailures == null) { - indexFailures = Lists.newArrayList(); + indexFailures = new ArrayList<>(); indicesFailures.put(index, indexFailures); } indexFailures.add(failure); @@ -124,7 +122,7 @@ public class RoutingTableValidation implements Streamable { if (size == 0) { failures = ImmutableList.of(); } else { - failures = Lists.newArrayListWithCapacity(size); + failures = new ArrayList<>(size); for (int i = 0; i < size; i++) { failures.add(in.readString()); } @@ -137,7 +135,7 @@ public class RoutingTableValidation implements Streamable { for (int i = 0; i < size; i++) { String index = in.readString(); int size2 = in.readVInt(); - List<String> indexFailures = newArrayListWithCapacity(size2); + List<String> indexFailures = new ArrayList<>(size2); for (int j = 0; j < size2; j++) { indexFailures.add(in.readString()); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationExplanation.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationExplanation.java index 4bd19ca2526..f8126f5538a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationExplanation.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationExplanation.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.routing.allocation; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; @@ -28,6 +27,7 @@ import 
org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.index.shard.ShardId; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -88,7 +88,7 @@ public class AllocationExplanation implements Streamable { public AllocationExplanation add(ShardId shardId, NodeExplanation nodeExplanation) { List<NodeExplanation> list = explanations.get(shardId); if (list == null) { - list = Lists.newArrayList(); + list = new ArrayList<>(); explanations.put(shardId, list); } list.add(nodeExplanation); @@ -121,7 +121,7 @@ public class AllocationExplanation implements Streamable { for (int i = 0; i < size; i++) { ShardId shardId = ShardId.readShardId(in); int size2 = in.readVInt(); - List<NodeExplanation> ne = Lists.newArrayListWithCapacity(size2); + List<NodeExplanation> ne = new ArrayList<>(size2); for (int j = 0; j < size2; j++) { DiscoveryNode node = null; if (in.readBoolean()) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index e5a46855e0c..191ec9556c5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -21,12 +21,16 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; @@ -36,7 +40,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import java.util.ArrayList; -import java.util.Iterator; import java.util.List; @@ -235,7 +238,7 @@ public class AllocationService extends AbstractComponent { } // go over and remove dangling replicas that are initializing for primary shards - List<ShardRouting> shardsToFail = Lists.newArrayList(); + List<ShardRouting> shardsToFail = new ArrayList<>(); for (ShardRouting shardEntry : routingNodes.unassigned()) { if (shardEntry.primary()) { for (ShardRouting routing : routingNodes.assignedShards(shardEntry)) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingExplanations.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingExplanations.java index 8601bf1d79e..95f1a29eed5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingExplanations.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingExplanations.java @@ -25,10 +25,9 @@ import org.elasticsearch.common.xcontent.ToXContent; import 
org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; - /** * Class used to encapsulate a number of {@link RerouteExplanation} * explanations. @@ -37,7 +36,7 @@ public class RoutingExplanations implements ToXContent { private final List<RerouteExplanation> explanations; public RoutingExplanations() { - this.explanations = newArrayList(); + this.explanations = new ArrayList<>(); } public RoutingExplanations add(RerouteExplanation explanation) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java index 33a44188407..3e17ce68584 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java @@ -20,8 +20,8 @@ package org.elasticsearch.cluster.routing.allocation.command; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -30,13 +30,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; - /** * A simple {@link AllocationCommand} composite managing several * {@link AllocationCommand} implementations @@ -73,7 +72,7 @@ public class AllocationCommands { registerFactory(MoveAllocationCommand.NAME, new MoveAllocationCommand.Factory()); } - private final List<AllocationCommand> commands = newArrayList(); + private final List<AllocationCommand> commands = new ArrayList<>(); /** * Creates a new set of {@link AllocationCommands} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/Decision.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/Decision.java index 76922ae2462..02fc2fef948 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/Decision.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/Decision.java @@ -19,13 +19,13 @@ package org.elasticsearch.cluster.routing.allocation.decider; -import com.google.common.collect.Lists; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Locale; @@ -226,7 +226,7 @@ public abstract class Decision implements ToXContent { */ public static class Multi extends Decision { - private final List<Decision> decisions = Lists.newArrayList(); + private final List<Decision> decisions = new ArrayList<>(); /** * Add a decision to this {@link Multi}decision instance diff --git 
a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 2a438de800f..7db6294cc93 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -164,7 +164,7 @@ public class DiskThresholdDecider extends AllocationDecider { @Override public void onNewInfo(ClusterInfo info) { - Map<String, DiskUsage> usages = info.getNodeDiskUsages(); + Map<String, DiskUsage> usages = info.getNodeLeastAvailableDiskUsages(); if (usages != null) { boolean reroute = false; String explanation = ""; @@ -339,7 +339,9 @@ public class DiskThresholdDecider extends AllocationDecider { final double usedDiskThresholdLow = 100.0 - DiskThresholdDecider.this.freeDiskThresholdLow; final double usedDiskThresholdHigh = 100.0 - DiskThresholdDecider.this.freeDiskThresholdHigh; - DiskUsage usage = getDiskUsage(node, allocation); + ClusterInfo clusterInfo = allocation.clusterInfo(); + Map<String, DiskUsage> usages = clusterInfo.getNodeMostAvailableDiskUsages(); + DiskUsage usage = getDiskUsage(node, allocation, usages); // First, check that the node currently over the low watermark double freeDiskPercentage = usage.getFreeDiskAsPercentage(); // Cache the used disk percentage for displaying disk percentages consistent with documentation @@ -441,11 +443,16 @@ public class DiskThresholdDecider extends AllocationDecider { @Override public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + if (shardRouting.currentNodeId().equals(node.nodeId()) == false) { + throw new IllegalArgumentException("Shard [" + shardRouting + "] is not allocated on node: [" + node.nodeId() + "]"); + } final Decision decision = earlyTerminate(allocation); if (decision != null) { return decision; } - DiskUsage usage = getDiskUsage(node, allocation); + ClusterInfo clusterInfo = allocation.clusterInfo(); + Map<String, DiskUsage> usages = clusterInfo.getNodeLeastAvailableDiskUsages(); + DiskUsage usage = getDiskUsage(node, allocation, usages); // If this node is already above the high threshold, the shard cannot remain (get it off!) 
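The decider now consults two different views of per-node disk usage: canAllocate reads getNodeMostAvailableDiskUsages(), while canRemain and the onNewInfo reroute listener read getNodeLeastAvailableDiskUsages(). The asymmetry is presumably deliberate for nodes with multiple data paths: a newly allocated shard will land on the path with the most free space, whereas a shard already on the node may sit on its fullest path. A small sketch of the selection, using only the accessors visible in the hunks above (node and allocation are the usual decider parameters):

    // Sketch: which usage map feeds which decision in this patch.
    ClusterInfo clusterInfo = allocation.clusterInfo();
    // best case: a newly placed shard goes to the emptiest data path
    Map<String, DiskUsage> forAllocation = clusterInfo.getNodeMostAvailableDiskUsages();
    // worst case: a resident shard may live on the fullest data path
    Map<String, DiskUsage> forRemaining = clusterInfo.getNodeLeastAvailableDiskUsages();
    DiskUsage allocationUsage = getDiskUsage(node, allocation, forAllocation); // canAllocate
    DiskUsage remainUsage = getDiskUsage(node, allocation, forRemaining);      // canRemain

The "_na_" argument threaded through the DiskUsage constructors below is the new path field this distinction requires, filled with a placeholder wherever a synthetic usage has no real mount point.
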
double freeDiskPercentage = usage.getFreeDiskAsPercentage(); long freeBytes = usage.getFreeBytes(); @@ -472,9 +479,8 @@ public class DiskThresholdDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "enough disk for shard to remain on node, free: [%s]", new ByteSizeValue(freeBytes)); } - private DiskUsage getDiskUsage(RoutingNode node, RoutingAllocation allocation) { + private DiskUsage getDiskUsage(RoutingNode node, RoutingAllocation allocation, Map<String, DiskUsage> usages) { ClusterInfo clusterInfo = allocation.clusterInfo(); - Map<String, DiskUsage> usages = clusterInfo.getNodeDiskUsages(); DiskUsage usage = usages.get(node.nodeId()); if (usage == null) { // If there is no usage, and we have other nodes in the cluster, @@ -488,7 +494,7 @@ public class DiskThresholdDecider extends AllocationDecider { if (includeRelocations) { long relocatingShardsSize = sizeOfRelocatingShards(node, clusterInfo, true); - DiskUsage usageIncludingRelocations = new DiskUsage(node.nodeId(), node.node().name(), + DiskUsage usageIncludingRelocations = new DiskUsage(node.nodeId(), node.node().name(), "_na_", usage.getTotalBytes(), usage.getFreeBytes() - relocatingShardsSize); if (logger.isTraceEnabled()) { logger.trace("usage without relocations: {}", usage); @@ -508,7 +514,7 @@ public class DiskThresholdDecider extends AllocationDecider { */ public DiskUsage averageUsage(RoutingNode node, Map<String, DiskUsage> usages) { if (usages.size() == 0) { - return new DiskUsage(node.nodeId(), node.node().name(), 0, 0); + return new DiskUsage(node.nodeId(), node.node().name(), "_na_", 0, 0); } long totalBytes = 0; long freeBytes = 0; @@ -516,7 +522,7 @@ public class DiskThresholdDecider extends AllocationDecider { totalBytes += du.getTotalBytes(); freeBytes += du.getFreeBytes(); } - return new DiskUsage(node.nodeId(), node.node().name(), totalBytes / usages.size(), freeBytes / usages.size()); + return new DiskUsage(node.nodeId(), node.node().name(), "_na_", totalBytes / usages.size(), freeBytes / usages.size()); } /** @@ -528,8 +534,8 @@ public class DiskThresholdDecider extends AllocationDecider { */ public double freeDiskPercentageAfterShardAssigned(DiskUsage usage, Long shardSize) { shardSize = (shardSize == null) ? 0 : shardSize; - DiskUsage newUsage = new DiskUsage(usage.getNodeId(), usage.getNodeName(), - usage.getTotalBytes(), usage.getFreeBytes() - shardSize); + DiskUsage newUsage = new DiskUsage(usage.getNodeId(), usage.getNodeName(), usage.getPath(), + usage.getTotalBytes(), usage.getFreeBytes() - shardSize); return newUsage.getFreeDiskAsPercentage(); } @@ -600,7 +606,7 @@ public class DiskThresholdDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "cluster info unavailable"); } - final Map<String, DiskUsage> usages = clusterInfo.getNodeDiskUsages(); + final Map<String, DiskUsage> usages = clusterInfo.getNodeLeastAvailableDiskUsages(); // Fail open if there are no disk usages available if (usages.isEmpty()) { if (logger.isTraceEnabled()) { diff --git a/core/src/main/java/org/elasticsearch/common/Strings.java b/core/src/main/java/org/elasticsearch/common/Strings.java index 58210e0fe5c..302c5b2cfd1 100644 --- a/core/src/main/java/org/elasticsearch/common/Strings.java +++ b/core/src/main/java/org/elasticsearch/common/Strings.java @@ -943,7 +943,8 @@ public class Strings { boolean changed = false; for (int i = 0; i < value.length(); i++) { char c = value.charAt(i); - if (c == '_') { + //e.g. 
_name stays as-is, _first_name becomes _firstName + if (c == '_' && i > 0) { if (!changed) { if (sb != null) { sb.setLength(0); diff --git a/core/src/main/java/org/elasticsearch/common/ValidationException.java b/core/src/main/java/org/elasticsearch/common/ValidationException.java index 13288765396..f3fbe5e4bc0 100644 --- a/core/src/main/java/org/elasticsearch/common/ValidationException.java +++ b/core/src/main/java/org/elasticsearch/common/ValidationException.java @@ -36,7 +36,7 @@ public class ValidationException extends IllegalArgumentException { * Add a new validation error to the accumulating validation errors * @param error the error to add */ - public void addValidationError(String error) { + public final void addValidationError(String error) { validationErrors.add(error); } @@ -44,7 +44,7 @@ public class ValidationException extends IllegalArgumentException { * Add a sequence of validation errors to the accumulating validation errors * @param errors the errors to add */ - public void addValidationErrors(Iterable<String> errors) { + public final void addValidationErrors(Iterable<String> errors) { for (String error : errors) { validationErrors.add(error); } @@ -52,14 +52,13 @@ public class ValidationException extends IllegalArgumentException { /** * Returns the validation errors accumulated - * @return */ - public List<String> validationErrors() { + public final List<String> validationErrors() { return validationErrors; } @Override - public String getMessage() { + public final String getMessage() { StringBuilder sb = new StringBuilder(); sb.append("Validation Failed: "); int index = 0; diff --git a/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java b/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java index 3646595f724..bb7a642c987 100644 --- a/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java +++ b/core/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java @@ -46,8 +46,7 @@ public class LZFCompressor implements Compressor { public LZFCompressor() { this.decoder = ChunkDecoderFactory.safeInstance(); - Loggers.getLogger(LZFCompressor.class).debug("using encoder [{}] and decoder[{}] ", - this.decoder.getClass().getSimpleName()); + Loggers.getLogger(LZFCompressor.class).debug("using decoder[{}] ", this.decoder.getClass().getSimpleName()); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java b/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java index a4a154330aa..da57b1c882a 100644 --- a/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java +++ b/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java @@ -27,7 +27,6 @@ import org.elasticsearch.*; import org.elasticsearch.common.Base64; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.ByteArray; import java.io.*; import java.net.HttpURLConnection; @@ -37,9 +36,7 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.nio.file.attribute.FileTime; -import java.util.Arrays; import java.util.List; -import java.util.concurrent.Callable; /** * @@ -152,12 +149,6 @@ public class HttpDownloadHelper { } catch (FileNotFoundException | NoSuchFileException e) { // checksum file doesn't exist return false; - } catch (IOException e) { - if (ExceptionsHelper.unwrapCause(e) instanceof FileNotFoundException) 
{ - // checksum file didn't exist - return false; - } - throw e; } finally { IOUtils.deleteFilesIgnoringExceptions(checksumFile); } @@ -378,9 +369,6 @@ public class HttpDownloadHelper { responseCode == HttpURLConnection.HTTP_MOVED_TEMP || responseCode == HttpURLConnection.HTTP_SEE_OTHER) { String newLocation = httpConnection.getHeaderField("Location"); - String message = aSource - + (responseCode == HttpURLConnection.HTTP_MOVED_PERM ? " permanently" - : "") + " moved to " + newLocation; URL newURL = new URL(newLocation); if (!redirectionAllowed(aSource, newURL)) { return null; @@ -426,7 +414,7 @@ public class HttpDownloadHelper { } } if (is == null) { - throw new IOException("Can't get " + source + " to " + dest, lastEx); + throw lastEx; } os = Files.newOutputStream(dest); diff --git a/core/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java b/core/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java index 9966a4e0206..58ea0be016d 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java +++ b/core/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java @@ -17,10 +17,33 @@ package org.elasticsearch.common.inject; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; -import org.elasticsearch.common.inject.internal.*; -import org.elasticsearch.common.inject.spi.*; +import org.elasticsearch.common.inject.internal.Annotations; +import org.elasticsearch.common.inject.internal.BindingImpl; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import org.elasticsearch.common.inject.internal.ExposedBindingImpl; +import org.elasticsearch.common.inject.internal.InstanceBindingImpl; +import org.elasticsearch.common.inject.internal.InternalFactory; +import org.elasticsearch.common.inject.internal.LinkedBindingImpl; +import org.elasticsearch.common.inject.internal.LinkedProviderBindingImpl; +import org.elasticsearch.common.inject.internal.ProviderInstanceBindingImpl; +import org.elasticsearch.common.inject.internal.ProviderMethod; +import org.elasticsearch.common.inject.internal.Scoping; +import org.elasticsearch.common.inject.internal.UntargettedBindingImpl; +import org.elasticsearch.common.inject.spi.BindingTargetVisitor; +import org.elasticsearch.common.inject.spi.ConstructorBinding; +import org.elasticsearch.common.inject.spi.ConvertedConstantBinding; +import org.elasticsearch.common.inject.spi.ExposedBinding; +import org.elasticsearch.common.inject.spi.InjectionPoint; +import org.elasticsearch.common.inject.spi.InstanceBinding; +import org.elasticsearch.common.inject.spi.LinkedKeyBinding; +import org.elasticsearch.common.inject.spi.PrivateElements; +import org.elasticsearch.common.inject.spi.ProviderBinding; +import org.elasticsearch.common.inject.spi.ProviderInstanceBinding; +import org.elasticsearch.common.inject.spi.ProviderKeyBinding; +import org.elasticsearch.common.inject.spi.UntargettedBinding; +import java.util.ArrayList; import java.util.List; import java.util.Set; @@ -32,9 +55,9 @@ import java.util.Set; */ class BindingProcessor extends AbstractProcessor { - private final List<CreationListener> creationListeners = Lists.newArrayList(); + private final List<CreationListener> creationListeners = new ArrayList<>(); private final Initializer initializer; - private final List<Runnable> uninitializedBindings = Lists.newArrayList(); + private final List<Runnable> uninitializedBindings = new ArrayList<>(); 
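Most of the remaining hunks repeat one mechanical substitution: with the diamond operator inferring the type argument, Guava's Lists factory methods buy nothing over the plain ArrayList constructors. A self-contained sketch of the three equivalences applied throughout (class and variable names are illustrative only):

    import java.util.ArrayList;
    import java.util.List;

    class ListsMigrationSketch {
        static void equivalences(List<String> existing) {
            List<String> empty    = new ArrayList<>();         // was Lists.newArrayList()
            List<String> presized = new ArrayList<>(16);       // was Lists.newArrayListWithCapacity(16)
            List<String> copy     = new ArrayList<>(existing); // was Lists.newArrayList(existing)
        }
    }

The only caveat is the copy form: Guava's overload accepts any Iterable, while the ArrayList constructor needs a Collection, which holds for every call site touched here (e.g. pendingInjection.values(), failures(), root.errors).
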
BindingProcessor(Errors errors, Initializer initializer) { super(errors); diff --git a/core/src/main/java/org/elasticsearch/common/inject/DeferredLookups.java b/core/src/main/java/org/elasticsearch/common/inject/DeferredLookups.java index 1631d69bb81..c6ca9bab08a 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/DeferredLookups.java +++ b/core/src/main/java/org/elasticsearch/common/inject/DeferredLookups.java @@ -16,12 +16,12 @@ package org.elasticsearch.common.inject; -import com.google.common.collect.Lists; import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.spi.Element; import org.elasticsearch.common.inject.spi.MembersInjectorLookup; import org.elasticsearch.common.inject.spi.ProviderLookup; +import java.util.ArrayList; import java.util.List; /** @@ -32,7 +32,7 @@ import java.util.List; */ class DeferredLookups implements Lookups { private final InjectorImpl injector; - private final List<Element> lookups = Lists.newArrayList(); + private final List<Element> lookups = new ArrayList<>(); public DeferredLookups(InjectorImpl injector) { this.injector = injector; diff --git a/core/src/main/java/org/elasticsearch/common/inject/EncounterImpl.java b/core/src/main/java/org/elasticsearch/common/inject/EncounterImpl.java index 4a06a48c9cf..2a3c419cc36 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/EncounterImpl.java +++ b/core/src/main/java/org/elasticsearch/common/inject/EncounterImpl.java @@ -17,12 +17,12 @@ package org.elasticsearch.common.inject; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.spi.InjectionListener; import org.elasticsearch.common.inject.spi.Message; import org.elasticsearch.common.inject.spi.TypeEncounter; +import java.util.ArrayList; import java.util.List; import static com.google.common.base.Preconditions.checkState; @@ -64,7 +64,7 @@ final class EncounterImpl<T> implements TypeEncounter<T> { checkState(valid, "Encounters may not be used after hear() returns."); if (membersInjectors == null) { - membersInjectors = Lists.newArrayList(); + membersInjectors = new ArrayList<>(); } membersInjectors.add(membersInjector); @@ -75,7 +75,7 @@ final class EncounterImpl<T> implements TypeEncounter<T> { checkState(valid, "Encounters may not be used after hear() returns."); if (injectionListeners == null) { - injectionListeners = Lists.newArrayList(); + injectionListeners = new ArrayList<>(); } injectionListeners.add(injectionListener); diff --git a/core/src/main/java/org/elasticsearch/common/inject/InheritingState.java b/core/src/main/java/org/elasticsearch/common/inject/InheritingState.java index fb14444fa9a..38b8494d3ef 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/InheritingState.java +++ b/core/src/main/java/org/elasticsearch/common/inject/InheritingState.java @@ -17,9 +17,13 @@ package org.elasticsearch.common.inject; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.BindingImpl; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.InstanceBindingImpl; +import org.elasticsearch.common.inject.internal.InternalFactory; +import org.elasticsearch.common.inject.internal.MatcherAndConverter; +import 
org.elasticsearch.common.inject.internal.SourceProvider; import org.elasticsearch.common.inject.spi.InjectionPoint; import org.elasticsearch.common.inject.spi.TypeListenerBinding; @@ -43,8 +47,8 @@ class InheritingState implements State { private final Map<Key<?>, Binding<?>> explicitBindings = Collections.unmodifiableMap(explicitBindingsMutable); private final Map<Class<? extends Annotation>, Scope> scopes = Maps.newHashMap(); - private final List<MatcherAndConverter> converters = Lists.newArrayList(); - private final List<TypeListenerBinding> listenerBindings = Lists.newArrayList(); + private final List<MatcherAndConverter> converters = new ArrayList<>(); + private final List<TypeListenerBinding> listenerBindings = new ArrayList<>(); private WeakKeySet blacklistedKeys = new WeakKeySet(); private final Object lock; diff --git a/core/src/main/java/org/elasticsearch/common/inject/Initializer.java b/core/src/main/java/org/elasticsearch/common/inject/Initializer.java index 038f9b6a997..6ee855bb673 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/Initializer.java +++ b/core/src/main/java/org/elasticsearch/common/inject/Initializer.java @@ -16,12 +16,12 @@ package org.elasticsearch.common.inject; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.ErrorsException; import org.elasticsearch.common.inject.spi.InjectionPoint; +import java.util.ArrayList; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; @@ -95,7 +95,7 @@ class Initializer { void injectAll(final Errors errors) { // loop over a defensive copy since ensureInjected() mutates the set. Unfortunately, that copy // is made complicated by a bug in IBM's JDK, wherein entrySet().toArray(Object[]) doesn't work - for (InjectableReference<?> reference : Lists.newArrayList(pendingInjection.values())) { + for (InjectableReference<?> reference : new ArrayList<>(pendingInjection.values())) { try { reference.get(errors); } catch (ErrorsException e) { diff --git a/core/src/main/java/org/elasticsearch/common/inject/InjectionRequestProcessor.java b/core/src/main/java/org/elasticsearch/common/inject/InjectionRequestProcessor.java index 35e69d33195..b645f41c3b8 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/InjectionRequestProcessor.java +++ b/core/src/main/java/org/elasticsearch/common/inject/InjectionRequestProcessor.java @@ -17,7 +17,6 @@ package org.elasticsearch.common.inject; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.ErrorsException; import org.elasticsearch.common.inject.internal.InternalContext; @@ -25,6 +24,7 @@ import org.elasticsearch.common.inject.spi.InjectionPoint; import org.elasticsearch.common.inject.spi.InjectionRequest; import org.elasticsearch.common.inject.spi.StaticInjectionRequest; +import java.util.ArrayList; import java.util.List; import java.util.Set; @@ -37,7 +37,7 @@ import java.util.Set; */ class InjectionRequestProcessor extends AbstractProcessor { - private final List<StaticInjection> staticInjections = Lists.newArrayList(); + private final List<StaticInjection> staticInjections = new ArrayList<>(); private final Initializer initializer; InjectionRequestProcessor(Errors errors, Initializer initializer) { diff --git 
a/core/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java b/core/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java index 6f2540c97be..a8048513055 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java +++ b/core/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java @@ -18,15 +18,37 @@ package org.elasticsearch.common.inject; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.common.Classes; -import org.elasticsearch.common.inject.internal.*; -import org.elasticsearch.common.inject.spi.*; +import org.elasticsearch.common.inject.internal.Annotations; +import org.elasticsearch.common.inject.internal.BindingImpl; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import org.elasticsearch.common.inject.internal.InstanceBindingImpl; +import org.elasticsearch.common.inject.internal.InternalContext; +import org.elasticsearch.common.inject.internal.InternalFactory; +import org.elasticsearch.common.inject.internal.LinkedBindingImpl; +import org.elasticsearch.common.inject.internal.LinkedProviderBindingImpl; +import org.elasticsearch.common.inject.internal.MatcherAndConverter; +import org.elasticsearch.common.inject.internal.Nullable; +import org.elasticsearch.common.inject.internal.Scoping; +import org.elasticsearch.common.inject.internal.SourceProvider; +import org.elasticsearch.common.inject.internal.ToStringBuilder; +import org.elasticsearch.common.inject.spi.BindingTargetVisitor; +import org.elasticsearch.common.inject.spi.ConvertedConstantBinding; +import org.elasticsearch.common.inject.spi.Dependency; +import org.elasticsearch.common.inject.spi.InjectionPoint; +import org.elasticsearch.common.inject.spi.ProviderBinding; +import org.elasticsearch.common.inject.spi.ProviderKeyBinding; import org.elasticsearch.common.inject.util.Providers; import java.lang.annotation.Annotation; -import java.lang.reflect.*; +import java.lang.reflect.GenericArrayType; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Modifier; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -681,7 +703,7 @@ class InjectorImpl implements Injector, Lookups { <T> void put(TypeLiteral<T> type, Binding<T> binding) { List<Binding<?>> bindingsForType = multimap.get(type); if (bindingsForType == null) { - bindingsForType = Lists.newArrayList(); + bindingsForType = new ArrayList<>(); multimap.put(type, bindingsForType); } bindingsForType.add(binding); diff --git a/core/src/main/java/org/elasticsearch/common/inject/InjectorShell.java b/core/src/main/java/org/elasticsearch/common/inject/InjectorShell.java index f463a24b751..5ac7934fc74 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/InjectorShell.java +++ b/core/src/main/java/org/elasticsearch/common/inject/InjectorShell.java @@ -17,10 +17,23 @@ package org.elasticsearch.common.inject; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; -import org.elasticsearch.common.inject.internal.*; -import org.elasticsearch.common.inject.spi.*; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import 
org.elasticsearch.common.inject.internal.InternalContext; +import org.elasticsearch.common.inject.internal.InternalFactory; +import org.elasticsearch.common.inject.internal.PrivateElementsImpl; +import org.elasticsearch.common.inject.internal.ProviderInstanceBindingImpl; +import org.elasticsearch.common.inject.internal.Scoping; +import org.elasticsearch.common.inject.internal.SourceProvider; +import org.elasticsearch.common.inject.internal.Stopwatch; +import org.elasticsearch.common.inject.spi.Dependency; +import org.elasticsearch.common.inject.spi.Element; +import org.elasticsearch.common.inject.spi.Elements; +import org.elasticsearch.common.inject.spi.InjectionPoint; +import org.elasticsearch.common.inject.spi.PrivateElements; +import org.elasticsearch.common.inject.spi.TypeListenerBinding; +import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; @@ -59,8 +72,8 @@ class InjectorShell { } static class Builder { - private final List<Element> elements = Lists.newArrayList(); - private final List<Module> modules = Lists.newArrayList(); + private final List<Element> elements = new ArrayList<>(); + private final List<Module> modules = new ArrayList<>(); /** * lazily constructed @@ -148,7 +161,7 @@ class InjectorShell { bindingProcessor.process(injector, elements); stopwatch.resetAndLog("Binding creation"); - List<InjectorShell> injectorShells = Lists.newArrayList(); + List<InjectorShell> injectorShells = new ArrayList<>(); injectorShells.add(new InjectorShell(this, elements, injector)); // recursively build child shells diff --git a/core/src/main/java/org/elasticsearch/common/inject/MembersInjectorStore.java b/core/src/main/java/org/elasticsearch/common/inject/MembersInjectorStore.java index 99d0924576d..cd7c168860a 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/MembersInjectorStore.java +++ b/core/src/main/java/org/elasticsearch/common/inject/MembersInjectorStore.java @@ -17,7 +17,6 @@ package org.elasticsearch.common.inject; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.ErrorsException; import org.elasticsearch.common.inject.internal.FailableCache; @@ -25,6 +24,7 @@ import org.elasticsearch.common.inject.spi.InjectionPoint; import org.elasticsearch.common.inject.spi.TypeListenerBinding; import java.lang.reflect.Field; +import java.util.ArrayList; import java.util.List; import java.util.Set; @@ -106,7 +106,7 @@ class MembersInjectorStore { */ ImmutableList<SingleMemberInjector> getInjectors( Set<InjectionPoint> injectionPoints, Errors errors) { - List<SingleMemberInjector> injectors = Lists.newArrayList(); + List<SingleMemberInjector> injectors = new ArrayList<>(); for (InjectionPoint injectionPoint : injectionPoints) { try { Errors errorsForMember = injectionPoint.isOptional() diff --git a/core/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java b/core/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java index c65a07dcfbc..41cdecfbcad 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/inject/ModulesBuilder.java @@ -19,8 +19,7 @@ package org.elasticsearch.common.inject; -import com.google.common.collect.Lists; - +import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -29,7 +28,7 @@ import java.util.List; */ public class ModulesBuilder implements Iterable<Module> { - 
private final List<Module> modules = Lists.newArrayList(); + private final List<Module> modules = new ArrayList<>(); public ModulesBuilder add(Module... newModules) { for (Module module : newModules) { diff --git a/core/src/main/java/org/elasticsearch/common/inject/PrivateElementProcessor.java b/core/src/main/java/org/elasticsearch/common/inject/PrivateElementProcessor.java index 9b0fd3d7e07..2b7dc692996 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/PrivateElementProcessor.java +++ b/core/src/main/java/org/elasticsearch/common/inject/PrivateElementProcessor.java @@ -16,10 +16,10 @@ package org.elasticsearch.common.inject; -import com.google.common.collect.Lists; import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.spi.PrivateElements; +import java.util.ArrayList; import java.util.List; /** @@ -30,7 +30,7 @@ import java.util.List; class PrivateElementProcessor extends AbstractProcessor { private final Stage stage; - private final List<InjectorShell.Builder> injectorShellBuilders = Lists.newArrayList(); + private final List<InjectorShell.Builder> injectorShellBuilders = new ArrayList<>(); PrivateElementProcessor(Errors errors, Stage stage) { super(errors); diff --git a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java index 173abae910e..3973d9eb7e8 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java +++ b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java @@ -16,7 +16,6 @@ package org.elasticsearch.common.inject.assistedinject; -import com.google.common.collect.Lists; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.TypeLiteral; @@ -24,7 +23,11 @@ import java.lang.annotation.Annotation; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Type; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; /** * Internal respresentation of a constructor annotated with @@ -45,7 +48,7 @@ class AssistedConstructor<T> { Annotation[][] annotations = constructor.getParameterAnnotations(); - List<Type> typeList = Lists.newArrayList(); + List<Type> typeList = new ArrayList<>(); allParameters = new ArrayList<>(); // categorize params as @Assisted or @Injected diff --git a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider.java b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider.java index e36fabb9000..cee76821edb 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider.java +++ b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider.java @@ -18,16 +18,25 @@ package org.elasticsearch.common.inject.assistedinject; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import org.elasticsearch.common.inject.*; +import org.elasticsearch.common.inject.ConfigurationException; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Injector; +import org.elasticsearch.common.inject.Key; +import org.elasticsearch.common.inject.Provider; +import 
org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.spi.Dependency; import org.elasticsearch.common.inject.spi.HasDependencies; import org.elasticsearch.common.inject.spi.Message; import java.lang.annotation.Annotation; -import java.lang.reflect.*; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.lang.reflect.Type; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; @@ -209,7 +218,7 @@ public class FactoryProvider<F> implements Provider<F>, HasDependencies { private static Map<Method, AssistedConstructor<?>> createMethodMapping( TypeLiteral<?> factoryType, TypeLiteral<?> implementationType) { - List<AssistedConstructor<?>> constructors = Lists.newArrayList(); + List<AssistedConstructor<?>> constructors = new ArrayList<>(); for (Constructor<?> constructor : implementationType.getRawType().getDeclaredConstructors()) { if (constructor.getAnnotation(AssistedInject.class) != null) { @@ -248,7 +257,7 @@ public class FactoryProvider<F> implements Provider<F>, HasDependencies { method, implementationType); } - List<Type> parameterTypes = Lists.newArrayList(); + List<Type> parameterTypes = new ArrayList<>(); for (TypeLiteral<?> parameterType : factoryType.getParameterTypes(method)) { parameterTypes.add(parameterType.getType()); } @@ -281,7 +290,7 @@ public class FactoryProvider<F> implements Provider<F>, HasDependencies { @Override public Set<Dependency<?>> getDependencies() { - List<Dependency<?>> dependencies = Lists.newArrayList(); + List<Dependency<?>> dependencies = new ArrayList<>(); for (AssistedConstructor<?> constructor : factoryMethodToConstructor.values()) { for (Parameter parameter : constructor.getAllParameters()) { if (!parameter.isProvidedByFactory()) { diff --git a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider2.java b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider2.java index 2bfbcefe6b7..03b194816ea 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider2.java +++ b/core/src/main/java/org/elasticsearch/common/inject/assistedinject/FactoryProvider2.java @@ -19,8 +19,17 @@ package org.elasticsearch.common.inject.assistedinject; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import org.elasticsearch.common.inject.*; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Binder; +import org.elasticsearch.common.inject.Binding; +import org.elasticsearch.common.inject.ConfigurationException; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Injector; +import org.elasticsearch.common.inject.Key; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.inject.Provider; +import org.elasticsearch.common.inject.ProvisionException; +import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.ErrorsException; import org.elasticsearch.common.inject.spi.Message; @@ -30,6 +39,7 @@ import java.lang.annotation.Annotation; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import 
java.lang.reflect.Proxy; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -118,7 +128,7 @@ final class FactoryProvider2<F> implements InvocationHandler, Provider<F> { List<TypeLiteral<?>> params = factoryType.getParameterTypes(method); Annotation[][] paramAnnotations = method.getParameterAnnotations(); int p = 0; - List<Key<?>> keys = Lists.newArrayList(); + List<Key<?>> keys = new ArrayList<>(); for (TypeLiteral<?> param : params) { Key<?> paramKey = getKey(param, method, paramAnnotations[p++], errors); keys.add(assistKey(method, paramKey, errors)); diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/Errors.java b/core/src/main/java/org/elasticsearch/common/inject/internal/Errors.java index e898007231c..d3a73b3c326 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/Errors.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/Errors.java @@ -18,10 +18,20 @@ package org.elasticsearch.common.inject.internal; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.common.inject.*; -import org.elasticsearch.common.inject.spi.*; +import org.elasticsearch.common.inject.ConfigurationException; +import org.elasticsearch.common.inject.CreationException; +import org.elasticsearch.common.inject.Key; +import org.elasticsearch.common.inject.MembersInjector; +import org.elasticsearch.common.inject.Provider; +import org.elasticsearch.common.inject.ProvisionException; +import org.elasticsearch.common.inject.Scope; +import org.elasticsearch.common.inject.TypeLiteral; +import org.elasticsearch.common.inject.spi.Dependency; +import org.elasticsearch.common.inject.spi.InjectionListener; +import org.elasticsearch.common.inject.spi.InjectionPoint; +import org.elasticsearch.common.inject.spi.Message; +import org.elasticsearch.common.inject.spi.TypeListenerBinding; import java.io.PrintWriter; import java.io.Serializable; @@ -31,7 +41,12 @@ import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Member; import java.lang.reflect.Type; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.Formatter; +import java.util.List; +import java.util.Locale; /** * A collection of error messages. 
If this type is passed as a method parameter, the method is @@ -361,7 +376,7 @@ public final class Errors implements Serializable { } private Message merge(Message message) { - List<Object> sources = Lists.newArrayList(); + List<Object> sources = new ArrayList<>(); sources.addAll(getSources()); sources.addAll(message.getSources()); return new Message(sources, message.getMessage(), message.getCause()); @@ -384,7 +399,7 @@ public final class Errors implements Serializable { } public List<Object> getSources() { - List<Object> sources = Lists.newArrayList(); + List<Object> sources = new ArrayList<>(); for (Errors e = this; e != null; e = e.parent) { if (e.source != SourceProvider.UNKNOWN_SOURCE) { sources.add(0, e.source); @@ -421,7 +436,7 @@ public final class Errors implements Serializable { public Errors addMessage(Message message) { if (root.errors == null) { - root.errors = Lists.newArrayList(); + root.errors = new ArrayList<>(); } root.errors.add(message); return this; @@ -439,7 +454,7 @@ public final class Errors implements Serializable { return ImmutableList.of(); } - List<Message> result = Lists.newArrayList(root.errors); + List<Message> result = new ArrayList<>(root.errors); CollectionUtil.timSort(result, new Comparator<Message>() { @Override public int compare(Message a, Message b) { diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/Join.java b/core/src/main/java/org/elasticsearch/common/inject/internal/Join.java index fd8348d8f34..65005992438 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/Join.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/Join.java @@ -16,7 +16,7 @@ package org.elasticsearch.common.inject.internal; -import com.google.common.collect.Lists; +import org.elasticsearch.common.util.CollectionUtils; import java.io.IOException; import java.util.Arrays; @@ -98,7 +98,7 @@ public final class Join { public static String join( String delimiter, @Nullable Object firstToken, Object... otherTokens) { checkNotNull(otherTokens); - return join(delimiter, Lists.newArrayList(firstToken, otherTokens)); + return join(delimiter, CollectionUtils.asArrayList(firstToken, otherTokens)); } /** @@ -208,7 +208,7 @@ public final class Join { public static <T extends Appendable> T join(T appendable, String delimiter, @Nullable Object firstToken, Object... 
otherTokens) { checkNotNull(otherTokens); - return join(appendable, delimiter, Lists.newArrayList(firstToken, otherTokens)); + return join(appendable, delimiter, CollectionUtils.asArrayList(firstToken, otherTokens)); } /** diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/PrivateElementsImpl.java b/core/src/main/java/org/elasticsearch/common/inject/internal/PrivateElementsImpl.java index 0e6a33f199f..59b5aaa0e7d 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/PrivateElementsImpl.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/PrivateElementsImpl.java @@ -18,7 +18,6 @@ package org.elasticsearch.common.inject.internal; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.Injector; @@ -28,6 +27,7 @@ import org.elasticsearch.common.inject.spi.Element; import org.elasticsearch.common.inject.spi.ElementVisitor; import org.elasticsearch.common.inject.spi.PrivateElements; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; @@ -47,8 +47,8 @@ public final class PrivateElementsImpl implements PrivateElements { private final Object source; - private List<Element> elementsMutable = Lists.newArrayList(); - private List<ExposureBuilder<?>> exposureBuilders = Lists.newArrayList(); + private List<Element> elementsMutable = new ArrayList<>(); + private List<ExposureBuilder<?>> exposureBuilders = new ArrayList<>(); /** * lazily instantiated diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethodsModule.java b/core/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethodsModule.java index 46705306c01..9884b88f43a 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethodsModule.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethodsModule.java @@ -17,8 +17,12 @@ package org.elasticsearch.common.inject.internal; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; -import org.elasticsearch.common.inject.*; +import org.elasticsearch.common.inject.Binder; +import org.elasticsearch.common.inject.Key; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.inject.Provider; +import org.elasticsearch.common.inject.Provides; +import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.spi.Dependency; import org.elasticsearch.common.inject.spi.Message; import org.elasticsearch.common.inject.util.Modules; @@ -26,6 +30,7 @@ import org.elasticsearch.common.inject.util.Modules; import java.lang.annotation.Annotation; import java.lang.reflect.Member; import java.lang.reflect.Method; +import java.util.ArrayList; import java.util.List; import static com.google.common.base.Preconditions.checkNotNull; @@ -74,7 +79,7 @@ public final class ProviderMethodsModule implements Module { } public List<ProviderMethod<?>> getProviderMethods(Binder binder) { - List<ProviderMethod<?>> result = Lists.newArrayList(); + List<ProviderMethod<?>> result = new ArrayList<>(); for (Class<?> c = delegate.getClass(); c != Object.class; c = c.getSuperclass()) { for (Method method : c.getDeclaredMethods()) { if (method.getAnnotation(Provides.class) != null) { @@ -90,8 +95,8 @@ public final class ProviderMethodsModule implements Module { Errors 
errors = new Errors(method); // prepare the parameter providers - List<Dependency<?>> dependencies = Lists.newArrayList(); - List<Provider<?>> parameterProviders = Lists.newArrayList(); + List<Dependency<?>> dependencies = new ArrayList<>(); + List<Provider<?>> parameterProviders = new ArrayList<>(); List<TypeLiteral<?>> parameterTypes = typeLiteral.getParameterTypes(method); Annotation[][] parameterAnnotations = method.getParameterAnnotations(); for (int i = 0; i < parameterTypes.size(); i++) { diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/SourceProvider.java b/core/src/main/java/org/elasticsearch/common/inject/internal/SourceProvider.java index 4c34be0d415..f0fa05c66dd 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/SourceProvider.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/SourceProvider.java @@ -18,8 +18,8 @@ package org.elasticsearch.common.inject.internal; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; +import java.util.ArrayList; import java.util.List; /** @@ -58,7 +58,7 @@ public class SourceProvider { * Returns the class names as Strings */ private static List<String> asStrings(Class... classes) { - List<String> strings = Lists.newArrayList(); + List<String> strings = new ArrayList<>(); for (Class c : classes) { strings.add(c.getName()); } diff --git a/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java b/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java index fac73d3ef57..0fb64a4e6d1 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java +++ b/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java @@ -18,8 +18,15 @@ package org.elasticsearch.common.inject.multibindings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; -import org.elasticsearch.common.inject.*; +import org.elasticsearch.common.inject.Binder; +import org.elasticsearch.common.inject.Binding; +import org.elasticsearch.common.inject.ConfigurationException; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Injector; +import org.elasticsearch.common.inject.Key; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.inject.Provider; +import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.binder.LinkedBindingBuilder; import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.spi.Dependency; @@ -29,6 +36,7 @@ import org.elasticsearch.common.inject.util.Types; import java.lang.annotation.Annotation; import java.lang.reflect.Type; +import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; @@ -229,8 +237,8 @@ public abstract class Multibinder<T> { */ @Inject void initialize(Injector injector) { - providers = Lists.newArrayList(); - List<Dependency<?>> dependencies = Lists.newArrayList(); + providers = new ArrayList<>(); + List<Dependency<?>> dependencies = new ArrayList<>(); for (Binding<?> entry : injector.findBindingsByType(elementType)) { if (keyMatches(entry.getKey())) { diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/Dependency.java b/core/src/main/java/org/elasticsearch/common/inject/spi/Dependency.java index f0891f5b456..8f1ce52b116 100644 
--- a/core/src/main/java/org/elasticsearch/common/inject/spi/Dependency.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/Dependency.java @@ -18,9 +18,9 @@ package org.elasticsearch.common.inject.spi; import com.google.common.base.Objects; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; import org.elasticsearch.common.inject.Key; +import java.util.ArrayList; import java.util.List; import java.util.Set; @@ -60,7 +60,7 @@ public final class Dependency<T> { * Returns the dependencies from the given injection points. */ public static Set<Dependency<?>> forInjectionPoints(Set<InjectionPoint> injectionPoints) { - List<Dependency<?>> dependencies = Lists.newArrayList(); + List<Dependency<?>> dependencies = new ArrayList<>(); for (InjectionPoint injectionPoint : injectionPoints) { dependencies.addAll(injectionPoint.getDependencies()); } diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java b/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java index c6dc1e4ef4b..965223604b5 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java @@ -17,19 +17,41 @@ package org.elasticsearch.common.inject.spi; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; -import org.elasticsearch.common.inject.*; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Binder; +import org.elasticsearch.common.inject.Binding; +import org.elasticsearch.common.inject.Key; +import org.elasticsearch.common.inject.MembersInjector; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.inject.PrivateBinder; +import org.elasticsearch.common.inject.PrivateModule; +import org.elasticsearch.common.inject.Provider; +import org.elasticsearch.common.inject.Scope; +import org.elasticsearch.common.inject.Stage; +import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.binder.AnnotatedBindingBuilder; import org.elasticsearch.common.inject.binder.AnnotatedConstantBindingBuilder; import org.elasticsearch.common.inject.binder.AnnotatedElementBuilder; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.AbstractBindingBuilder; +import org.elasticsearch.common.inject.internal.BindingBuilder; +import org.elasticsearch.common.inject.internal.ConstantBindingBuilderImpl; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ExposureBuilder; +import org.elasticsearch.common.inject.internal.PrivateElementsImpl; +import org.elasticsearch.common.inject.internal.ProviderMethodsModule; +import org.elasticsearch.common.inject.internal.SourceProvider; import org.elasticsearch.common.inject.matcher.Matcher; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import java.lang.annotation.Annotation; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Set; import static com.google.common.base.Preconditions.checkArgument; @@ -121,7 +143,7 @@ public final class Elements { private RecordingBinder(Stage stage) { this.stage = stage; this.modules = Sets.newHashSet(); - this.elements = Lists.newArrayList(); + this.elements = new 
ArrayList<>(); this.source = null; this.sourceProvider = new SourceProvider().plusSkippedClasses( Elements.class, RecordingBinder.class, AbstractModule.class, diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/InjectionPoint.java b/core/src/main/java/org/elasticsearch/common/inject/spi/InjectionPoint.java index 8aef647f994..aa7e0793838 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/spi/InjectionPoint.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/InjectionPoint.java @@ -18,16 +18,29 @@ package org.elasticsearch.common.inject.spi; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; import org.elasticsearch.common.inject.ConfigurationException; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Key; import org.elasticsearch.common.inject.TypeLiteral; -import org.elasticsearch.common.inject.internal.*; +import org.elasticsearch.common.inject.internal.Annotations; +import org.elasticsearch.common.inject.internal.Errors; +import org.elasticsearch.common.inject.internal.ErrorsException; +import org.elasticsearch.common.inject.internal.MoreTypes; +import org.elasticsearch.common.inject.internal.Nullability; import java.lang.annotation.Annotation; -import java.lang.reflect.*; -import java.util.*; +import java.lang.reflect.AnnotatedElement; +import java.lang.reflect.Constructor; +import java.lang.reflect.Field; +import java.lang.reflect.Member; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Set; import static org.elasticsearch.common.inject.internal.MoreTypes.getRawType; @@ -93,7 +106,7 @@ public final class InjectionPoint { Errors errors = new Errors(member); Iterator<Annotation[]> annotationsIterator = Arrays.asList(parameterAnnotations).iterator(); - List<Dependency<?>> dependencies = Lists.newArrayList(); + List<Dependency<?>> dependencies = new ArrayList<>(); int index = 0; for (TypeLiteral<?> parameterType : type.getParameterTypes(member)) { @@ -240,7 +253,7 @@ public final class InjectionPoint { * of the valid injection points. */ public static Set<InjectionPoint> forStaticMethodsAndFields(TypeLiteral type) { - List<InjectionPoint> sink = Lists.newArrayList(); + List<InjectionPoint> sink = new ArrayList<>(); Errors errors = new Errors(); addInjectionPoints(type, Factory.FIELDS, true, sink, errors); @@ -280,7 +293,7 @@ public final class InjectionPoint { * of the valid injection points. */ public static Set<InjectionPoint> forInstanceMethodsAndFields(TypeLiteral<?> type) { - List<InjectionPoint> sink = Lists.newArrayList(); + List<InjectionPoint> sink = new ArrayList<>(); Errors errors = new Errors(); // TODO (crazybob): Filter out overridden members. 
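
Note on the hunks above: every one of them applies the same mechanical substitution, replacing Guava's Lists factory methods with plain JDK constructors, which read just as tersely now that the diamond operator is available. A minimal sketch of the equivalences this change set relies on (class and variable names here are illustrative, not from the patch):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    class ListMigrationSketch {
        void equivalences() {
            // was: Lists.newArrayList()
            List<String> empty = new ArrayList<>();
            // was: Lists.newArrayListWithExpectedSize(16) / newArrayListWithCapacity(16)
            List<String> sized = new ArrayList<>(16);
            // was: Lists.newArrayList("x", "y")
            List<String> seeded = new ArrayList<>(Arrays.asList("x", "y"));
        }
    }
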
diff --git a/core/src/main/java/org/elasticsearch/common/inject/util/Modules.java b/core/src/main/java/org/elasticsearch/common/inject/util/Modules.java index ae2205836bc..2849cc45b40 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/util/Modules.java +++ b/core/src/main/java/org/elasticsearch/common/inject/util/Modules.java @@ -17,13 +17,24 @@ package org.elasticsearch.common.inject.util; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import org.elasticsearch.common.inject.*; -import org.elasticsearch.common.inject.spi.*; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Binder; +import org.elasticsearch.common.inject.Binding; +import org.elasticsearch.common.inject.Key; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.inject.PrivateBinder; +import org.elasticsearch.common.inject.Scope; +import org.elasticsearch.common.inject.spi.DefaultBindingScopingVisitor; +import org.elasticsearch.common.inject.spi.DefaultElementVisitor; +import org.elasticsearch.common.inject.spi.Element; +import org.elasticsearch.common.inject.spi.Elements; +import org.elasticsearch.common.inject.spi.PrivateElements; +import org.elasticsearch.common.inject.spi.ScopeBinding; import java.lang.annotation.Annotation; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -168,7 +179,7 @@ public final class Modules { // overridden binding once so things still blow up if the module binds the same thing // multiple times. final Map<Scope, Object> scopeInstancesInUse = Maps.newHashMap(); - final List<ScopeBinding> scopeBindings = Lists.newArrayList(); + final List<ScopeBinding> scopeBindings = new ArrayList<>(); new ModuleWriter(binder()) { @Override public <T> Void visit(Binding<T> binding) { diff --git a/core/src/main/java/org/elasticsearch/common/io/Streams.java b/core/src/main/java/org/elasticsearch/common/io/Streams.java index 166e7960b3c..e6265ab01ed 100644 --- a/core/src/main/java/org/elasticsearch/common/io/Streams.java +++ b/core/src/main/java/org/elasticsearch/common/io/Streams.java @@ -21,10 +21,17 @@ package org.elasticsearch.common.io; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; import org.elasticsearch.common.util.Callback; -import java.io.*; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.io.Reader; +import java.io.StringWriter; +import java.io.Writer; +import java.util.ArrayList; import java.util.List; /** @@ -216,7 +223,7 @@ public abstract class Streams { } public static List<String> readAllLines(InputStream input) throws IOException { - final List<String> lines = Lists.newArrayList(); + final List<String> lines = new ArrayList<>(); readAllLines(input, new Callback<String>() { @Override public void handle(String line) { diff --git a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java index 93d4cc3d111..1dafcff62d3 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.logging; -import com.google.common.collect.Lists; import 
org.apache.lucene.util.SuppressForbidden; import org.elasticsearch.common.Classes; import org.elasticsearch.common.settings.Settings; @@ -28,10 +27,11 @@ import org.elasticsearch.index.shard.ShardId; import java.net.InetAddress; import java.net.UnknownHostException; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; import static java.util.Arrays.asList; +import static org.elasticsearch.common.util.CollectionUtils.asArrayList; /** * A set of utilities around Logging. @@ -59,16 +59,16 @@ public class Loggers { } public static ESLogger getLogger(Class clazz, Settings settings, ShardId shardId, String... prefixes) { - return getLogger(clazz, settings, shardId.index(), Lists.asList(Integer.toString(shardId.id()), prefixes).toArray(new String[0])); + return getLogger(clazz, settings, shardId.index(), asArrayList(Integer.toString(shardId.id()), prefixes).toArray(new String[0])); } /** Just like {@link #getLogger(Class, org.elasticsearch.common.settings.Settings,ShardId,String...)} but String loggerName instead of Class. */ public static ESLogger getLogger(String loggerName, Settings settings, ShardId shardId, String... prefixes) { - return getLogger(loggerName, settings, Lists.asList(shardId.index().name(), Integer.toString(shardId.id()), prefixes).toArray(new String[0])); + return getLogger(loggerName, settings, asArrayList(shardId.index().name(), Integer.toString(shardId.id()), prefixes).toArray(new String[0])); } public static ESLogger getLogger(Class clazz, Settings settings, Index index, String... prefixes) { - return getLogger(clazz, settings, Lists.asList(SPACE, index.name(), prefixes).toArray(new String[0])); + return getLogger(clazz, settings, asArrayList(SPACE, index.name(), prefixes).toArray(new String[0])); } public static ESLogger getLogger(Class clazz, Settings settings, String... prefixes) { @@ -86,7 +86,7 @@ public class Loggers { @SuppressForbidden(reason = "do not know what this method does") public static ESLogger getLogger(String loggerName, Settings settings, String... 
prefixes) { - List<String> prefixesList = newArrayList(); + List<String> prefixesList = new ArrayList<>(); if (settings.getAsBoolean("logger.logHostAddress", false)) { final InetAddress addr = getHostAddress(); if (addr != null) { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 4c1e7c49394..600c23899cb 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -20,46 +20,14 @@ package org.elasticsearch.common.lucene; import com.google.common.collect.Iterables; - import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.index.CorruptIndexException; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexCommit; -import org.apache.lucene.index.IndexFileNames; -import org.apache.lucene.index.IndexFormatTooNewException; -import org.apache.lucene.index.IndexFormatTooOldException; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.index.SegmentCommitInfo; -import org.apache.lucene.index.SegmentInfos; -import org.apache.lucene.search.Collector; -import org.apache.lucene.search.DocIdSet; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Explanation; -import org.apache.lucene.search.FieldDoc; -import org.apache.lucene.search.Filter; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.SimpleCollector; -import org.apache.lucene.search.SortField; -import org.apache.lucene.search.TimeLimitingCollector; -import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TopFieldDocs; -import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.IOContext; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.store.Lock; -import org.apache.lucene.store.LockObtainFailedException; +import org.apache.lucene.index.*; +import org.apache.lucene.search.*; +import org.apache.lucene.store.*; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Counter; @@ -75,14 +43,11 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.text.ParseException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; +import java.util.*; import static org.elasticsearch.common.lucene.search.NoopCollector.NOOP_COLLECTOR; @@ -379,6 +344,24 @@ public class Lucene { return false; } + /** + * Performs an exists (count > 0) query on the searcher from the <code>searchContext</code> for <code>query</code> + * using the given 
<code>collector</code> + * + * The <code>collector</code> can be instantiated using <code>Lucene.createExistsCollector()</code> + */ + public static boolean exists(SearchContext searchContext, Query query, EarlyTerminatingCollector collector) throws IOException { + collector.reset(); + try { + searchContext.searcher().search(query, collector); + } catch (EarlyTerminationException e) { + // ignore, just early termination... + } finally { + searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION); + } + return collector.exists(); + } + /** * Creates an {@link org.elasticsearch.common.lucene.Lucene.EarlyTerminatingCollector} * with a threshold of <code>1</code> diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllEntries.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllEntries.java index fb6ebc28e1c..5106cdaf979 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllEntries.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/all/AllEntries.java @@ -19,12 +19,12 @@ package org.elasticsearch.common.lucene.all; -import com.google.common.collect.Lists; import org.elasticsearch.common.io.FastCharArrayWriter; import org.elasticsearch.common.io.FastStringReader; import java.io.IOException; import java.io.Reader; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Set; @@ -66,7 +66,7 @@ public class AllEntries extends Reader { } } - private final List<Entry> entries = Lists.newArrayList(); + private final List<Entry> entries = new ArrayList<>(); private Entry current; diff --git a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java index 52b102ed5b3..8e8c47c4614 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java @@ -19,8 +19,6 @@ package org.elasticsearch.common.lucene.index; -import com.google.common.collect.Lists; - import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PostingsEnum; @@ -37,6 +35,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; import java.io.IOException; +import java.util.ArrayList; import java.util.List; /** @@ -79,7 +78,7 @@ public class FilterableTermsEnum extends TermsEnum { numDocs = reader.maxDoc(); } List<LeafReaderContext> leaves = reader.leaves(); - List<Holder> enums = Lists.newArrayListWithExpectedSize(leaves.size()); + List<Holder> enums = new ArrayList<>(leaves.size()); final Weight weight; if (filter == null) { weight = null; diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index 13383d68468..096432dd789 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -24,7 +24,6 @@ import com.google.common.base.Predicate; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.Version; import org.elasticsearch.common.Booleans; @@ -35,17 +34,31 @@ import org.elasticsearch.common.io.stream.StreamOutput; import 
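
Aside on the Lucene.exists(...) helper added above: it is built around an EarlyTerminatingCollector with a threshold of 1, so the search aborts with EarlyTerminationException as soon as a single hit is collected, and the collector then reports whether anything matched. A sketch of a call site, assuming a SearchContext and Query supplied by the surrounding search code (as in the code paths this helper serves):

    // Sketch only; SearchContext and Query come from the caller.
    static boolean anyHits(SearchContext context, Query query) throws IOException {
        // createExistsCollector() returns a collector with a threshold of 1, per the javadoc above
        Lucene.EarlyTerminatingCollector collector = Lucene.createExistsCollector();
        // exists(...) resets the collector, runs the search, and treats early termination as success
        return Lucene.exists(context, query, collector);
    }
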
org.elasticsearch.common.property.PropertyPlaceholder; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.settings.loader.SettingsLoaderFactory; -import org.elasticsearch.common.unit.*; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.MemorySizeValue; +import org.elasticsearch.common.unit.RatioValue; +import org.elasticsearch.common.unit.SizeValue; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; -import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -180,7 +193,7 @@ public final class Settings implements ToXContent { } } if (isArray && (maxIndex + 1) == map.size()) { - ArrayList<Object> newValue = Lists.newArrayListWithExpectedSize(maxIndex + 1); + ArrayList<Object> newValue = new ArrayList<>(maxIndex + 1); for (int i = 0; i <= maxIndex; i++) { Object obj = map.get(Integer.toString(i)); if (obj == null) { @@ -558,7 +571,7 @@ public final class Settings implements ToXContent { * @throws org.elasticsearch.common.settings.SettingsException */ public String[] getAsArray(String settingPrefix, String[] defaultArray, Boolean commaDelimited) throws SettingsException { - List<String> result = Lists.newArrayList(); + List<String> result = new ArrayList<>(); if (get(settingPrefix) != null) { if (commaDelimited) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java index 56739c0f19f..d2bde251330 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java @@ -25,14 +25,13 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.rest.RestRequest; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.CopyOnWriteArrayList; -import static com.google.common.collect.Lists.newArrayList; - /** * */ @@ -89,7 +88,7 @@ public class SettingsFilter extends AbstractComponent { public static Settings filterSettings(String patterns, Settings settings) { String[] patternArray = Strings.delimitedListToStringArray(patterns, ","); Settings.Builder builder = Settings.settingsBuilder().put(settings); - List<String> simpleMatchPatternList = newArrayList(); + List<String> simpleMatchPatternList = new ArrayList<>(); for (String pattern : patternArray) { if (Regex.isSimpleMatchPattern(pattern)) { simpleMatchPatternList.add(pattern); diff --git a/core/src/main/java/org/elasticsearch/common/settings/loader/PropertiesSettingsLoader.java b/core/src/main/java/org/elasticsearch/common/settings/loader/PropertiesSettingsLoader.java index 0bc97376bb0..8bff4ad0255 100644 --- 
a/core/src/main/java/org/elasticsearch/common/settings/loader/PropertiesSettingsLoader.java +++ b/core/src/main/java/org/elasticsearch/common/settings/loader/PropertiesSettingsLoader.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.settings.loader; import org.apache.lucene.util.IOUtils; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.io.stream.StreamInput; @@ -36,7 +37,7 @@ public class PropertiesSettingsLoader implements SettingsLoader { @Override public Map<String, String> load(String source) throws IOException { - Properties props = new Properties(); + Properties props = new NoDuplicatesProperties(); FastStringReader reader = new FastStringReader(source); try { props.load(reader); @@ -52,7 +53,7 @@ public class PropertiesSettingsLoader implements SettingsLoader { @Override public Map<String, String> load(byte[] source) throws IOException { - Properties props = new Properties(); + Properties props = new NoDuplicatesProperties(); StreamInput stream = StreamInput.wrap(source); try { props.load(stream); @@ -65,4 +66,15 @@ public class PropertiesSettingsLoader implements SettingsLoader { IOUtils.closeWhileHandlingException(stream); } } + + class NoDuplicatesProperties extends Properties { + @Override + public synchronized Object put(Object key, Object value) { + Object previousValue = super.put(key, value); + if (previousValue != null) { + throw new ElasticsearchParseException("duplicate settings key [{}] found, previous value [{}], current value [{}]", key, previousValue, value); + } + return previousValue; + } + } } diff --git a/core/src/main/java/org/elasticsearch/common/settings/loader/SettingsLoader.java b/core/src/main/java/org/elasticsearch/common/settings/loader/SettingsLoader.java index 86e905e9f52..3760f3f87d5 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/loader/SettingsLoader.java +++ b/core/src/main/java/org/elasticsearch/common/settings/loader/SettingsLoader.java @@ -22,10 +22,10 @@ package org.elasticsearch.common.settings.loader; import org.elasticsearch.common.Nullable; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newHashMap; /** @@ -42,7 +42,7 @@ public interface SettingsLoader { return settings; } StringBuilder sb = new StringBuilder(); - List<String> path = newArrayList(); + List<String> path = new ArrayList<>(); serializeMap(settings, sb, path, map); return settings; } diff --git a/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java b/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java index e3e08fb93f2..945e8298138 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java +++ b/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java @@ -20,16 +20,15 @@ package org.elasticsearch.common.settings.loader; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; import static 
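
Both settings loaders in this region now fail fast on duplicate keys instead of silently letting the last occurrence win: PropertiesSettingsLoader does it by overriding Properties.put (above), and XContentSettingsLoader (below) does the same at parse time, adding line and column numbers from the parser. Roughly, assuming the properties loader is invoked directly with an inline source (the key is an arbitrary example):

    PropertiesSettingsLoader loader = new PropertiesSettingsLoader();
    try {
        // the second occurrence of the key trips the overridden put(...)
        loader.load("cluster.name=a\ncluster.name=b");
    } catch (ElasticsearchParseException e) {
        // duplicate settings key [cluster.name] found, previous value [a], current value [b]
    }
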
com.google.common.collect.Maps.newHashMap; /** @@ -57,7 +56,7 @@ public abstract class XContentSettingsLoader implements SettingsLoader { public Map<String, String> load(XContentParser jp) throws IOException { StringBuilder sb = new StringBuilder(); Map<String, String> settings = newHashMap(); - List<String> path = newArrayList(); + List<String> path = new ArrayList<>(); XContentParser.Token token = jp.nextToken(); if (token == null) { return settings; @@ -141,7 +140,18 @@ public abstract class XContentSettingsLoader implements SettingsLoader { sb.append(pathEle).append('.'); } sb.append(fieldName); - settings.put(sb.toString(), parser.text()); + String key = sb.toString(); + String currentValue = parser.text(); + String previousValue = settings.put(key, currentValue); + if (previousValue != null) { + throw new ElasticsearchParseException( + "duplicate settings key [{}] found at line number [{}], column number [{}], previous value [{}], current value [{}]", + key, + parser.getTokenLocation().lineNumber, + parser.getTokenLocation().columnNumber, + previousValue, + currentValue + ); + } } - } diff --git a/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java b/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java index 48150c704f7..17d78dc56b1 100644 --- a/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java +++ b/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java @@ -23,14 +23,25 @@ import com.carrotsearch.hppc.DoubleArrayList; import com.carrotsearch.hppc.FloatArrayList; import com.carrotsearch.hppc.LongArrayList; import com.carrotsearch.hppc.ObjectArrayList; +import com.google.common.base.Function; import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; -import org.apache.lucene.util.*; -import org.elasticsearch.common.inject.Module; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefArray; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.InPlaceMergeSorter; +import org.apache.lucene.util.IntroSorter; -import java.util.*; +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.RandomAccess; /** Collections-related utility methods. */ public enum CollectionUtils { @@ -368,8 +379,102 @@ public enum CollectionUtils { return Iterators.<T>concat(iterators); } - public static <E> ArrayList<E> newArrayList(E... elements) { - return Lists.newArrayList(elements); + public static <E> ArrayList<E> iterableAsArrayList(Iterable<? 
extends E> elements) { + if (elements == null) { + throw new NullPointerException("elements"); + } + if (elements instanceof Collection) { + return new ArrayList<>((Collection)elements); + } else { + ArrayList<E> list = new ArrayList<>(); + for (E element : elements) { + list.add(element); + } + return list; + } } + public static <E, T> List<T> eagerTransform(List<E> list, Function<E, T> transform) { + if (list == null) { + throw new NullPointerException("list"); + } + if (transform == null) { + throw new NullPointerException("transform"); + } + List<T> result = new ArrayList<>(list.size()); + for (E element : list) { + result.add(transform.apply(element)); + } + return result; + } + + public static <E> ArrayList<E> arrayAsArrayList(E... elements) { + if (elements == null) { + throw new NullPointerException("elements"); + } + return new ArrayList<>(Arrays.asList(elements)); + } + + public static <E> ArrayList<E> asArrayList(E first, E... other) { + if (other == null) { + throw new NullPointerException("other"); + } + ArrayList<E> list = new ArrayList<>(1 + other.length); + list.add(first); + list.addAll(Arrays.asList(other)); + return list; + } + + public static<E> ArrayList<E> asArrayList(E first, E second, E... other) { + if (other == null) { + throw new NullPointerException("other"); + } + ArrayList<E> list = new ArrayList<>(1 + 1 + other.length); + list.add(first); + list.add(second); + list.addAll(Arrays.asList(other)); + return list; + } + + public static <E> ArrayList<E> newSingletonArrayList(E element) { + return new ArrayList<>(Collections.singletonList(element)); + } + + public static <E> LinkedList<E> newLinkedList(Iterable<E> elements) { + if (elements == null) { + throw new NullPointerException("elements"); + } + LinkedList<E> linkedList = new LinkedList<>(); + for (E element : elements) { + linkedList.add(element); + } + return linkedList; + } + + public static <E> List<List<E>> eagerPartition(List<E> list, int size) { + if (list == null) { + throw new NullPointerException("list"); + } + if (size <= 0) { + throw new IllegalArgumentException("size <= 0"); + } + List<List<E>> result = new ArrayList<>((int) Math.ceil(list.size() / size)); + + List<E> accumulator = new ArrayList<>(size); + int count = 0; + for (E element : list) { + if (count == size) { + result.add(accumulator); + accumulator = new ArrayList<>(size); + count = 0; + } + accumulator.add(element); + count++; + } + if (count > 0) { + result.add(accumulator); + } + + return result; + } } diff --git a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java b/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java index 3425d151c34..82ed6f2bde8 100644 --- a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java +++ b/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java @@ -198,7 +198,7 @@ public class MultiDataPathUpgrader { } } } - return new ShardPath(target.path, target.path, IndexMetaData.INDEX_UUID_NA_VALUE /* we don't know */, shard); + return new ShardPath(false, target.path, target.path, IndexMetaData.INDEX_UUID_NA_VALUE /* we don't know */, shard); } private ShardFileInfo[] getShardFileInfo(ShardId shard, NodeEnvironment.NodePath[] paths) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java index 38c0cb23234..aad6cee12fe 100644 --- 
a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java @@ -18,14 +18,20 @@ */ package org.elasticsearch.common.util.concurrent; -import com.google.common.collect.Lists; - import org.elasticsearch.common.Priority; import org.elasticsearch.common.unit.TimeValue; +import java.util.ArrayList; import java.util.List; import java.util.Queue; -import java.util.concurrent.*; +import java.util.concurrent.Callable; +import java.util.concurrent.FutureTask; +import java.util.concurrent.PriorityBlockingQueue; +import java.util.concurrent.RunnableFuture; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; /** @@ -46,9 +52,9 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor { } public Pending[] getPending() { - List<Pending> pending = Lists.newArrayList(); - addPending(Lists.newArrayList(current), pending, true); - addPending(Lists.newArrayList(getQueue()), pending, false); + List<Pending> pending = new ArrayList<>(); + addPending(new ArrayList<>(current), pending, true); + addPending(new ArrayList<>(getQueue()), pending, false); return pending.toArray(new Pending[pending.size()]); } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index 1067d597334..fc10ab4b92a 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.xcontent.support; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; @@ -40,7 +39,7 @@ public class XContentMapValues { * as a single list. 
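
The CollectionUtils additions a few hunks back give the removed Guava call sites one-for-one replacements: arrayAsArrayList stands in for Lists.newArrayList(array), asArrayList(first, rest...) for the Lists.asList(first, rest) pattern seen in Loggers earlier, iterableAsArrayList for Lists.newArrayList(iterable), and eagerTransform/eagerPartition for Lists.transform/Lists.partition (materialized up front rather than as lazy views, presumably hence the "eager" prefix). A quick illustration with invented values:

    List<String> letters  = CollectionUtils.arrayAsArrayList("a", "b", "c");
    List<String> prefixed = CollectionUtils.asArrayList("head", "a", "b"); // ["head", "a", "b"]
    List<List<String>> pages = CollectionUtils.eagerPartition(letters, 2);
    // pages -> [[a, b], [c]]: full chunks of the requested size, remainder in a final short chunk
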
*/ public static List<Object> extractRawValues(String path, Map<String, Object> map) { - List<Object> values = Lists.newArrayList(); + List<Object> values = new ArrayList<>(); String[] pathElements = Strings.splitStringToArray(path, '.'); if (pathElements.length == 0) { return values; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index d1b790672ee..b62db46aca9 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -20,11 +20,17 @@ package org.elasticsearch.discovery.zen; import com.google.common.base.Objects; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateNonMasterUpdateTask; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.ProcessedClusterStateNonMasterUpdateTask; +import org.elasticsearch.cluster.ProcessedClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -33,9 +39,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.service.InternalClusterService; -import org.elasticsearch.cluster.settings.ClusterDynamicSettings; -import org.elasticsearch.cluster.settings.DynamicSettings; -import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.Lifecycle; @@ -61,7 +64,13 @@ import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; @@ -75,7 +84,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; /** @@ -163,7 +171,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen pingTimeout = settings.getAsTime("discovery.zen.ping_timeout", pingTimeout); this.pingTimeout = settings.getAsTime(SETTING_PING_TIMEOUT, pingTimeout); - this.joinTimeout = settings.getAsTime(SETTING_JOIN_TIMEOUT, 
TimeValue.timeValueMillis(pingTimeout.millis() * 20)); + this.joinTimeout = settings.getAsTime(SETTING_JOIN_TIMEOUT, TimeValue.timeValueMillis(this.pingTimeout.millis() * 20)); this.joinRetryAttempts = settings.getAsInt(SETTING_JOIN_RETRY_ATTEMPTS, 3); this.joinRetryDelay = settings.getAsTime(SETTING_JOIN_RETRY_DELAY, TimeValue.timeValueMillis(100)); this.maxPingsFromAnotherMaster = settings.getAsInt(SETTING_MAX_PINGS_FROM_ANOTHER_MASTER, 3); @@ -903,7 +911,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen } // filter responses - List<ZenPing.PingResponse> pingResponses = Lists.newArrayList(); + List<ZenPing.PingResponse> pingResponses = new ArrayList<>(); for (ZenPing.PingResponse pingResponse : fullPingResponses) { DiscoveryNode node = pingResponse.node(); if (masterElectionFilterClientNodes && (node.clientNode() || (!node.masterNode() && !node.dataNode()))) { @@ -928,7 +936,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen } final DiscoveryNode localNode = clusterService.localNode(); - List<DiscoveryNode> pingMasters = newArrayList(); + List<DiscoveryNode> pingMasters = new ArrayList<>(); for (ZenPing.PingResponse pingResponse : pingResponses) { if (pingResponse.master() != null) { // We can't include the local node in pingMasters list, otherwise we may up electing ourselves without diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java index ce65861df61..80b3ec0f4e6 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java @@ -20,7 +20,6 @@ package org.elasticsearch.discovery.zen.elect; import com.carrotsearch.hppc.ObjectContainer; -import com.google.common.collect.Lists; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -29,8 +28,13 @@ import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; /** * @@ -98,7 +102,7 @@ public class ElectMasterService extends AbstractComponent { * @return */ public List<DiscoveryNode> sortByMasterLikelihood(Iterable<DiscoveryNode> nodes) { - ArrayList<DiscoveryNode> sortedNodes = Lists.newArrayList(nodes); + ArrayList<DiscoveryNode> sortedNodes = CollectionUtils.iterableAsArrayList(nodes); CollectionUtil.introSort(sortedNodes, nodeComparator); return sortedNodes; } @@ -111,7 +115,7 @@ public class ElectMasterService extends AbstractComponent { if (sortedNodes == null) { return new DiscoveryNode[0]; } - List<DiscoveryNode> nextPossibleMasters = Lists.newArrayListWithCapacity(numberOfPossibleMasters); + List<DiscoveryNode> nextPossibleMasters = new ArrayList<>(numberOfPossibleMasters); int counter = 0; for (DiscoveryNode nextPossibleMaster : sortedNodes) { if (++counter >= numberOfPossibleMasters) { @@ -142,7 +146,7 @@ public class ElectMasterService extends AbstractComponent { } private List<DiscoveryNode> sortedMasterNodes(Iterable<DiscoveryNode> nodes) { - List<DiscoveryNode> possibleNodes = 
Lists.newArrayList(nodes); + List<DiscoveryNode> possibleNodes = CollectionUtils.iterableAsArrayList(nodes); if (possibleNodes.isEmpty()) { return null; } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java index 06820a9f066..741f877a55c 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java @@ -20,7 +20,6 @@ package org.elasticsearch.discovery.zen.ping.unicast; import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.google.common.collect.Lists; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; @@ -35,6 +34,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -43,17 +43,36 @@ import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.PingContextProvider; import org.elasticsearch.discovery.zen.ping.ZenPing; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.Closeable; import java.io.IOException; -import java.util.*; -import java.util.concurrent.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.RejectedExecutionException; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.common.unit.TimeValue.readTimeValue; import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; import static org.elasticsearch.discovery.zen.ping.ZenPing.PingResponse.readPingResponse; @@ -122,7 +141,7 @@ public class UnicastZenPing extends AbstractLifecycleComponent<ZenPing> implemen for (int i = 0; i < hostArr.length; i++) { hostArr[i] = hostArr[i].trim(); } - List<String> hosts = Lists.newArrayList(hostArr); + List<String> hosts = CollectionUtils.arrayAsArrayList(hostArr); final int 
limitPortCounts; if (hosts.isEmpty()) { // if unicast hosts are not specified, fill with simple defaults on the local machine @@ -135,7 +154,7 @@ public class UnicastZenPing extends AbstractLifecycleComponent<ZenPing> implemen logger.debug("using initial hosts {}, with concurrent_connects [{}]", hosts, concurrentConnects); - List<DiscoveryNode> configuredTargetNodes = Lists.newArrayList(); + List<DiscoveryNode> configuredTargetNodes = new ArrayList<>(); for (String host : hosts) { try { TransportAddress[] addresses = transportService.addressesFromString(host, limitPortCounts); @@ -325,7 +344,7 @@ public class UnicastZenPing extends AbstractLifecycleComponent<ZenPing> implemen List<DiscoveryNode> sortedNodesToPing = electMasterService.sortByMasterLikelihood(nodesToPingSet); // now add the unicast targets first - ArrayList<DiscoveryNode> nodesToPing = Lists.newArrayList(configuredTargetNodes); + List<DiscoveryNode> nodesToPing = CollectionUtils.arrayAsArrayList(configuredTargetNodes); nodesToPing.addAll(sortedNodesToPing); final CountDownLatch latch = new CountDownLatch(nodesToPing.size()); @@ -485,7 +504,7 @@ public class UnicastZenPing extends AbstractLifecycleComponent<ZenPing> implemen } }); - List<PingResponse> pingResponses = newArrayList(temporalResponses); + List<PingResponse> pingResponses = CollectionUtils.iterableAsArrayList(temporalResponses); pingResponses.add(createPingResponse(contextProvider.nodes())); diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index b927cbbde84..43398c55917 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -41,9 +41,11 @@ import org.elasticsearch.env.NodeEnvironment; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; - -import static com.google.common.collect.Lists.newArrayList; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; /** * @@ -221,7 +223,7 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL */ private void pre20Upgrade() throws Exception { MetaData metaData = loadMetaState(); - List<IndexMetaData> updateIndexMetaData = newArrayList(); + List<IndexMetaData> updateIndexMetaData = new ArrayList<>(); for (IndexMetaData indexMetaData : metaData) { IndexMetaData newMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData); if (indexMetaData != newMetaData) { diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index b4bf6367f13..695de267894 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -19,7 +19,6 @@ package org.elasticsearch.gateway; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -34,7 +33,12 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.settings.IndexSettings; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import
java.util.List; +import java.util.Map; +import java.util.Set; /** * The primary shard allocator allocates primary shards that were not created as @@ -234,7 +238,7 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { // Now that we have a map of nodes to versions along with the // number of allocations found (and not ignored), we need to sort // it so the node with the highest version is at the beginning - List<DiscoveryNode> nodesWithHighestVersion = Lists.newArrayList(); + List<DiscoveryNode> nodesWithHighestVersion = new ArrayList<>(); nodesWithHighestVersion.addAll(nodesWithVersion.keySet()); CollectionUtil.timSort(nodesWithHighestVersion, new Comparator<DiscoveryNode>() { @Override diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java index a9362a56fe9..accccc4616c 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java @@ -19,12 +19,15 @@ package org.elasticsearch.gateway; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.*; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -40,6 +43,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicReferenceArray; @@ -86,8 +90,8 @@ public class TransportNodesListGatewayMetaState extends TransportNodesAction<Tra @Override protected NodesGatewayMetaState newResponse(Request request, AtomicReferenceArray responses) { - final List<NodeGatewayMetaState> nodesList = Lists.newArrayList(); - final List<FailedNodeException> failures = Lists.newArrayList(); + final List<NodeGatewayMetaState> nodesList = new ArrayList<>(); + final List<FailedNodeException> failures = new ArrayList<>(); for (int i = 0; i < responses.length(); i++) { Object resp = responses.get(i); if (resp instanceof NodeGatewayMetaState) { // will also filter out null response for unallocated ones diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index f431fcdfeb7..11cbbef9eba 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -19,13 +19,15 @@ package org.elasticsearch.gateway; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import
org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.*; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -45,6 +47,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicReferenceArray; @@ -97,8 +100,8 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction @Override protected NodesGatewayStartedShards newResponse(Request request, AtomicReferenceArray responses) { - final List<NodeGatewayStartedShards> nodesList = Lists.newArrayList(); - final List<FailedNodeException> failures = Lists.newArrayList(); + final List<NodeGatewayStartedShards> nodesList = new ArrayList<>(); + final List<FailedNodeException> failures = new ArrayList<>(); for (int i = 0; i < responses.length(); i++) { Object resp = responses.get(i); if (resp instanceof NodeGatewayStartedShards) { // will also filter out null response for unallocated ones diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 7be71c89ece..d22a7540491 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -29,7 +29,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.*; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; @@ -40,7 +39,6 @@ import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.deletionpolicy.DeletionPolicyModule; import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.shard.StoreRecoveryService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.settings.IndexSettings; @@ -59,8 +57,10 @@ import org.elasticsearch.plugins.PluginsService; import java.io.Closeable; import java.io.IOException; +import java.nio.file.Path; import java.util.HashMap; import java.util.Iterator; +import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -102,7 +102,25 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone private final NodeEnvironment nodeEnv; private final IndicesService indicesServices; - private volatile ImmutableMap<Integer, Tuple<IndexShard, Injector>> shards = ImmutableMap.of(); + private volatile ImmutableMap<Integer, IndexShardInjectorPair> shards = ImmutableMap.of(); + + private static class IndexShardInjectorPair { + private final IndexShard indexShard; + 
private final Injector injector; + + public IndexShardInjectorPair(IndexShard indexShard, Injector injector) { + this.indexShard = indexShard; + this.injector = injector; + } + + public IndexShard getIndexShard() { + return indexShard; + } + + public Injector getInjector() { + return injector; + } + } private final AtomicBoolean closed = new AtomicBoolean(false); private final AtomicBoolean deleted = new AtomicBoolean(false); @@ -147,10 +165,10 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone @Override public Iterator<IndexShard> iterator() { - return Iterators.transform(shards.values().iterator(), new Function<Tuple<IndexShard, Injector>, IndexShard>() { + return Iterators.transform(shards.values().iterator(), new Function<IndexShardInjectorPair, IndexShard>() { @Override - public IndexShard apply(Tuple<IndexShard, Injector> input) { - return input.v1(); + public IndexShard apply(IndexShardInjectorPair input) { + return input.getIndexShard(); } }); } @@ -164,9 +182,9 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone */ @Nullable public IndexShard shard(int shardId) { - Tuple<IndexShard, Injector> indexShardInjectorTuple = shards.get(shardId); - if (indexShardInjectorTuple != null) { - return indexShardInjectorTuple.v1(); + IndexShardInjectorPair indexShardInjectorPair = shards.get(shardId); + if (indexShardInjectorPair != null) { + return indexShardInjectorPair.getIndexShard(); } return null; } @@ -244,11 +262,11 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone * Return the shard injector for the provided id, or throw an exception if there is no such shard. */ public Injector shardInjectorSafe(int shardId) { - Tuple<IndexShard, Injector> tuple = shards.get(shardId); - if (tuple == null) { + IndexShardInjectorPair indexShardInjectorPair = shards.get(shardId); + if (indexShardInjectorPair == null) { throw new ShardNotFoundException(new ShardId(index, shardId)); } - return tuple.v2(); + return indexShardInjectorPair.getInjector(); } public String indexUUID() { @@ -286,6 +304,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone Injector shardInjector = null; try { lock = nodeEnv.shardLock(shardId, TimeUnit.SECONDS.toMillis(5)); + indicesLifecycle.beforeIndexShardCreated(shardId, indexSettings); ShardPath path; try { path = ShardPath.loadShardPath(logger, nodeEnv, shardId, indexSettings); @@ -299,8 +318,23 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone throw t; } } + if (path == null) { - path = ShardPath.selectNewPathForShard(nodeEnv, shardId, indexSettings, routing.getExpectedShardSize() == ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE ? getAvgShardSizeInBytes() : routing.getExpectedShardSize(), this); + // TODO: we should, instead, hold a "bytes reserved" of how large we anticipate this shard will be, e.g. 
for a shard + // that's being relocated/replicated we know how large it will become once it's done copying: + + // Count up how many shards are currently on each data path: + Map<Path, Integer> dataPathToShardCount = new HashMap<>(); + for (IndexShard shard : this) { + Path dataPath = shard.shardPath().getRootStatePath(); + Integer curCount = dataPathToShardCount.get(dataPath); + if (curCount == null) { + curCount = 0; + } + dataPathToShardCount.put(dataPath, curCount + 1); + } + path = ShardPath.selectNewPathForShard(nodeEnv, shardId, indexSettings, routing.getExpectedShardSize() == ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE ? getAvgShardSizeInBytes() : routing.getExpectedShardSize(), + dataPathToShardCount); logger.debug("{} creating using a new path [{}]", shardId, path); } else { logger.debug("{} creating using an existing path [{}]", shardId, path); @@ -310,7 +344,6 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone throw new IndexShardAlreadyExistsException(shardId + " already exists"); } - indicesLifecycle.beforeIndexShardCreated(shardId, indexSettings); logger.debug("creating shard_id {}", shardId); // if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary. final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false || @@ -348,7 +381,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone indicesLifecycle.indexShardStateChanged(indexShard, null, "shard created"); indicesLifecycle.afterIndexShardCreated(indexShard); - shards = newMapBuilder(shards).put(shardId.id(), new Tuple<>(indexShard, shardInjector)).immutableMap(); + shards = newMapBuilder(shards).put(shardId.id(), new IndexShardInjectorPair(indexShard, shardInjector)).immutableMap(); success = true; return indexShard; } catch (IOException e) { @@ -374,10 +407,10 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone return; } logger.debug("[{}] closing...
(reason: [{}])", shardId, reason); - HashMap<Integer, Tuple<IndexShard, Injector>> tmpShardsMap = newHashMap(shards); - Tuple<IndexShard, Injector> tuple = tmpShardsMap.remove(shardId); - indexShard = tuple.v1(); - shardInjector = tuple.v2(); + HashMap<Integer, IndexShardInjectorPair> tmpShardsMap = newHashMap(shards); + IndexShardInjectorPair indexShardInjectorPair = tmpShardsMap.remove(shardId); + indexShard = indexShardInjectorPair.getIndexShard(); + shardInjector = indexShardInjectorPair.getInjector(); shards = ImmutableMap.copyOf(tmpShardsMap); closeShardInjector(reason, sId, shardInjector, indexShard); logger.debug("[{}] closed (reason: [{}])", shardId, reason); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisModule.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisModule.java index 38f846a57f4..21667cf6128 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisModule.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisModule.java @@ -19,15 +19,12 @@ package org.elasticsearch.index.analysis; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Scopes; import org.elasticsearch.common.inject.assistedinject.FactoryProvider; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.NoClassSettingsException; import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory; import org.elasticsearch.index.analysis.compound.HyphenationCompoundWordTokenFilterFactory; import org.elasticsearch.indices.analysis.IndicesAnalysisService; @@ -108,7 +105,7 @@ public class AnalysisModule extends AbstractModule { private final IndicesAnalysisService indicesAnalysisService; - private final LinkedList<AnalysisBinderProcessor> processors = Lists.newLinkedList(); + private final LinkedList<AnalysisBinderProcessor> processors = new LinkedList<>(); private final Map<String, Class<? extends CharFilterFactory>> charFilters = Maps.newHashMap(); private final Map<String, Class<? extends TokenFilterFactory>> tokenFilters = Maps.newHashMap(); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java index 1cc37b8cda8..5b7f54289d3 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.Index; +import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.analysis.IndicesAnalysisService; @@ -215,19 +216,38 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable Map<String, NamedAnalyzer> analyzers = newHashMap(); for (AnalyzerProvider analyzerFactory : analyzerProviders.values()) { + /* + * Lucene defaults positionIncrementGap to 0 in all analyzers but + * Elasticsearch defaults them to 0 only before version 2.0 + * and 100 afterwards so we override the positionIncrementGap if it + * doesn't match here. 
+ */ + int overridePositionIncrementGap = StringFieldMapper.Defaults.positionIncrementGap(Version.indexCreated(indexSettings)); if (analyzerFactory instanceof CustomAnalyzerProvider) { ((CustomAnalyzerProvider) analyzerFactory).build(this); + /* + * Custom analyzers already default to the correct, version + * dependent positionIncrementGap and the user is able to + * configure the positionIncrementGap directly on the analyzer, so + * we disable overriding the positionIncrementGap to preserve the + * user's setting. + */ + overridePositionIncrementGap = Integer.MIN_VALUE; } Analyzer analyzerF = analyzerFactory.get(); if (analyzerF == null) { throw new IllegalArgumentException("analyzer [" + analyzerFactory.name() + "] created null analyzer"); } NamedAnalyzer analyzer; - // if we got a named analyzer back, use it... if (analyzerF instanceof NamedAnalyzer) { + // if we got a named analyzer back, use it... analyzer = (NamedAnalyzer) analyzerF; + if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) { + // unless the positionIncrementGap needs to be overridden + analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap); + } } else { - analyzer = new NamedAnalyzer(analyzerFactory.name(), analyzerFactory.scope(), analyzerF); + analyzer = new NamedAnalyzer(analyzerFactory.name(), analyzerFactory.scope(), analyzerF, overridePositionIncrementGap); } analyzers.put(analyzerFactory.name(), analyzer); analyzers.put(Strings.toCamelCase(analyzerFactory.name()), analyzer); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java index 97cd57e4ce8..b68a321359e 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java @@ -44,11 +44,11 @@ public final class CustomAnalyzer extends Analyzer { } public CustomAnalyzer(TokenizerFactory tokenizerFactory, CharFilterFactory[] charFilters, TokenFilterFactory[] tokenFilters, - int positionOffsetGap, int offsetGap) { + int positionIncrementGap, int offsetGap) { this.tokenizerFactory = tokenizerFactory; this.charFilters = charFilters; this.tokenFilters = tokenFilters; - this.positionIncrementGap = positionOffsetGap; + this.positionIncrementGap = positionIncrementGap; this.offsetGap = offsetGap; } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java index 8975ba00440..43f0008d61c 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java @@ -19,16 +19,17 @@ package org.elasticsearch.index.analysis; +import org.elasticsearch.Version; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; +import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.settings.IndexSettings; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; - /** * A custom analyzer that is built out of a single {@link org.apache.lucene.analysis.Tokenizer} and a list * of {@link org.apache.lucene.analysis.TokenFilter}s.
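Side note on the position_offset_gap to position_increment_gap rename running through these files: the gap controls how far apart the token positions of consecutive values of a multi-valued field land, which in turn decides whether a phrase query can match across a value boundary. The following self-contained sketch is a toy model, not Elasticsearch or Lucene code (the class and helper names are invented for illustration); it mirrors the position arithmetic an analyzer's getPositionIncrementGap produces, showing that with the pre-2.0 default gap of 0 the phrase "dog quick" matches across two values, while with the post-2.0 default of 100 it no longer does.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PositionIncrementGapDemo {

    static final class Token {
        final String term;
        final int position;
        Token(String term, int position) {
            this.term = term;
            this.position = position;
        }
    }

    // Whitespace-tokenize each value of a multi-valued field, bumping the
    // position by `gap` extra increments between consecutive values.
    static List<Token> tokenize(List<String> values, int gap) {
        List<Token> tokens = new ArrayList<>();
        int pos = -1;
        boolean firstValue = true;
        for (String value : values) {
            if (firstValue == false) {
                pos += gap; // the analyzer's position increment gap
            }
            firstValue = false;
            for (String term : value.split("\\s+")) {
                tokens.add(new Token(term, ++pos));
            }
        }
        return tokens;
    }

    // A slop-0 phrase "t1 t2" matches iff some t1 sits exactly one
    // position before some t2.
    static boolean phraseMatches(List<Token> tokens, String t1, String t2) {
        for (Token a : tokens) {
            for (Token b : tokens) {
                if (a.term.equals(t1) && b.term.equals(t2) && b.position == a.position + 1) {
                    return true;
                }
            }
        }
        return false;
    }

    public static void main(String[] args) {
        List<String> values = Arrays.asList("lazy dog", "quick fox");
        System.out.println(phraseMatches(tokenize(values, 0), "dog", "quick"));   // true: gap 0 lets phrases span values
        System.out.println(phraseMatches(tokenize(values, 100), "dog", "quick")); // false: gap 100 separates the values
    }
}

This is the behavior the StringFieldMapper change further down relies on when it documents the new default of 100: phrase queries of reasonably high slop stop matching across field values.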
@@ -57,7 +58,7 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find tokenizer under name [" + tokenizerName + "]"); } - List<CharFilterFactory> charFilters = newArrayList(); + List<CharFilterFactory> charFilters = new ArrayList<>(); String[] charFilterNames = analyzerSettings.getAsArray("char_filter"); for (String charFilterName : charFilterNames) { CharFilterFactory charFilter = analysisService.charFilter(charFilterName); @@ -67,7 +68,7 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom charFilters.add(charFilter); } - List<TokenFilterFactory> tokenFilters = newArrayList(); + List<TokenFilterFactory> tokenFilters = new ArrayList<>(); String[] tokenFilterNames = analyzerSettings.getAsArray("filter"); for (String tokenFilterName : tokenFilterNames) { TokenFilterFactory tokenFilter = analysisService.tokenFilter(tokenFilterName); @@ -77,13 +78,28 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom tokenFilters.add(tokenFilter); } - int positionOffsetGap = analyzerSettings.getAsInt("position_offset_gap", 0); - int offsetGap = analyzerSettings.getAsInt("offset_gap", -1); + int positionIncrementGap = StringFieldMapper.Defaults.positionIncrementGap(Version.indexCreated(indexSettings)); + if (analyzerSettings.getAsMap().containsKey("position_offset_gap")) { + if (Version.indexCreated(indexSettings).before(Version.V_2_0_0)) { + if (analyzerSettings.getAsMap().containsKey("position_increment_gap")) { + throw new IllegalArgumentException("Custom Analyzer [" + name() + + "] defined both [position_offset_gap] and [position_increment_gap], use only [position_increment_gap]"); + } + positionIncrementGap = analyzerSettings.getAsInt("position_offset_gap", positionIncrementGap); + } else { + throw new IllegalArgumentException("Option [position_offset_gap] in Custom Analyzer [" + name() + + "] has been renamed, please use [position_increment_gap] instead."); + } + } + + positionIncrementGap = analyzerSettings.getAsInt("position_increment_gap", positionIncrementGap); + + int offsetGap = analyzerSettings.getAsInt("offset_gap", -1); this.customAnalyzer = new CustomAnalyzer(tokenizer, charFilters.toArray(new CharFilterFactory[charFilters.size()]), tokenFilters.toArray(new TokenFilterFactory[tokenFilters.size()]), - positionOffsetGap, + positionIncrementGap, offsetGap ); } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java index fde961c52ec..f3a50390667 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java @@ -31,10 +31,10 @@ public class NamedAnalyzer extends DelegatingAnalyzerWrapper { private final String name; private final AnalyzerScope scope; private final Analyzer analyzer; - private final int positionOffsetGap; + private final int positionIncrementGap; - public NamedAnalyzer(NamedAnalyzer analyzer, int positionOffsetGap) { - this(analyzer.name(), analyzer.scope(), analyzer.analyzer(), positionOffsetGap); + public NamedAnalyzer(NamedAnalyzer analyzer, int positionIncrementGap) { + this(analyzer.name(), analyzer.scope(), analyzer.analyzer(), positionIncrementGap); } public NamedAnalyzer(String name, Analyzer analyzer) { @@ -45,12 +45,12 @@ public class NamedAnalyzer extends DelegatingAnalyzerWrapper { this(name, scope,
analyzer, Integer.MIN_VALUE); } - public NamedAnalyzer(String name, AnalyzerScope scope, Analyzer analyzer, int positionOffsetGap) { + public NamedAnalyzer(String name, AnalyzerScope scope, Analyzer analyzer, int positionIncrementGap) { super(ERROR_STRATEGY); this.name = name; this.scope = scope; this.analyzer = analyzer; - this.positionOffsetGap = positionOffsetGap; + this.positionIncrementGap = positionIncrementGap; } /** @@ -81,8 +81,8 @@ public class NamedAnalyzer extends DelegatingAnalyzerWrapper { @Override public int getPositionIncrementGap(String fieldName) { - if (positionOffsetGap != Integer.MIN_VALUE) { - return positionOffsetGap; + if (positionIncrementGap != Integer.MIN_VALUE) { + return positionIncrementGap; } return super.getPositionIncrementGap(fieldName); } diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 597cf9b8a05..5bd733c48f1 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -19,11 +19,25 @@ package org.elasticsearch.index.engine; -import com.google.common.collect.Lists; -import org.apache.lucene.index.*; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriter.IndexReaderWarmer; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.LiveIndexWriterConfig; +import org.apache.lucene.index.MergePolicy; +import org.apache.lucene.index.MultiReader; +import org.apache.lucene.index.SegmentCommitInfo; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.SearcherFactory; +import org.apache.lucene.search.SearcherManager; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockObtainFailedException; @@ -49,8 +63,11 @@ import org.elasticsearch.index.indexing.ShardIndexingService; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.merge.OnGoingMerge; -import org.elasticsearch.index.shard.*; import org.elasticsearch.index.search.nested.IncludeNestedDocsQuery; +import org.elasticsearch.index.shard.ElasticsearchMergePolicy; +import org.elasticsearch.index.shard.MergeSchedulerConfig; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogCorruptedException; @@ -59,7 +76,12 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import 
java.util.concurrent.locks.Lock; @@ -1085,7 +1107,7 @@ public class InternalEngine extends Engine { newSearcher = searcher; } else { // figure out the newSearcher, with only the new readers that are relevant for us - List<IndexReader> readers = Lists.newArrayList(); + List<IndexReader> readers = new ArrayList<>(); for (LeafReaderContext newReaderContext : reader.leaves()) { if (isMergedSegment(newReaderContext.reader())) { // merged segments are already handled by IndexWriterConfig.setMergedSegmentWarmer diff --git a/core/src/main/java/org/elasticsearch/index/get/GetResult.java b/core/src/main/java/org/elasticsearch/index/get/GetResult.java index 996de8cf1bf..7bee6e4b824 100644 --- a/core/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/core/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.get; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; @@ -34,6 +33,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -208,8 +208,8 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContent { } public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params) throws IOException { - List<GetField> metaFields = Lists.newArrayList(); - List<GetField> otherFields = Lists.newArrayList(); + List<GetField> metaFields = new ArrayList<>(); + List<GetField> otherFields = new ArrayList<>(); if (fields != null && !fields.isEmpty()) { for (GetField field : fields.values()) { if (field.getValues().isEmpty()) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheResponse.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperForType.java similarity index 63% rename from core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheResponse.java rename to core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperForType.java index c2931df6003..1957f7e9aa5 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheResponse.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperForType.java @@ -17,20 +17,22 @@ * under the License. 
*/ -package org.elasticsearch.action.admin.indices.cache.clear; +package org.elasticsearch.index.mapper; -import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; -import org.elasticsearch.index.shard.ShardId; +public class DocumentMapperForType { + private final DocumentMapper documentMapper; + private final Mapping mapping; -/** - * - */ -class ShardClearIndicesCacheResponse extends BroadcastShardResponse { - - ShardClearIndicesCacheResponse() { + public DocumentMapperForType(DocumentMapper documentMapper, Mapping mapping) { + this.mapping = mapping; + this.documentMapper = documentMapper; } - ShardClearIndicesCacheResponse(ShardId shardId) { - super(shardId); + public DocumentMapper getDocumentMapper() { + return documentMapper; } -} \ No newline at end of file + + public Mapping getMapping() { + return mapping; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 2f68003cc42..2551b0774a8 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -100,33 +100,38 @@ class DocumentParser implements Closeable { context.reset(parser, new ParseContext.Document(), source); // will result in START_OBJECT - int countDownTokens = 0; XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.START_OBJECT) { throw new MapperParsingException("Malformed content, must start with an object"); } + boolean emptyDoc = false; - token = parser.nextToken(); - if (token == XContentParser.Token.END_OBJECT) { - // empty doc, we can handle it... - emptyDoc = true; - } else if (token != XContentParser.Token.FIELD_NAME) { - throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist"); + if (mapping.root.isEnabled()) { + token = parser.nextToken(); + if (token == XContentParser.Token.END_OBJECT) { + // empty doc, we can handle it... + emptyDoc = true; + } else if (token != XContentParser.Token.FIELD_NAME) { + throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist"); + } } for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) { metadataMapper.preParse(context); } - if (!emptyDoc) { + if (mapping.root.isEnabled() == false) { + // entire type is disabled + parser.skipChildren(); + } else if (emptyDoc == false) { Mapper update = parseObject(context, mapping.root); if (update != null) { context.addDynamicMappingsUpdate(update); } } - for (int i = 0; i < countDownTokens; i++) { - parser.nextToken(); + for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) { + metadataMapper.postParse(context); } // try to parse the next token, this should be null if the object is ended properly @@ -135,12 +140,11 @@ class DocumentParser implements Closeable { && source.parser() == null && parser != null) { // only check for end of tokens if we created the parser here token = parser.nextToken(); - assert token == null; // double check, in tests, that we didn't end parsing early + if (token != null) { + throw new IllegalArgumentException("Malformed content, found extra data after parsing: " + token); + } } - for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) { - metadataMapper.postParse(context); - } } catch (Throwable e) { // if its already a mapper parsing exception, no need to wrap it... 
if (e instanceof MapperParsingException) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index db2d0b20390..4300e4cc858 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -26,24 +26,18 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterators; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.*; import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; @@ -65,12 +59,7 @@ import org.elasticsearch.script.ScriptService; import java.io.Closeable; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.locks.ReentrantReadWriteLock; @@ -395,16 +384,16 @@ public class MapperService extends AbstractIndexComponent implements Closeable { * Returns the document mapper created, including a mapping update if the * type has been dynamically created. 
*/ - public Tuple<DocumentMapper, Mapping> documentMapperWithAutoCreate(String type) { + public DocumentMapperForType documentMapperWithAutoCreate(String type) { DocumentMapper mapper = mappers.get(type); if (mapper != null) { - return Tuple.tuple(mapper, null); + return new DocumentMapperForType(mapper, null); } if (!dynamic) { throw new TypeMissingException(index, type, "trying to auto create mapping, but dynamic mapping is disabled"); } mapper = parse(type, null, true); - return Tuple.tuple(mapper, mapper.mapping()); + return new DocumentMapperForType(mapper, mapper.mapping()); } /** diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java index 5c07e8a9192..edf75621c1e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java @@ -19,10 +19,8 @@ package org.elasticsearch.index.mapper; -import com.carrotsearch.hppc.ObjectObjectMap; import com.carrotsearch.hppc.ObjectObjectHashMap; -import com.google.common.collect.Lists; - +import com.carrotsearch.hppc.ObjectObjectMap; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -56,7 +54,7 @@ public abstract class ParseContext { private ObjectObjectMap<Object, IndexableField> keyedFields; private Document(String path, Document parent) { - fields = Lists.newArrayList(); + fields = new ArrayList<>(); this.path = path; this.prefix = path.isEmpty() ? "" : path + "."; this.parent = parent; @@ -378,7 +376,7 @@ public abstract class ParseContext { private Document document; - private List<Document> documents = Lists.newArrayList(); + private List<Document> documents = new ArrayList<>(); @Nullable private final Settings indexSettings; @@ -411,7 +409,7 @@ public abstract class ParseContext { this.parser = parser; this.document = document; if (document != null) { - this.documents = Lists.newArrayList(); + this.documents = new ArrayList<>(); this.documents.add(document); } else { this.documents = null; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index 6297c6a2a78..dab41b4a78a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.index.mapper.core; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.lucene.analysis.Analyzer; @@ -53,6 +52,7 @@ import org.elasticsearch.search.suggest.context.ContextMapping; import org.elasticsearch.search.suggest.context.ContextMapping.ContextConfig; import java.io.IOException; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Locale; @@ -334,7 +334,7 @@ public class CompletionFieldMapper extends FieldMapper { String surfaceForm = null; BytesRef payload = null; long weight = -1; - List<String> inputs = Lists.newArrayListWithExpectedSize(4); + List<String> inputs = new ArrayList<>(4); SortedMap<String, ContextConfig> contextConfig = null; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 
a255a2f106f..cb89cb4973b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -24,6 +24,7 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,6 +53,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; public class StringFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { public static final String CONTENT_TYPE = "string"; + private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1; public static class Defaults { public static final MappedFieldType FIELD_TYPE = new StringFieldType(); @@ -62,15 +64,38 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc // NOTE, when adding defaults here, make sure you add them in the builder public static final String NULL_VALUE = null; - public static final int POSITION_OFFSET_GAP = 0; + + /** + * Post 2.0 default for position_increment_gap. Set to 100 so that + * phrase queries of reasonably high slop will not match across field + * values. + */ + public static final int POSITION_INCREMENT_GAP = 100; + public static final int POSITION_INCREMENT_GAP_PRE_2_0 = 0; + public static final int IGNORE_ABOVE = -1; + + /** + * The default position_increment_gap for a particular version of Elasticsearch. + */ + public static int positionIncrementGap(Version version) { + if (version.before(Version.V_2_0_0_beta1)) { + return POSITION_INCREMENT_GAP_PRE_2_0; + } + return POSITION_INCREMENT_GAP; + } } public static class Builder extends FieldMapper.Builder<Builder, StringFieldMapper> { protected String nullValue = Defaults.NULL_VALUE; - protected int positionOffsetGap = Defaults.POSITION_OFFSET_GAP; + /** + * The distance between tokens from different values in the same field. + * POSITION_INCREMENT_GAP_USE_ANALYZER means default to the analyzer's + * setting which in turn defaults to Defaults.POSITION_INCREMENT_GAP. 
+ */ + protected int positionIncrementGap = POSITION_INCREMENT_GAP_USE_ANALYZER; protected int ignoreAbove = Defaults.IGNORE_ABOVE; @@ -85,8 +110,8 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc return this; } - public Builder positionOffsetGap(int positionOffsetGap) { - this.positionOffsetGap = positionOffsetGap; + public Builder positionIncrementGap(int positionIncrementGap) { + this.positionIncrementGap = positionIncrementGap; return this; } @@ -102,10 +127,10 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc @Override public StringFieldMapper build(BuilderContext context) { - if (positionOffsetGap > 0) { - fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionOffsetGap)); - fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionOffsetGap)); - fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionOffsetGap)); + if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) { + fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionIncrementGap)); + fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionIncrementGap)); + fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionIncrementGap)); } // if the field is not analyzed, then by default, we should omit norms and have docs only // index options, as probably what the user really wants @@ -124,7 +149,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc } setupFieldType(context); StringFieldMapper fieldMapper = new StringFieldMapper( - name, fieldType, defaultFieldType, positionOffsetGap, ignoreAbove, + name, fieldType, defaultFieldType, positionIncrementGap, ignoreAbove, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; @@ -153,10 +178,15 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc } builder.searchQuotedAnalyzer(analyzer); iterator.remove(); - } else if (propName.equals("position_offset_gap")) { - builder.positionOffsetGap(XContentMapValues.nodeIntegerValue(propNode, -1)); + } else if (propName.equals("position_increment_gap") || + parserContext.indexVersionCreated().before(Version.V_2_0_0) && propName.equals("position_offset_gap")) { + int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1); + if (newPositionIncrementGap < 0) { + throw new MapperParsingException("position_increment_gap less than 0 isn't allowed."); + } + builder.positionIncrementGap(newPositionIncrementGap); // we need to update to actual analyzers if they are not set in this case... - // so we can inject the position offset gap... + // so we can inject the position increment gap...
if (builder.fieldType().indexAnalyzer() == null) { builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer()); } @@ -213,17 +243,17 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc } private Boolean includeInAll; - private int positionOffsetGap; + private int positionIncrementGap; private int ignoreAbove; protected StringFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - int positionOffsetGap, int ignoreAbove, + int positionIncrementGap, int ignoreAbove, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) { throw new MapperParsingException("Field [" + fieldType.names().fullName() + "] cannot be analyzed and have doc values"); } - this.positionOffsetGap = positionOffsetGap; + this.positionIncrementGap = positionIncrementGap; this.ignoreAbove = ignoreAbove; } @@ -251,8 +281,8 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc return true; } - public int getPositionOffsetGap() { - return this.positionOffsetGap; + public int getPositionIncrementGap() { + return this.positionIncrementGap; } public int getIgnoreAbove() { @@ -354,8 +384,8 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc builder.field("include_in_all", false); } - if (includeDefaults || positionOffsetGap != Defaults.POSITION_OFFSET_GAP) { - builder.field("position_offset_gap", positionOffsetGap); + if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) { + builder.field("position_increment_gap", positionIncrementGap); } NamedAnalyzer searchQuoteAnalyzer = fieldType().searchQuoteAnalyzer(); if (searchQuoteAnalyzer != null && !searchQuoteAnalyzer.name().equals(fieldType().searchAnalyzer().name())) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java index 70c3276a56f..3ff0611fca0 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java @@ -58,7 +58,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.object; import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType; @@ -129,7 +128,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, protected Boolean includeInAll; - protected final List<Mapper.Builder> mappersBuilders = newArrayList(); + protected final List<Mapper.Builder> mappersBuilders = new ArrayList<>(); public Builder(String name) { super(name); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index 2ba2ada1570..cb0815aaee1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ -19,9 +19,7 @@ package org.elasticsearch.index.mapper.object; -import com.google.common.collect.Lists; import 
com.google.common.collect.Sets; - import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; @@ -29,14 +27,22 @@ import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MergeResult; +import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.settings.IndexSettings; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter; @@ -57,11 +63,11 @@ public class RootObjectMapper extends ObjectMapper { public static class Builder extends ObjectMapper.Builder<Builder, RootObjectMapper> { - protected final List<DynamicTemplate> dynamicTemplates = newArrayList(); + protected final List<DynamicTemplate> dynamicTemplates = new ArrayList<>(); // we use this to filter out seen date formats, because we might get duplicates during merging protected Set<String> seenDateFormats = Sets.newHashSet(); - protected List<FormatDateTimeFormatter> dynamicDateTimeFormatters = newArrayList(); + protected List<FormatDateTimeFormatter> dynamicDateTimeFormatters = new ArrayList<>(); protected boolean dateDetection = Defaults.DATE_DETECTION; protected boolean numericDetection = Defaults.NUMERIC_DETECTION; @@ -144,7 +150,7 @@ public class RootObjectMapper extends ObjectMapper { protected boolean processField(ObjectMapper.Builder builder, String fieldName, Object fieldNode) { if (fieldName.equals("date_formats") || fieldName.equals("dynamic_date_formats")) { - List<FormatDateTimeFormatter> dateTimeFormatters = newArrayList(); + List<FormatDateTimeFormatter> dateTimeFormatters = new ArrayList<>(); if (fieldNode instanceof List) { for (Object node1 : (List) fieldNode) { if (node1.toString().startsWith("epoch_")) { @@ -262,7 +268,7 @@ public class RootObjectMapper extends ObjectMapper { RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith; if (!mergeResult.simulate()) { // merge them - List<DynamicTemplate> mergedTemplates = Lists.newArrayList(Arrays.asList(this.dynamicTemplates)); + List<DynamicTemplate> mergedTemplates = new ArrayList<>(Arrays.asList(this.dynamicTemplates)); for (DynamicTemplate template : mergeWithObject.dynamicTemplates) { boolean replaced = false; for (int i = 0; i < mergedTemplates.size(); i++) { diff --git a/core/src/main/java/org/elasticsearch/index/query/AndQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/AndQueryBuilder.java index e0ecafca285..1d556635103 100644 --- a/core/src/main/java/org/elasticsearch/index/query/AndQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/AndQueryBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import com.google.common.collect.Lists; import 
org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -32,7 +31,7 @@ import java.util.ArrayList; @Deprecated public class AndQueryBuilder extends QueryBuilder { - private ArrayList<QueryBuilder> filters = Lists.newArrayList(); + private ArrayList<QueryBuilder> filters = new ArrayList<>(); private String queryName; diff --git a/core/src/main/java/org/elasticsearch/index/query/AndQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/AndQueryParser.java index 8c1969de561..bb0e1cbbcd5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/AndQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/AndQueryParser.java @@ -28,8 +28,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; -import static com.google.common.collect.Lists.newArrayList; - /** * */ @@ -51,7 +49,7 @@ public class AndQueryParser implements QueryParser { public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException { XContentParser parser = parseContext.parser(); - ArrayList<Query> queries = newArrayList(); + ArrayList<Query> queries = new ArrayList<>(); boolean queriesFound = false; String queryName = null; diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java index 6fa7faa85db..66ea85eb818 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java @@ -29,9 +29,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded; /** @@ -59,7 +59,7 @@ public class BoolQueryParser implements QueryParser { float boost = 1.0f; String minimumShouldMatch = null; - List<BooleanClause> clauses = newArrayList(); + List<BooleanClause> clauses = new ArrayList<>(); boolean adjustPureNegative = true; String queryName = null; diff --git a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java index ddf9d95d166..3724a05df9a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java @@ -24,8 +24,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; -import static com.google.common.collect.Lists.newArrayList; - /** * A query that generates the union of documents produced by its sub-queries, and that scores each document * with the maximum score for that document as produced by any sub-query, plus a tie breaking increment for any @@ -33,7 +31,7 @@ import static com.google.common.collect.Lists.newArrayList; */ public class DisMaxQueryBuilder extends QueryBuilder implements BoostableQueryBuilder<DisMaxQueryBuilder> { - private ArrayList<QueryBuilder> queries = newArrayList(); + private ArrayList<QueryBuilder> queries = new ArrayList<>(); private float boost = -1; diff --git a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryParser.java index 2747387fbd7..dc901d66f83 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryParser.java @@ -26,10 +26,9 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; - /** * */ @@ -53,7 +52,7 @@ public class DisMaxQueryParser implements QueryParser { float boost = 1.0f; float tieBreaker = 0.0f; - List<Query> queries = newArrayList(); + List<Query> queries = new ArrayList<>(); boolean queriesFound = false; String queryName = null; diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java index 27d14ecb137..400384bdd21 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java @@ -19,13 +19,12 @@ package org.elasticsearch.index.query; -import com.google.common.collect.Lists; - import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.List; public class GeoPolygonQueryBuilder extends QueryBuilder { @@ -34,7 +33,7 @@ public class GeoPolygonQueryBuilder extends QueryBuilder { private final String name; - private final List<GeoPoint> shell = Lists.newArrayList(); + private final List<GeoPoint> shell = new ArrayList<>(); private String queryName; diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryParser.java index 00401bc8ca8..e4cf677954b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryParser.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.query; -import com.google.common.collect.Lists; - import org.apache.lucene.search.Query; import org.elasticsearch.Version; import org.elasticsearch.common.geo.GeoPoint; @@ -34,6 +32,7 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.search.geo.GeoPolygonQuery; import java.io.IOException; +import java.util.ArrayList; import java.util.List; /** @@ -68,7 +67,7 @@ public class GeoPolygonQueryParser implements QueryParser { String fieldName = null; - List<GeoPoint> shell = Lists.newArrayList(); + List<GeoPoint> shell = new ArrayList<>(); final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0); boolean coerce = false; diff --git a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java index d911005eb89..87a76689a67 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java @@ -21,17 +21,16 @@ package org.elasticsearch.index.query; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiDocValues; -import org.apache.lucene.search.*; import org.apache.lucene.search.Filter; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import 
org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.join.BitDocIdSetFilter; -import org.elasticsearch.common.ParseField; import org.apache.lucene.search.join.JoinUtil; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; @@ -45,8 +44,8 @@ import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery; import org.elasticsearch.index.search.child.ChildrenQuery; import org.elasticsearch.index.search.child.ScoreType; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.SubSearchContext; import java.io.IOException; @@ -82,7 +81,7 @@ public class HasChildQueryParser implements QueryParser { int maxChildren = 0; int shortCircuitParentDocSet = 8192; String queryName = null; - Tuple<String, SubSearchContext> innerHits = null; + InnerHitsSubSearchContext innerHits = null; String currentFieldName = null; XContentParser.Token token; @@ -152,8 +151,8 @@ public class HasChildQueryParser implements QueryParser { if (innerHits != null) { ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries()); - InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.v2(), parsedQuery, null, parseContext.mapperService(), childDocMapper); - String name = innerHits.v1() != null ? innerHits.v1() : childType; + InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, parseContext.mapperService(), childDocMapper); + String name = innerHits.getName() != null ? 
innerHits.getName() : childType; parseContext.addInnerHits(name, parentChildInnerHits); } @@ -255,11 +254,9 @@ public class HasChildQueryParser implements QueryParser { throw new IllegalArgumentException("Search context is required to be set"); } + IndexSearcher indexSearcher = searchContext.searcher(); String joinField = ParentFieldMapper.joinField(parentType); - IndexReader indexReader = searchContext.searcher().getIndexReader(); - IndexSearcher indexSearcher = new IndexSearcher(indexReader); - indexSearcher.setQueryCache(null); - IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexReader); + IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexSearcher.getIndexReader()); MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType); return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren); } diff --git a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java index 3d3a6625c7d..67422d0aa2f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java @@ -18,15 +18,10 @@ */ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.Filter; -import org.apache.lucene.search.Query; +import org.apache.lucene.search.*; import org.elasticsearch.Version; -import org.apache.lucene.search.QueryWrapperFilter; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; @@ -39,7 +34,7 @@ import org.elasticsearch.index.search.child.ParentConstantScoreQuery; import org.elasticsearch.index.search.child.ParentQuery; import org.elasticsearch.index.search.child.ScoreType; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; -import org.elasticsearch.search.internal.SubSearchContext; +import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext; import java.io.IOException; import java.util.HashSet; @@ -73,7 +68,7 @@ public class HasParentQueryParser implements QueryParser { String parentType = null; boolean score = false; String queryName = null; - Tuple<String, SubSearchContext> innerHits = null; + InnerHitsSubSearchContext innerHits = null; String currentFieldName = null; XContentParser.Token token; @@ -146,7 +141,7 @@ public class HasParentQueryParser implements QueryParser { return query; } - static Query createParentQuery(Query innerQuery, String parentType, boolean score, QueryParseContext parseContext, Tuple<String, SubSearchContext> innerHits) throws IOException { + static Query createParentQuery(Query innerQuery, String parentType, boolean score, QueryParseContext parseContext, InnerHitsSubSearchContext innerHits) throws IOException { DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType); if (parentDocMapper == null) { throw new QueryParsingException(parseContext, "[has_parent] query configured 'parent_type' [" + parentType @@ -155,8 +150,8 @@ public class HasParentQueryParser implements 
QueryParser { if (innerHits != null) { ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries()); - InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.v2(), parsedQuery, null, parseContext.mapperService(), parentDocMapper); - String name = innerHits.v1() != null ? innerHits.v1() : parentType; + InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, parseContext.mapperService(), parentDocMapper); + String name = innerHits.getName() != null ? innerHits.getName() : parentType; parseContext.addInnerHits(name, parentChildInnerHits); } diff --git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java index d56ce40459b..98c3e2b5bf9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.queries.TermsQuery; @@ -44,7 +43,9 @@ import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.Iterator; +import java.util.LinkedList; import java.util.List; import java.util.Set; @@ -163,7 +164,7 @@ public class MoreLikeThisQueryParser implements QueryParser { } mltQuery.setStopWords(stopWords); } else if ("fields".equals(currentFieldName)) { - moreLikeFields = Lists.newLinkedList(); + moreLikeFields = new LinkedList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { String field = parser.text(); MappedFieldType fieldType = parseContext.fieldMapper(field); @@ -222,7 +223,7 @@ public class MoreLikeThisQueryParser implements QueryParser { // set like text fields boolean useDefaultField = (moreLikeFields == null); if (useDefaultField) { - moreLikeFields = Lists.newArrayList(parseContext.defaultField()); + moreLikeFields = Collections.singletonList(parseContext.defaultField()); } // possibly remove unsupported fields removeUnsupportedFields(moreLikeFields, analyzer, failOnUnsupportedField); diff --git a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index d9a07514356..9059865bad7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.query; import com.carrotsearch.hppc.ObjectFloatHashMap; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; @@ -29,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.search.MatchQuery; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; @@ -151,7 +151,7 @@ public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQue * Constructs a new text query. 
*/ public MultiMatchQueryBuilder(Object text, String... fields) { - this.fields = Lists.newArrayList(); + this.fields = new ArrayList<>(); this.fields.addAll(Arrays.asList(fields)); this.text = text; } diff --git a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java index 4dc71f93393..e14720bd8d1 100644 --- a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java @@ -26,14 +26,13 @@ import org.apache.lucene.search.join.ToParentBlockJoinQuery; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper; import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; -import org.elasticsearch.search.internal.SubSearchContext; +import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext; import java.io.IOException; @@ -120,7 +119,7 @@ public class NestedQueryParser implements QueryParser { public static class ToBlockJoinQueryBuilder extends NestedInnerQueryParseSupport { private ScoreMode scoreMode; - private Tuple<String, SubSearchContext> innerHits; + private InnerHitsSubSearchContext innerHits; public ToBlockJoinQueryBuilder(QueryParseContext parseContext) throws IOException { super(parseContext); @@ -130,7 +129,7 @@ public class NestedQueryParser implements QueryParser { this.scoreMode = scoreMode; } - public void setInnerHits(Tuple<String, SubSearchContext> innerHits) { + public void setInnerHits(InnerHitsSubSearchContext innerHits) { this.innerHits = innerHits; } @@ -152,8 +151,8 @@ public class NestedQueryParser implements QueryParser { if (innerHits != null) { ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries()); - InnerHitsContext.NestedInnerHits nestedInnerHits = new InnerHitsContext.NestedInnerHits(innerHits.v2(), parsedQuery, null, getParentObjectMapper(), nestedObjectMapper); - String name = innerHits.v1() != null ? innerHits.v1() : path; + InnerHitsContext.NestedInnerHits nestedInnerHits = new InnerHitsContext.NestedInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, getParentObjectMapper(), nestedObjectMapper); + String name = innerHits.getName() != null ? 
innerHits.getName() : path; parseContext.addInnerHits(name, nestedInnerHits); } diff --git a/core/src/main/java/org/elasticsearch/index/query/OrQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/OrQueryBuilder.java index 19ad3e49eb0..e8ad48b7faf 100644 --- a/core/src/main/java/org/elasticsearch/index/query/OrQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/OrQueryBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import com.google.common.collect.Lists; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -33,7 +32,7 @@ import java.util.Collections; @Deprecated public class OrQueryBuilder extends QueryBuilder { - private ArrayList<QueryBuilder> filters = Lists.newArrayList(); + private ArrayList<QueryBuilder> filters = new ArrayList<>(); private String queryName; diff --git a/core/src/main/java/org/elasticsearch/index/query/OrQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/OrQueryParser.java index acc55e5f428..ff2c0b2c432 100644 --- a/core/src/main/java/org/elasticsearch/index/query/OrQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/OrQueryParser.java @@ -28,8 +28,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; -import static com.google.common.collect.Lists.newArrayList; - /** * */ @@ -51,7 +49,7 @@ public class OrQueryParser implements QueryParser { public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException { XContentParser parser = parseContext.parser(); - ArrayList<Query> queries = newArrayList(); + ArrayList<Query> queries = new ArrayList<>(); boolean queriesFound = false; String queryName = null; diff --git a/core/src/main/java/org/elasticsearch/index/query/PrefixQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/PrefixQueryParser.java index 56387d90751..d61fec713d2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/PrefixQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/PrefixQueryParser.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentParser; @@ -38,6 +39,8 @@ public class PrefixQueryParser implements QueryParser { public static final String NAME = "prefix"; + private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of prefix query"); + @Inject public PrefixQueryParser() { } @@ -84,7 +87,7 @@ public class PrefixQueryParser implements QueryParser { } } } else { - if ("_name".equals(currentFieldName)) { + if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) { queryName = parser.text(); } else { fieldName = currentFieldName; diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 7959dc1b144..c7a297e319b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -20,16 +20,14 @@ package org.elasticsearch.index.query; import 
com.carrotsearch.hppc.ObjectFloatHashMap; - import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Locale; -import static com.google.common.collect.Lists.newArrayList; - /** * A query that parses a query string and runs it. There are two modes that this operates. The first, * when no field is added (using {@link #field(String)}, will run the query once and non prefixed fields @@ -118,7 +116,7 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu */ public QueryStringQueryBuilder field(String field) { if (fields == null) { - fields = newArrayList(); + fields = new ArrayList<>(); } fields.add(field); return this; @@ -129,7 +127,7 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu */ public QueryStringQueryBuilder field(String field, float boost) { if (fields == null) { - fields = newArrayList(); + fields = new ArrayList<>(); } fields.add(field); if (fieldsBoosts == null) { diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java index 2a21d3d4595..929f7c16cd9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java @@ -20,8 +20,6 @@ package org.elasticsearch.index.query; import com.carrotsearch.hppc.ObjectFloatHashMap; -import com.google.common.collect.Lists; - import org.apache.lucene.queryparser.classic.MapperQueryParser; import org.apache.lucene.queryparser.classic.QueryParserSettings; import org.apache.lucene.search.BooleanQuery; @@ -40,6 +38,7 @@ import org.elasticsearch.index.query.support.QueryParsers; import org.joda.time.DateTimeZone; import java.io.IOException; +import java.util.ArrayList; import java.util.Locale; import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded; @@ -102,7 +101,7 @@ public class QueryStringQueryParser implements QueryParser { fField = parser.text(); } if (qpSettings.fields() == null) { - qpSettings.fields(Lists.<String>newArrayList()); + qpSettings.fields(new ArrayList<String>()); } if (Regex.isSimpleMatchPattern(fField)) { @@ -222,8 +221,6 @@ public class QueryStringQueryParser implements QueryParser { qpSettings.queryString(org.apache.lucene.queryparser.classic.QueryParser.escape(qpSettings.queryString())); } - qpSettings.queryTypes(parseContext.queryTypes()); - MapperQueryParser queryParser = parseContext.queryParser(qpSettings); try { diff --git a/core/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java index 6e4cd451ee9..355f9f2810b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java @@ -40,6 +40,7 @@ public class RangeQueryParser implements QueryParser { public static final String NAME = "range"; private static final ParseField FIELDDATA_FIELD = new ParseField("fielddata").withAllDeprecated("[no replacement]"); + private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of range query"); @Inject public RangeQueryParser() { @@ -109,7 +110,7 @@ public class RangeQueryParser implements QueryParser { } } } else if 
(token.isValue()) { - if ("_name".equals(currentFieldName)) { + if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) { queryName = parser.text(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, FIELDDATA_FIELD)) { // ignore diff --git a/core/src/main/java/org/elasticsearch/index/query/RegexpQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/RegexpQueryParser.java index 533e3cdfe43..5844c17de3a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RegexpQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/RegexpQueryParser.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentParser; @@ -41,6 +42,8 @@ public class RegexpQueryParser implements QueryParser { public static final int DEFAULT_FLAGS_VALUE = RegexpFlag.ALL.value(); + private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of regexp query"); + @Inject public RegexpQueryParser() { } @@ -96,7 +99,7 @@ public class RegexpQueryParser implements QueryParser { } } } else { - if ("_name".equals(currentFieldName)) { + if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) { queryName = parser.text(); } else { fieldName = currentFieldName; diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryParser.java index 6ecf1b70bea..506bce20274 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryParser.java @@ -27,10 +27,9 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; - /** * */ @@ -57,7 +56,7 @@ public class SpanNearQueryParser implements QueryParser { boolean collectPayloads = true; String queryName = null; - List<SpanQuery> clauses = newArrayList(); + List<SpanQuery> clauses = new ArrayList<>(); String currentFieldName = null; XContentParser.Token token; diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryParser.java index db58d4cca82..e28a9ccdccc 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryParser.java @@ -27,10 +27,9 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; - /** * */ @@ -54,7 +53,7 @@ public class SpanOrQueryParser implements QueryParser { float boost = 1.0f; String queryName = null; - List<SpanQuery> clauses = newArrayList(); + List<SpanQuery> clauses = new ArrayList<>(); String currentFieldName = null; XContentParser.Token token; diff --git a/core/src/main/java/org/elasticsearch/index/query/TermQueryParser.java 
b/core/src/main/java/org/elasticsearch/index/query/TermQueryParser.java index be74053bc71..1c3876f3292 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TermQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/TermQueryParser.java @@ -22,10 +22,10 @@ package org.elasticsearch.index.query; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -37,6 +37,9 @@ public class TermQueryParser implements QueryParser { public static final String NAME = "term"; + private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of term query"); + private static final ParseField BOOST_FIELD = new ParseField("boost").withAllDeprecated("boost is not supported in short version of term query"); + @Inject public TermQueryParser() { } @@ -85,9 +88,9 @@ public class TermQueryParser implements QueryParser { } } } else if (token.isValue()) { - if ("_name".equals(currentFieldName)) { + if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) { queryName = parser.text(); - } else if ("boost".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, BOOST_FIELD)) { boost = parser.floatValue(); } else { if (fieldName != null) { diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java index db2fed37226..0d5ae097ab9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.query; -import com.google.common.collect.Lists; - import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.BooleanClause.Occur; @@ -42,6 +40,7 @@ import org.elasticsearch.indices.cache.query.terms.TermsLookup; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.ArrayList; import java.util.List; /** @@ -86,7 +85,7 @@ public class TermsQueryParser implements QueryParser { boolean disableCoord = false; XContentParser.Token token; - List<Object> terms = Lists.newArrayList(); + List<Object> terms = new ArrayList<>(); String fieldName = null; float boost = 1f; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { diff --git a/core/src/main/java/org/elasticsearch/index/query/support/InnerHitsQueryParserHelper.java b/core/src/main/java/org/elasticsearch/index/query/support/InnerHitsQueryParserHelper.java index ae839c41d1c..b4d3e633c3e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/support/InnerHitsQueryParserHelper.java +++ b/core/src/main/java/org/elasticsearch/index/query/support/InnerHitsQueryParserHelper.java @@ -19,12 +19,12 @@ package org.elasticsearch.index.query.support; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import 
org.elasticsearch.index.query.QueryParsingException; import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement; +import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext; import org.elasticsearch.search.fetch.script.ScriptFieldsParseElement; import org.elasticsearch.search.fetch.source.FetchSourceParseElement; import org.elasticsearch.search.highlight.HighlighterParseElement; @@ -51,7 +51,7 @@ public class InnerHitsQueryParserHelper { this.fieldDataFieldsParseElement = fieldDataFieldsParseElement; } - public Tuple<String, SubSearchContext> parse(QueryParseContext parserContext) throws IOException, QueryParsingException { + public InnerHitsSubSearchContext parse(QueryParseContext parserContext) throws IOException, QueryParsingException { String fieldName = null; XContentParser.Token token; String innerHitName = null; @@ -74,7 +74,7 @@ public class InnerHitsQueryParserHelper { } catch (Exception e) { throw new QueryParsingException(parserContext, "Failed to parse [_inner_hits]", e); } - return new Tuple<>(innerHitName, subSearchContext); + return new InnerHitsSubSearchContext(innerHitName, subSearchContext); } public static void parseCommonInnerHitOptions(XContentParser parser, XContentParser.Token token, String fieldName, SubSearchContext subSearchContext, diff --git a/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java b/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java index 63da8a1b217..49e1a218d0c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java +++ b/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query.support; +import org.apache.lucene.search.Filter; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.BitDocIdSetFilter; import org.elasticsearch.common.bytes.BytesReference; @@ -26,7 +27,6 @@ import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParsingException; @@ -54,7 +54,7 @@ public class NestedInnerQueryParseSupport { protected boolean filterFound = false; protected BitDocIdSetFilter parentFilter; - protected BitDocIdSetFilter childFilter; + protected Filter childFilter; protected ObjectMapper nestedObjectMapper; private ObjectMapper parentObjectMapper; @@ -191,7 +191,7 @@ public class NestedInnerQueryParseSupport { } else { parentFilter = parseContext.bitsetFilter(objectMapper.nestedTypeFilter()); } - childFilter = parseContext.bitsetFilter(nestedObjectMapper.nestedTypeFilter()); + childFilter = nestedObjectMapper.nestedTypeFilter(); parentObjectMapper = parseContext.nestedScope().nextLevel(nestedObjectMapper); } diff --git a/core/src/main/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQuery.java b/core/src/main/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQuery.java index 82ba6265f1c..55484996232 100644 --- a/core/src/main/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQuery.java +++ 
b/core/src/main/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQuery.java @@ -103,11 +103,8 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery { return new BooleanQuery().createWeight(searcher, needsScores); } - IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader()); - indexSearcher.setSimilarity(searcher.getSimilarity(true)); - indexSearcher.setQueryCache(null); ParentOrdCollector collector = new ParentOrdCollector(globalIfd, valueCount, parentType); - indexSearcher.search(childQuery, collector); + searcher.search(childQuery, collector); final long remaining = collector.foundParents(); if (remaining == 0) { diff --git a/core/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java b/core/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java index ffa2e343251..c07ccbacbae 100644 --- a/core/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java @@ -152,9 +152,6 @@ public final class ChildrenQuery extends IndexCacheableQuery { // No docs of the specified type exist on this shard return new BooleanQuery().createWeight(searcher, needsScores); } - IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader()); - indexSearcher.setSimilarity(searcher.getSimilarity(true)); - indexSearcher.setQueryCache(null); boolean abort = true; long numFoundParents; @@ -193,7 +190,7 @@ public final class ChildrenQuery extends IndexCacheableQuery { } } - indexSearcher.search(childQuery, collector); + searcher.search(childQuery, collector); numFoundParents = collector.foundParents(); if (numFoundParents == 0) { return new BooleanQuery().createWeight(searcher, needsScores); diff --git a/core/src/main/java/org/elasticsearch/index/search/child/ParentConstantScoreQuery.java b/core/src/main/java/org/elasticsearch/index/search/child/ParentConstantScoreQuery.java index 31ad5f66cb9..bad39130e75 100644 --- a/core/src/main/java/org/elasticsearch/index/search/child/ParentConstantScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/child/ParentConstantScoreQuery.java @@ -22,16 +22,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.Term; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.DocIdSet; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Explanation; -import org.apache.lucene.search.Filter; -import org.apache.lucene.search.FilteredDocIdSetIterator; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; +import org.apache.lucene.search.*; import org.apache.lucene.util.Bits; import org.apache.lucene.util.LongBitSet; import org.elasticsearch.common.lucene.IndexCacheableQuery; @@ -92,10 +83,7 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery { } ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd, parentType); - IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader()); - indexSearcher.setSimilarity(searcher.getSimilarity(true)); - indexSearcher.setQueryCache(null); - indexSearcher.search(parentQuery, collector); + searcher.search(parentQuery, collector); if (collector.parentCount() == 0) { return new BooleanQuery().createWeight(searcher, needsScores); diff 
--git a/core/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java b/core/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java index 4f74d9ad73c..d574066e08d 100644 --- a/core/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java @@ -129,10 +129,7 @@ public class ParentQuery extends IndexCacheableQuery { try { collector = new ParentOrdAndScoreCollector(sc, globalIfd, parentType); - IndexSearcher indexSearcher = new IndexSearcher(sc.searcher().getIndexReader()); - indexSearcher.setSimilarity(searcher.getSimilarity(true)); - indexSearcher.setQueryCache(null); - indexSearcher.search(parentQuery, collector); + searcher.search(parentQuery, collector); if (collector.parentCount() == 0) { return new BooleanQuery().createWeight(searcher, needsScores); } diff --git a/core/src/main/java/org/elasticsearch/index/shard/CommitPoints.java b/core/src/main/java/org/elasticsearch/index/shard/CommitPoints.java index a56a0f62c24..ff06850feb6 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/CommitPoints.java +++ b/core/src/main/java/org/elasticsearch/index/shard/CommitPoints.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.shard; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -28,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; +import java.util.ArrayList; import java.util.Comparator; import java.util.Iterator; import java.util.List; @@ -130,8 +130,8 @@ public class CommitPoints implements Iterable<CommitPoint> { long version = -1; String name = null; CommitPoint.Type type = null; - List<CommitPoint.FileInfo> indexFiles = Lists.newArrayList(); - List<CommitPoint.FileInfo> translogFiles = Lists.newArrayList(); + List<CommitPoint.FileInfo> indexFiles = new ArrayList<>(); + List<CommitPoint.FileInfo> translogFiles = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); diff --git a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java index 7e3435bd185..c8d0379d701 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java @@ -19,8 +19,12 @@ package org.elasticsearch.index.shard; -import com.google.common.collect.Lists; -import org.apache.lucene.index.*; +import org.apache.lucene.index.CodecReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.MergePolicy; +import org.apache.lucene.index.MergeTrigger; +import org.apache.lucene.index.SegmentCommitInfo; +import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.store.Directory; import org.elasticsearch.Version; import org.elasticsearch.common.logging.ESLogger; @@ -28,6 +32,7 @@ import org.elasticsearch.common.logging.Loggers; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -153,7 +158,7 @@ public final class ElasticsearchMergePolicy extends 
MergePolicy { // TODO: Use IndexUpgradeMergePolicy instead. We should be comparing codecs, // for now we just assume every minor upgrade has a new format. logger.debug("Adding segment " + info.info.name + " to be upgraded"); - spec.add(new OneMerge(Lists.newArrayList(info))); + spec.add(new OneMerge(Collections.singletonList(info))); } // TODO: we could check IndexWriter.getMergingSegments and avoid adding merges that IW will just reject? diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index e60b046801c..10217983f40 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -21,17 +21,10 @@ package org.elasticsearch.index.shard; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; - import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.DisjunctionMaxQuery; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.UsageTrackingQueryCachingPolicy; +import org.apache.lucene.search.*; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.ThreadInterruptedException; @@ -48,7 +41,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.logging.ESLogger; @@ -506,13 +498,13 @@ public class IndexShard extends AbstractIndexShardComponent { } } - static Engine.Create prepareCreate(Tuple<DocumentMapper, Mapping> docMapper, SourceToParse source, long version, VersionType versionType, Engine.Operation.Origin origin, boolean canHaveDuplicates, boolean autoGeneratedId) { + static Engine.Create prepareCreate(DocumentMapperForType docMapper, SourceToParse source, long version, VersionType versionType, Engine.Operation.Origin origin, boolean canHaveDuplicates, boolean autoGeneratedId) { long startTime = System.nanoTime(); - ParsedDocument doc = docMapper.v1().parse(source); - if (docMapper.v2() != null) { - doc.addDynamicMappingsUpdate(docMapper.v2()); + ParsedDocument doc = docMapper.getDocumentMapper().parse(source); + if (docMapper.getMapping() != null) { + doc.addDynamicMappingsUpdate(docMapper.getMapping()); } - return new Engine.Create(docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates, autoGeneratedId); + return new Engine.Create(docMapper.getDocumentMapper().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates, autoGeneratedId); } public void create(Engine.Create create) { @@ -540,13 +532,13 @@ public class IndexShard extends AbstractIndexShardComponent { } } - static Engine.Index prepareIndex(Tuple<DocumentMapper, Mapping> docMapper, SourceToParse source, long version, VersionType versionType, Engine.Operation.Origin origin, 
boolean canHaveDuplicates) { + static Engine.Index prepareIndex(DocumentMapperForType docMapper, SourceToParse source, long version, VersionType versionType, Engine.Operation.Origin origin, boolean canHaveDuplicates) { long startTime = System.nanoTime(); - ParsedDocument doc = docMapper.v1().parse(source); - if (docMapper.v2() != null) { - doc.addDynamicMappingsUpdate(docMapper.v2()); + ParsedDocument doc = docMapper.getDocumentMapper().parse(source); + if (docMapper.getMapping() != null) { + doc.addDynamicMappingsUpdate(docMapper.getMapping()); } - return new Engine.Index(docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates); + return new Engine.Index(docMapper.getDocumentMapper().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates); } /** @@ -573,7 +565,7 @@ public class IndexShard extends AbstractIndexShardComponent { public Engine.Delete prepareDelete(String type, String id, long version, VersionType versionType, Engine.Operation.Origin origin) { long startTime = System.nanoTime(); - final DocumentMapper documentMapper = docMapper(type).v1(); + final DocumentMapper documentMapper = docMapper(type).getDocumentMapper(); return new Engine.Delete(type, id, documentMapper.uidMapper().term(Uid.createUid(type, id)), version, versionType, origin, startTime, false); } @@ -1389,7 +1381,7 @@ public class IndexShard extends AbstractIndexShardComponent { return indexSettings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); } - private Tuple<DocumentMapper, Mapping> docMapper(String type) { + private DocumentMapperForType docMapper(String type) { return mapperService.documentMapperWithAutoCreate(type); } diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java b/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java index e22659cbd53..b2b26c12d19 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.shard; import org.apache.lucene.util.IOUtils; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; @@ -41,10 +42,18 @@ public final class ShardPath { private final String indexUUID; private final ShardId shardId; private final Path shardStatePath; + private final boolean isCustomDataPath; - - public ShardPath(Path path, Path shardStatePath, String indexUUID, ShardId shardId) { - this.path = path; + public ShardPath(boolean isCustomDataPath, Path dataPath, Path shardStatePath, String indexUUID, ShardId shardId) { + assert dataPath.getFileName().toString().equals(Integer.toString(shardId.id())) : "dataPath must end with the shard ID but didn't: " + dataPath.toString(); + assert shardStatePath.getFileName().toString().equals(Integer.toString(shardId.id())) : "shardStatePath must end with the shard ID but didn't: " + shardStatePath.toString(); + assert dataPath.getParent().getFileName().toString().equals(shardId.getIndex()) : "dataPath must end with index/shardID but didn't: " + dataPath.toString(); + assert shardStatePath.getParent().getFileName().toString().equals(shardId.getIndex()) : "shardStatePath must end with index/shardID but didn't: " + shardStatePath.toString(); + if (isCustomDataPath &&
dataPath.equals(shardStatePath)) { + throw new IllegalArgumentException("shard state path must be different to the data path when using custom data paths"); + } + this.isCustomDataPath = isCustomDataPath; + this.path = dataPath; this.indexUUID = indexUUID; this.shardId = shardId; this.shardStatePath = shardStatePath; @@ -78,6 +87,30 @@ public final class ShardPath { return shardStatePath; } + /** + * Returns the data-path root for this shard. The root is a parent of {@link #getDataPath()} without the index name + * and the shard ID. + */ + public Path getRootDataPath() { + Path noIndexShardId = getDataPath().getParent().getParent(); + return isCustomDataPath ? noIndexShardId : noIndexShardId.getParent(); // also strip the indices folder + } + + /** + * Returns the state-path root for this shard. The root is a parent of {@link #getShardStatePath()} without the index name + * and the shard ID. + */ + public Path getRootStatePath() { + return getShardStatePath().getParent().getParent().getParent(); // also strip the indices folder + } + + /** + * Returns <code>true</code> iff the data location is a custom data location and therefore outside of the node's configured data paths. + */ + public boolean isCustomDataPath() { + return isCustomDataPath; + } + /** * This method walks through the node's shard paths to find the data and state path for the given shard. If multiple * directories with a valid shard state exist the one with the highest version will be used. @@ -113,7 +146,7 @@ public final class ShardPath { dataPath = statePath; } logger.debug("{} loaded data path [{}], state path [{}]", shardId, dataPath, statePath); - return new ShardPath(dataPath, statePath, indexUUID, shardId); + return new ShardPath(NodeEnvironment.hasCustomDataPath(indexSettings), dataPath, statePath, indexUUID, shardId); } } @@ -166,19 +199,27 @@ public final class ShardPath { } public static ShardPath selectNewPathForShard(NodeEnvironment env, ShardId shardId, @IndexSettings Settings indexSettings, - long avgShardSizeInBytes, Iterable<IndexShard> shards) throws IOException { + long avgShardSizeInBytes, Map<Path,Integer> dataPathToShardCount) throws IOException { final Path dataPath; final Path statePath; - final String indexUUID = indexSettings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); - if (NodeEnvironment.hasCustomDataPath(indexSettings)) { dataPath = env.resolveCustomLocation(indexSettings, shardId); statePath = env.nodePaths()[0].resolve(shardId); } else { - Map<Path,Long> estReservedBytes = getEstimatedReservedBytes(env, avgShardSizeInBytes, shards); + long totFreeSpace = 0; + for (NodeEnvironment.NodePath nodePath : env.nodePaths()) { + totFreeSpace += nodePath.fileStore.getUsableSpace(); + } + + // TODO: this is a hack!! We should instead keep track of incoming (relocated) shards since we know + // how large they will be once they're done copying, instead of a silly guess for such cases: + + // Very rough heuristic of how much disk space we expect the shard will use over its lifetime, the max of current average + // shard size across the cluster and 5% of the total available free space on this node: + long estShardSizeInBytes = Math.max(avgShardSizeInBytes, (long) (totFreeSpace/20.0)); // TODO - do we need something more extensible? Yet, this does the job for now...
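// [Editor's note — illustrative example, not part of this patch; all values are hypothetical.]
// The heuristic above reserves, for every shard already assigned to a data path, the larger of
// the cluster-average shard size and one twentieth (5%) of the node's total usable space:
//
//     long totFreeSpace = 100L << 30;         // say, 100 GiB usable across all NodePaths
//     long avgShardSizeInBytes = 2L << 30;    // say, a 2 GiB cluster-average shard size
//     long estShardSizeInBytes = Math.max(avgShardSizeInBytes, (long) (totFreeSpace / 20.0));
//     // -> 5 GiB per shard; a data path already holding 3 shards thus has 15 GiB deducted
//     //    from its usable space before the most-free-space-wins comparison below.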
final NodeEnvironment.NodePath[] paths = env.nodePaths(); @@ -187,10 +228,11 @@ public final class ShardPath { for (NodeEnvironment.NodePath nodePath : paths) { FileStore fileStore = nodePath.fileStore; long usableBytes = fileStore.getUsableSpace(); - Long reservedBytes = estReservedBytes.get(nodePath.path); - if (reservedBytes != null) { - // Deduct estimated reserved bytes from usable space: - usableBytes -= reservedBytes; + + // Deduct estimated reserved bytes from usable space: + Integer count = dataPathToShardCount.get(nodePath.path); + if (count != null) { + usableBytes -= estShardSizeInBytes * count; } if (usableBytes > maxUsableBytes) { maxUsableBytes = usableBytes; @@ -202,7 +244,9 @@ public final class ShardPath { dataPath = statePath; } - return new ShardPath(dataPath, statePath, indexUUID, shardId); + final String indexUUID = indexSettings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); + + return new ShardPath(NodeEnvironment.hasCustomDataPath(indexSettings), dataPath, statePath, indexUUID, shardId); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java index 46c03de09ce..890b70996cd 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java +++ b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java @@ -25,7 +25,6 @@ import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; @@ -67,7 +66,7 @@ public class TranslogRecoveryPerformer { this.indexCache = indexCache; } - protected Tuple<DocumentMapper, Mapping> docMapper(String type) { + protected DocumentMapperForType docMapper(String type) { return mapperService.documentMapperWithAutoCreate(type); // protected for testing } diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java index 042f3c3f5ef..3730b67e377 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java @@ -20,8 +20,6 @@ package org.elasticsearch.index.snapshots.blobstore; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import com.google.common.io.ByteStreams; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; @@ -43,7 +41,6 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; @@ -51,11 +48,14 @@ import org.elasticsearch.common.lucene.Lucene; import 
org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.xcontent.*; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.snapshots.*; +import org.elasticsearch.index.snapshots.IndexShardRepository; +import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException; +import org.elasticsearch.index.snapshots.IndexShardSnapshotException; +import org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException; +import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; @@ -63,15 +63,21 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.repositories.RepositoryName; import org.elasticsearch.repositories.RepositoryVerificationException; -import org.elasticsearch.repositories.blobstore.*; +import org.elasticsearch.repositories.blobstore.BlobStoreFormat; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.repositories.blobstore.ChecksumBlobStoreFormat; +import org.elasticsearch.repositories.blobstore.LegacyBlobStoreFormat; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.testBlobPrefix; /** @@ -318,7 +324,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements } // Build a list of snapshots that should be preserved - List<SnapshotFiles> newSnapshotsList = Lists.newArrayList(); + List<SnapshotFiles> newSnapshotsList = new ArrayList<>(); for (SnapshotFiles point : snapshots) { if (!point.snapshot().equals(snapshotId.getSnapshot())) { newSnapshotsList.add(point); @@ -352,7 +358,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements */ protected void finalize(List<SnapshotFiles> snapshots, int fileListGeneration, Map<String, BlobMetaData> blobs) { BlobStoreIndexShardSnapshots newSnapshots = new BlobStoreIndexShardSnapshots(snapshots); - List<String> blobsToDelete = newArrayList(); + List<String> blobsToDelete = new ArrayList<>(); // delete old index files first for (String blobName : blobs.keySet()) { // delete old file lists @@ -369,7 +375,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements throw new IndexShardSnapshotFailedException(shardId, "error deleting index files during cleanup, reason: " + e.getMessage(), e); } - blobsToDelete = new ArrayList<>(); // now go over all the blobs, and if they don't exist in a snapshot, delete them for (String blobName : blobs.keySet()) { // delete unused files @@ -459,7 +465,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements } // We couldn't load the index file - falling back to loading individual snapshots - List<SnapshotFiles> snapshots = Lists.newArrayList(); + List<SnapshotFiles> snapshots = new ArrayList<>(); for (String name : blobs.keySet()) { try { BlobStoreIndexShardSnapshot snapshot = null; @@ -524,11 +530,11 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements BlobStoreIndexShardSnapshots snapshots = tuple.v1(); int fileListGeneration = tuple.v2(); - final List<BlobStoreIndexShardSnapshot.FileInfo> indexCommitPointFiles = newArrayList(); + final List<BlobStoreIndexShardSnapshot.FileInfo> indexCommitPointFiles = new ArrayList<>(); int indexNumberOfFiles = 0; long indexTotalFilesSize = 0; - ArrayList<FileInfo> filesToSnapshot = newArrayList(); + ArrayList<FileInfo> filesToSnapshot = new ArrayList<>(); final Store.MetadataSnapshot metadata; // TODO apparently we don't use the MetadataSnapshot#recoveryDiff(...) here but we should try { @@ -611,7 +617,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements // delete all files that are not referenced by any commit point // build a new BlobStoreIndexShardSnapshot, that includes this one and all the saved ones - List<SnapshotFiles> newSnapshotsList = Lists.newArrayList(); + List<SnapshotFiles> newSnapshotsList = new ArrayList<>(); newSnapshotsList.add(new SnapshotFiles(snapshot.snapshot(), snapshot.indexFiles())); for (SnapshotFiles point : snapshots) { newSnapshotsList.add(point); @@ -807,7 +813,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements throw new IndexShardRestoreFailedException(shardId, "Can't restore corrupted shard", e); } - final List<FileInfo> filesToRecover = Lists.newArrayList(); + final List<FileInfo> filesToRecover = new ArrayList<>(); final Map<String, StoreFileMetaData> snapshotMetaData = new HashMap<>(); final Map<String, FileInfo> fileInfos = new HashMap<>(); for (final FileInfo fileInfo : snapshot.indexFiles()) { diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java index 0e997c14ec0..2c413c48ecd 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java @@ -29,14 +29,17 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.FromXContentBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.store.StoreFileMetaData; import java.io.IOException; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; - /** * Shard snapshot metadata */ @@ -477,7 +480,7 @@ public class BlobStoreIndexShardSnapshot implements ToXContent, FromXContentBuil int numberOfFiles = 0; long totalSize = 0; - List<FileInfo> indexFiles = newArrayList(); + List<FileInfo> indexFiles = new ArrayList<>(); if (parser.currentToken() == null) { // fresh parser?
move to the first token parser.nextToken(); } diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java index 19bf4ee3932..0a22b1ec553 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java @@ -24,15 +24,19 @@ import com.google.common.collect.ImmutableMap; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.FromXContentBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo; import java.io.IOException; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newHashMap; /** @@ -68,7 +72,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To for (FileInfo fileInfo : snapshot.indexFiles()) { List<FileInfo> physicalFileList = physicalFiles.get(fileInfo.physicalName()); if (physicalFileList == null) { - physicalFileList = newArrayList(); + physicalFileList = new ArrayList<>(); physicalFiles.put(fileInfo.physicalName(), physicalFileList); } physicalFileList.add(newFiles.get(fileInfo.name())); @@ -90,7 +94,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To for (FileInfo fileInfo : snapshot.indexFiles()) { List<FileInfo> physicalFileList = physicalFiles.get(fileInfo.physicalName()); if (physicalFileList == null) { - physicalFileList = newArrayList(); + physicalFileList = new ArrayList<>(); physicalFiles.put(fileInfo.physicalName(), physicalFileList); } physicalFileList.add(files.get(fileInfo.name())); @@ -269,7 +273,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To if (parseFieldMatcher.match(currentFieldName, ParseFields.FILES) == false) { throw new ElasticsearchParseException("unknown array [{}]", currentFieldName); } - List<String> fileNames = newArrayList(); + List<String> fileNames = new ArrayList<>(); while (parser.nextToken() != XContentParser.Token.END_ARRAY) { fileNames.add(parser.text()); } diff --git a/core/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java b/core/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java index 0d107e19b46..cb34c1167d5 100644 --- a/core/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java +++ b/core/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java @@ -20,12 +20,7 @@ package org.elasticsearch.index.termvectors; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.index.Fields; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.MultiFields; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.Terms; +import org.apache.lucene.index.*; import 
org.apache.lucene.index.memory.MemoryIndex; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.termvectors.TermVectorsFilter; @@ -38,20 +33,13 @@ import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.get.GetField; import org.elasticsearch.index.get.GetResult; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.Mapping; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.settings.IndexSettings; @@ -61,14 +49,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.dfs.AggregatedDfs; import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; +import java.util.*; import static org.elasticsearch.index.mapper.SourceToParse.source; @@ -308,10 +289,10 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { MapperService mapperService = indexShard.mapperService(); // TODO: make parsing not dynamically create fields not in the original mapping - Tuple<DocumentMapper, Mapping> docMapper = mapperService.documentMapperWithAutoCreate(type); - ParsedDocument parsedDocument = docMapper.v1().parse(source(doc).index(index).type(type).flyweight(true)); - if (docMapper.v2() != null) { - parsedDocument.addDynamicMappingsUpdate(docMapper.v2()); + DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type); + ParsedDocument parsedDocument = docMapper.getDocumentMapper().parse(source(doc).index(index).type(type).flyweight(true)); + if (docMapper.getMapping() != null) { + parsedDocument.addDynamicMappingsUpdate(docMapper.getMapping()); } if (parsedDocument.dynamicMappingsUpdate() != null) { mappingUpdatedAction.updateMappingOnMasterSynchronously(index, type, parsedDocument.dynamicMappingsUpdate()); diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 3fe4d9d857a..9b1913e5854 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -32,8 +32,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasablePagedBytesReference; -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.io.stream.*; +import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; 
+import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.logging.ESLogger; @@ -231,7 +233,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } })) { long latestGeneration = -1; - List<Tuple<Path, Long>> filesToUpgrade = new ArrayList<>(); + List<PathWithGeneration> filesToUpgrade = new ArrayList<>(); for (Path path : stream) { Matcher matcher = parseLegacyIdPattern.matcher(path.getFileName().toString()); if (matcher.matches()) { @@ -239,7 +241,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC if (generation >= translogGeneration.translogFileGeneration) { latestGeneration = Math.max(translogGeneration.translogFileGeneration, generation); } - filesToUpgrade.add(new Tuple<>(path, generation)); + filesToUpgrade.add(new PathWithGeneration(path, generation)); } else { Matcher strict_matcher = PARSE_STRICT_ID_PATTERN.matcher(path.getFileName().toString()); if (strict_matcher.matches()) { @@ -250,17 +252,17 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC if (latestGeneration < translogGeneration.translogFileGeneration) { throw new IllegalStateException("latest found translog has a lower generation than the expected uncommitted " + translogGeneration.translogFileGeneration + " > " + latestGeneration); } - CollectionUtil.timSort(filesToUpgrade, new Comparator<Tuple<Path, Long>>() { + CollectionUtil.timSort(filesToUpgrade, new Comparator<PathWithGeneration>() { @Override - public int compare(Tuple<Path, Long> o1, Tuple<Path, Long> o2) { - long gen1 = o1.v2(); - long gen2 = o2.v2(); + public int compare(PathWithGeneration o1, PathWithGeneration o2) { + long gen1 = o1.getGeneration(); + long gen2 = o2.getGeneration(); return Long.compare(gen1, gen2); } }); - for (Tuple<Path, Long> pathAndGeneration : filesToUpgrade) { - final Path path = pathAndGeneration.v1(); - final long generation = pathAndGeneration.v2(); + for (PathWithGeneration pathAndGeneration : filesToUpgrade) { + final Path path = pathAndGeneration.getPath(); + final long generation = pathAndGeneration.getGeneration(); final Path target = path.resolveSibling(getFilename(generation)); logger.debug("upgrading translog copy file from {} to {}", path, target); Files.move(path, target, StandardCopyOption.ATOMIC_MOVE); @@ -1798,4 +1800,21 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return outstandingViews.size(); } + private static class PathWithGeneration { + private final Path path; + private final long generation; + + public PathWithGeneration(Path path, long generation) { + this.path = path; + this.generation = generation; + } + + public Path getPath() { + return path; + } + + public long getGeneration() { + return generation; + } + } } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index 43fdb3df675..4c3b1f5e6a4 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -24,7 +24,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; import
com.google.common.collect.Maps; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.util.CollectionUtil; @@ -38,7 +37,6 @@ import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.CreationException; import org.elasticsearch.common.inject.Inject; @@ -106,6 +104,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; /** * @@ -124,7 +123,26 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i private final NodeEnvironment nodeEnv; private final TimeValue shardsClosedTimeout; - private volatile Map<String, Tuple<IndexService, Injector>> indices = ImmutableMap.of(); + private volatile Map<String, IndexServiceInjectorPair> indices = ImmutableMap.of(); + + static class IndexServiceInjectorPair { + private final IndexService indexService; + private final Injector injector; + + public IndexServiceInjectorPair(IndexService indexService, Injector injector) { + this.indexService = indexService; + this.injector = injector; + } + + public IndexService getIndexService() { + return indexService; + } + + public Injector getInjector() { + return injector; + } + } + private final Map<Index, List<PendingDelete>> pendingDeletes = new HashMap<>(); private final OldShardsStats oldShardsStats = new OldShardsStats(); @@ -229,16 +247,16 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i } Map<Index, List<IndexShardStats>> statsByShard = Maps.newHashMap(); - for (Tuple<IndexService, Injector> value : indices.values()) { - IndexService indexService = value.v1(); + for (IndexServiceInjectorPair value : indices.values()) { + IndexService indexService = value.getIndexService(); for (IndexShard indexShard : indexService) { try { if (indexShard.routingEntry() == null) { continue; } - IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard, indexShard.routingEntry(), flags) }); + IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard, flags) }); if (!statsByShard.containsKey(indexService.index())) { - statsByShard.put(indexService.index(), Lists.<IndexShardStats>newArrayList(indexShardStats)); + statsByShard.put(indexService.index(), arrayAsArrayList(indexShardStats)); } else { statsByShard.get(indexService.index()).add(indexShardStats); } @@ -261,10 +279,10 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i @Override public Iterator<IndexService> iterator() { - return Iterators.transform(indices.values().iterator(), new Function<Tuple<IndexService, Injector>, IndexService>() { + return Iterators.transform(indices.values().iterator(), new Function<IndexServiceInjectorPair, IndexService>() { @Override - public IndexService apply(Tuple<IndexService, Injector> input) { - return input.v1(); + public IndexService 
apply(IndexServiceInjectorPair input) { + return input.getIndexService(); } }); } @@ -279,11 +297,11 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i */ @Nullable public IndexService indexService(String index) { - Tuple<IndexService, Injector> indexServiceInjectorTuple = indices.get(index); - if (indexServiceInjectorTuple == null) { + IndexServiceInjectorPair indexServiceInjectorPair = indices.get(index); + if (indexServiceInjectorPair == null) { return null; } else { - return indexServiceInjectorTuple.v1(); + return indexServiceInjectorPair.getIndexService(); } } @@ -352,7 +370,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i indicesLifecycle.afterIndexCreated(indexService); - indices = newMapBuilder(indices).put(index.name(), new Tuple<>(indexService, indexInjector)).immutableMap(); + indices = newMapBuilder(indices).put(index.name(), new IndexServiceInjectorPair(indexService, indexInjector)).immutableMap(); return indexService; } @@ -377,10 +395,10 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i } logger.debug("[{}] closing ... (reason [{}])", index, reason); - Map<String, Tuple<IndexService, Injector>> tmpMap = newHashMap(indices); - Tuple<IndexService, Injector> remove = tmpMap.remove(index); - indexService = remove.v1(); - indexInjector = remove.v2(); + Map<String, IndexServiceInjectorPair> tmpMap = newHashMap(indices); + IndexServiceInjectorPair remove = tmpMap.remove(index); + indexService = remove.getIndexService(); + indexInjector = remove.getInjector(); indices = ImmutableMap.copyOf(tmpMap); } @@ -488,7 +506,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i synchronized (this) { String indexName = metaData.index(); if (indices.containsKey(indexName)) { - String localUUid = indices.get(indexName).v1().indexUUID(); + String localUUid = indices.get(indexName).getIndexService().indexUUID(); throw new IllegalStateException("Can't delete index store for [" + indexName + "] - it's still part of the indices service [" + localUUid + "] [" + metaData.getIndexUUID() + "]"); } if (clusterState.metaData().hasIndex(indexName) && (clusterState.nodes().localNode().masterNode() == true)) { @@ -589,9 +607,9 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i * @return true if the index can be deleted on this node */ public boolean canDeleteIndexContents(Index index, Settings indexSettings) { - final Tuple<IndexService, Injector> indexServiceInjectorTuple = this.indices.get(index.name()); + final IndexServiceInjectorPair indexServiceInjectorPair = this.indices.get(index.name()); if (IndexMetaData.isOnSharedFilesystem(indexSettings) == false) { - if (indexServiceInjectorTuple == null && nodeEnv.hasNodeFile()) { + if (indexServiceInjectorPair == null && nodeEnv.hasNodeFile()) { return true; } } else { @@ -622,10 +640,10 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i } private boolean canDeleteShardContent(ShardId shardId, @IndexSettings Settings indexSettings) { - final Tuple<IndexService, Injector> indexServiceInjectorTuple = this.indices.get(shardId.getIndex()); + final IndexServiceInjectorPair indexServiceInjectorPair = this.indices.get(shardId.getIndex()); if (IndexMetaData.isOnSharedFilesystem(indexSettings) == false) { - if (indexServiceInjectorTuple != null && nodeEnv.hasNodeFile()) { - final IndexService indexService = indexServiceInjectorTuple.v1(); + if (indexServiceInjectorPair 
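The removeIndex hunk above keeps the long-standing concurrency idiom intact: mutate a copy of the volatile indices map, then republish it in one write. A stripped-down sketch of that copy-mutate-swap pattern, using a JDK unmodifiable map where the patch uses Guava's ImmutableMap:

```java
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class CopyOnWriteMapSketch {

    // Readers see a consistent, immutable snapshot without taking a lock.
    private volatile Map<String, String> indices = Collections.emptyMap();

    // Writers serialize among themselves, copy the current map, mutate the
    // copy, and publish it with a single volatile write: the same
    // copy-mutate-swap that removeIndex performs with newHashMap(indices)
    // and ImmutableMap.copyOf(tmpMap).
    public synchronized void removeIndex(String index) {
        Map<String, String> tmp = new HashMap<>(this.indices);
        tmp.remove(index);
        this.indices = Collections.unmodifiableMap(tmp);
    }

    public String indexService(String index) {
        return indices.get(index); // lock-free read of the current snapshot
    }
}
```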
!= null && nodeEnv.hasNodeFile()) { + final IndexService indexService = indexServiceInjectorPair.getIndexService(); return indexService.hasShard(shardId.id()) == false; } else if (nodeEnv.hasNodeFile()) { if (NodeEnvironment.hasCustomDataPath(indexSettings)) { diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java index 2b28842698d..9ee45b21def 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java @@ -19,7 +19,6 @@ package org.elasticsearch.indices; -import com.google.common.collect.Lists; import org.apache.lucene.index.IndexReader; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -29,10 +28,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.threadpool.ThreadPool; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; @@ -100,7 +100,7 @@ public final class IndicesWarmer extends AbstractComponent { } indexShard.warmerService().onPreWarm(); long time = System.nanoTime(); - final List<TerminationHandle> terminationHandles = Lists.newArrayList(); + final List<TerminationHandle> terminationHandles = new ArrayList<>(); // get a handle on pending tasks for (final Listener listener : listeners) { if (topReader) { diff --git a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java index d7d625090aa..a184f0a0888 100644 --- a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java +++ b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java @@ -19,9 +19,7 @@ package org.elasticsearch.indices; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; - import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; @@ -51,6 +49,7 @@ import org.elasticsearch.index.suggest.stats.SuggestStats; import org.elasticsearch.search.suggest.completion.CompletionStats; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -175,7 +174,7 @@ public class NodeIndicesStats implements Streamable, ToXContent { for (int i = 0; i < entries; i++) { Index index = Index.readIndexName(in); int indexShardListSize = in.readVInt(); - List<IndexShardStats> indexShardStats = Lists.newArrayListWithCapacity(indexShardListSize); + List<IndexShardStats> indexShardStats = new ArrayList<>(indexShardListSize); for (int j = 0; j < indexShardListSize; j++) { indexShardStats.add(IndexShardStats.readIndexShardStats(in)); } diff --git a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index 73877a2f631..b6422864fc5 100644 --- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ 
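Most hunks in this file and the ones that follow are the same mechanical substitution: since the Java 7 diamond operator, Guava's Lists.newArrayList() and Lists.newArrayListWithCapacity(n) no longer buy anything over the JDK constructors. For illustration:

```java
import java.util.ArrayList;
import java.util.List;

public class DiamondSketch {
    public static void main(String[] args) {
        // Before Java 7, Guava's factory method avoided repeating the type:
        //   List<TerminationHandle> handles = Lists.newArrayList();
        // With the diamond operator the JDK form is just as terse:
        List<String> handles = new ArrayList<>();

        // Lists.newArrayListWithCapacity(n) maps onto the sizing constructor:
        List<String> sized = new ArrayList<>(16);

        handles.add("handle");
        System.out.println(handles.size() + " " + sized.size()); // 1 0
    }
}
```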
b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -28,13 +28,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.node.settings.NodeSettingsService; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicLong; -import static com.google.common.collect.Lists.newArrayList; - /** * CircuitBreakerService that attempts to redistribute space between breakers * if tripped @@ -186,7 +185,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { @Override public AllCircuitBreakerStats stats() { long parentEstimated = 0; - List<CircuitBreakerStats> allStats = newArrayList(); + List<CircuitBreakerStats> allStats = new ArrayList<>(); // Gather the "estimated" count for the parent breaker by adding the // estimations for each individual breaker for (CircuitBreaker breaker : this.breakers.values()) { diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index c669a9d6d59..30cab946d2e 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -22,7 +22,6 @@ package org.elasticsearch.indices.cluster; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.google.common.base.Predicate; -import com.google.common.collect.Lists; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -34,7 +33,11 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -50,7 +53,12 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettingsService; -import org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardRecoveryException; +import org.elasticsearch.index.shard.IndexShardState; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; +import org.elasticsearch.index.shard.StoreRecoveryService; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoveryFailedException; import org.elasticsearch.indices.recovery.RecoveryState; @@ -58,6 +66,7 @@ import org.elasticsearch.indices.recovery.RecoveryStatus; import org.elasticsearch.indices.recovery.RecoveryTarget; import 
org.elasticsearch.threadpool.ThreadPool; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -335,7 +344,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic // we only create / update here continue; } - List<String> typesToRefresh = Lists.newArrayList(); + List<String> typesToRefresh = new ArrayList<>(); String index = indexMetaData.index(); IndexService indexService = indicesService.indexService(index); if (indexService == null) { diff --git a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java index 430b7634672..67d3e8eadfc 100644 --- a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java +++ b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java @@ -19,7 +19,6 @@ package org.elasticsearch.indices.memory; -import com.google.common.collect.Lists; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -39,7 +38,12 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.threadpool.ThreadPool; -import java.util.*; +import java.util.ArrayList; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.concurrent.ScheduledFuture; /** @@ -158,7 +162,7 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin changes.addAll(purgeDeletedAndClosedShards()); - final List<IndexShard> activeToInactiveIndexingShards = Lists.newArrayList(); + final List<IndexShard> activeToInactiveIndexingShards = new ArrayList<>(); final int activeShards = updateShardStatuses(changes, activeToInactiveIndexingShards); for (IndexShard indexShard : activeToInactiveIndexingShards) { // update inactive indexing buffer size diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryResponse.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryResponse.java index b360130d271..7ec59a76ed8 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryResponse.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryResponse.java @@ -19,12 +19,12 @@ package org.elasticsearch.indices.recovery; -import com.google.common.collect.Lists; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.transport.TransportResponse; import java.io.IOException; +import java.util.ArrayList; import java.util.List; /** @@ -32,10 +32,10 @@ import java.util.List; */ class RecoveryResponse extends TransportResponse { - List<String> phase1FileNames = Lists.newArrayList(); - List<Long> phase1FileSizes = Lists.newArrayList(); - List<String> phase1ExistingFileNames = Lists.newArrayList(); - List<Long> phase1ExistingFileSizes = Lists.newArrayList(); + List<String> phase1FileNames = new ArrayList<>(); + List<Long> phase1FileSizes = new ArrayList<>(); + List<String> phase1ExistingFileNames = new ArrayList<>(); + List<Long> phase1ExistingFileSizes = new ArrayList<>(); long phase1TotalSize; long phase1ExistingTotalSize; long phase1Time; @@ -53,23 +53,23 @@ class RecoveryResponse extends TransportResponse { public void readFrom(StreamInput in) throws IOException { 
super.readFrom(in); int size = in.readVInt(); - phase1FileNames = Lists.newArrayListWithCapacity(size); + phase1FileNames = new ArrayList<>(size); for (int i = 0; i < size; i++) { phase1FileNames.add(in.readString()); } size = in.readVInt(); - phase1FileSizes = Lists.newArrayListWithCapacity(size); + phase1FileSizes = new ArrayList<>(size); for (int i = 0; i < size; i++) { phase1FileSizes.add(in.readVLong()); } size = in.readVInt(); - phase1ExistingFileNames = Lists.newArrayListWithCapacity(size); + phase1ExistingFileNames = new ArrayList<>(size); for (int i = 0; i < size; i++) { phase1ExistingFileNames.add(in.readString()); } size = in.readVInt(); - phase1ExistingFileSizes = Lists.newArrayListWithCapacity(size); + phase1ExistingFileSizes = new ArrayList<>(size); for (int i = 0; i < size; i++) { phase1ExistingFileSizes.add(in.readVLong()); } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 9d0439dc167..8d913c6937f 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -20,7 +20,6 @@ package org.elasticsearch.indices.recovery; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; @@ -57,6 +56,7 @@ import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; @@ -558,7 +558,7 @@ public class RecoverySourceHandler { int ops = 0; long size = 0; int totalOperations = 0; - final List<Translog.Operation> operations = Lists.newArrayList(); + final List<Translog.Operation> operations = new ArrayList<>(); Translog.Operation operation; try { operation = snapshot.next(); // this ex should bubble up diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java index b320c98568a..345c179bc3b 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.indices.recovery; -import com.google.common.collect.Lists; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 1d4eefdcb05..d72e145d526 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -19,12 +19,15 @@ package org.elasticsearch.indices.store; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import 
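RecoveryResponse.readFrom above follows the usual length-prefixed wire pattern: read a count, pre-size the list, then read that many elements. A JDK-only sketch of the same round trip, where DataInput/DataOutput and writeInt stand in for Elasticsearch's StreamInput/StreamOutput and variable-length ints:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class LengthPrefixedListSketch {

    static void writeStrings(DataOutputStream out, List<String> values) throws IOException {
        out.writeInt(values.size());            // count first (ES uses a vint here)
        for (String value : values) {
            out.writeUTF(value);
        }
    }

    static List<String> readStrings(DataInputStream in) throws IOException {
        int size = in.readInt();
        List<String> values = new ArrayList<>(size); // pre-size, like readFrom above
        for (int i = 0; i < size; i++) {
            values.add(in.readUTF());
        }
        return values;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeStrings(new DataOutputStream(bytes), Arrays.asList("segments_1", "_0.cfs"));
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(readStrings(in)); // [segments_1, _0.cfs]
    }
}
```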
org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.*; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -51,6 +54,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Set; @@ -103,8 +107,8 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T @Override protected NodesStoreFilesMetaData newResponse(Request request, AtomicReferenceArray responses) { - final List<NodeStoreFilesMetaData> nodeStoreFilesMetaDatas = Lists.newArrayList(); - final List<FailedNodeException> failures = Lists.newArrayList(); + final List<NodeStoreFilesMetaData> nodeStoreFilesMetaDatas = new ArrayList<>(); + final List<FailedNodeException> failures = new ArrayList<>(); for (int i = 0; i < responses.length(); i++) { Object resp = responses.get(i); if (resp instanceof NodeStoreFilesMetaData) { // will also filter out null response for unallocated ones diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java index e92563723c6..6096f29262f 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java @@ -19,12 +19,17 @@ package org.elasticsearch.percolator; import com.carrotsearch.hppc.IntObjectHashMap; -import com.google.common.collect.Lists; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.memory.ExtendedMemoryIndex; import org.apache.lucene.index.memory.MemoryIndex; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.TopDocs; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CloseableThreadLocal; import org.elasticsearch.ElasticsearchParseException; @@ -38,7 +43,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -59,7 +63,11 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.index.mapper.*; +import 
org.elasticsearch.index.mapper.DocumentMapperForType; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.percolator.stats.ShardPercolateService; import org.elasticsearch.index.query.ParsedQuery; @@ -85,6 +93,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import static org.elasticsearch.common.util.CollectionUtils.eagerTransform; import static org.elasticsearch.index.mapper.SourceToParse.source; import static org.elasticsearch.percolator.QueryCollector.*; @@ -275,10 +284,10 @@ public class PercolatorService extends AbstractComponent { } MapperService mapperService = documentIndexService.mapperService(); - Tuple<DocumentMapper, Mapping> docMapper = mapperService.documentMapperWithAutoCreate(request.documentType()); - doc = docMapper.v1().parse(source(parser).index(index).type(request.documentType()).flyweight(true)); - if (docMapper.v2() != null) { - doc.addDynamicMappingsUpdate(docMapper.v2()); + DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(request.documentType()); + doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(request.documentType()).flyweight(true)); + if (docMapper.getMapping() != null) { + doc.addDynamicMappingsUpdate(docMapper.getMapping()); } if (doc.dynamicMappingsUpdate() != null) { mappingUpdatedAction.updateMappingOnMasterSynchronously(request.shardId().getIndex(), request.documentType(), doc.dynamicMappingsUpdate()); @@ -384,8 +393,8 @@ public class PercolatorService extends AbstractComponent { try { parser = XContentFactory.xContent(fetchedDoc).createParser(fetchedDoc); MapperService mapperService = documentIndexService.mapperService(); - Tuple<DocumentMapper, Mapping> docMapper = mapperService.documentMapperWithAutoCreate(type); - doc = docMapper.v1().parse(source(parser).index(index).type(type).flyweight(true)); + DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type); + doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(type).flyweight(true)); if (context.highlight() != null) { doc.setSource(fetchedDoc); @@ -847,7 +856,7 @@ public class PercolatorService extends AbstractComponent { if (aggregations != null) { List<SiblingPipelineAggregator> pipelineAggregators = shardResults.get(0).pipelineAggregators(); if (pipelineAggregators != null) { - List<InternalAggregation> newAggs = new ArrayList<>(Lists.transform(aggregations.asList(), PipelineAggregator.AGGREGATION_TRANFORM_FUNCTION)); + List<InternalAggregation> newAggs = new ArrayList<>(eagerTransform(aggregations.asList(), PipelineAggregator.AGGREGATION_TRANFORM_FUNCTION)); for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) { InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext(bigArrays, scriptService)); diff --git a/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java b/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java index 46ece9335db..dfa9f4be05f 100644 --- a/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java +++ b/core/src/main/java/org/elasticsearch/percolator/QueryCollector.java @@ -19,22 +19,14 @@ package org.elasticsearch.percolator; import com.carrotsearch.hppc.FloatArrayList; - import 
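The swap from Lists.transform to eagerTransform in the aggregation-reduce path above is more than a rename: Guava's Lists.transform returns a lazy view that re-applies the function on every access, while an eager transform materializes the result once. A hedged JDK-only approximation of the eager variant; the real CollectionUtils.eagerTransform may differ:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class EagerTransformSketch {

    interface Function<F, T> {
        T apply(F input);
    }

    // Hedged approximation of CollectionUtils.eagerTransform: apply the
    // function exactly once per element and copy into a fresh list, instead
    // of the re-evaluating view that Guava's Lists.transform returns.
    static <F, T> List<T> eagerTransform(List<F> list, Function<F, T> function) {
        List<T> result = new ArrayList<>(list.size());
        for (F element : list) {
            result.add(function.apply(element));
        }
        return result;
    }

    public static void main(String[] args) {
        List<Integer> lengths = eagerTransform(Arrays.asList("doc", "query"),
                new Function<String, Integer>() {
                    @Override
                    public Integer apply(String input) {
                        return input.length();
                    }
                });
        System.out.println(lengths); // [3, 5]
    }
}
```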
org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.SimpleCollector; -import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.*; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; diff --git a/core/src/main/java/org/elasticsearch/repositories/Repository.java b/core/src/main/java/org/elasticsearch/repositories/Repository.java index 2bcdccb5565..adde0ed1c89 100644 --- a/core/src/main/java/org/elasticsearch/repositories/Repository.java +++ b/core/src/main/java/org/elasticsearch/repositories/Repository.java @@ -130,4 +130,10 @@ public interface Repository extends LifecycleComponent<Repository> { */ void endVerification(String verificationToken); + /** + * Returns true if the repository supports only read operations + * @return true if the repository is read-only + */ + boolean readOnly(); + } diff --git a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java index 35a168428bf..11b898d85f7 100644 --- a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java @@ -31,17 +31,22 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.snapshots.IndexShardRepository; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; import org.elasticsearch.repositories.RepositoriesService.VerifyResponse; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicInteger; -import static com.google.common.collect.Lists.newArrayList; - public class VerifyNodeRepositoryAction extends AbstractComponent { public static final String ACTION_NAME = "internal:admin/repository/verify"; @@ -68,7 +73,7 @@ public class VerifyNodeRepositoryAction extends AbstractComponent { final DiscoveryNode localNode = discoNodes.localNode(); final ObjectContainer<DiscoveryNode> masterAndDataNodes = discoNodes.masterAndDataNodes().values(); - final List<DiscoveryNode> nodes = newArrayList(); + final List<DiscoveryNode> nodes = new ArrayList<>(); for
(ObjectCursor<DiscoveryNode> cursor : masterAndDataNodes) { DiscoveryNode node = cursor.value; nodes.add(node); diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 3e9fbaaad09..fd712bff928 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -56,7 +56,12 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.RepositoryVerificationException; -import org.elasticsearch.snapshots.*; +import org.elasticsearch.snapshots.InvalidSnapshotNameException; +import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotCreationException; +import org.elasticsearch.snapshots.SnapshotException; +import org.elasticsearch.snapshots.SnapshotMissingException; +import org.elasticsearch.snapshots.SnapshotShardFailure; import java.io.FileNotFoundException; import java.io.IOException; @@ -68,8 +73,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; - /** * BlobStore - based implementation of Snapshot Repository * <p/> @@ -168,6 +171,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep private LegacyBlobStoreFormat<Snapshot> snapshotLegacyFormat; + private final boolean readOnly; + /** * Constructs new BlobStoreRepository * @@ -181,6 +186,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep this.indexShardRepository = (BlobStoreIndexShardRepository) indexShardRepository; snapshotRateLimiter = getRateLimiter(repositorySettings, "max_snapshot_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB)); restoreRateLimiter = getRateLimiter(repositorySettings, "max_restore_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB)); + readOnly = repositorySettings.settings().getAsBoolean("readonly", false); } /** @@ -260,6 +266,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep */ @Override public void initializeSnapshot(SnapshotId snapshotId, List<String> indices, MetaData metaData) { + if (readOnly()) { + throw new RepositoryException(this.repositoryName, "cannot create snapshot in a readonly repository"); + } try { if (snapshotFormat.exists(snapshotsBlobContainer, snapshotId.getSnapshot()) || snapshotLegacyFormat.exists(snapshotsBlobContainer, snapshotId.getSnapshot())) { @@ -283,6 +292,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep */ @Override public void deleteSnapshot(SnapshotId snapshotId) { + if (readOnly()) { + throw new RepositoryException(this.repositoryName, "cannot delete snapshot from a readonly repository"); + } List<String> indices = Collections.EMPTY_LIST; Snapshot snapshot = null; try { @@ -380,7 +392,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep @Override public List<SnapshotId> snapshots() { try { - List<SnapshotId> snapshots = newArrayList(); + List<SnapshotId> snapshots = new ArrayList<>(); Map<String, BlobMetaData> blobs; try { blobs = snapshotsBlobContainer.listBlobsByPrefix(COMMON_SNAPSHOT_PREFIX); @@ -622,16 +634,21 @@ public abstract class BlobStoreRepository extends 
AbstractLifecycleComponent<Rep @Override public String startVerification() { try { - String seed = Strings.randomBase64UUID(); - byte[] testBytes = Strings.toUTF8Bytes(seed); - BlobContainer testContainer = blobStore().blobContainer(basePath().add(testBlobPrefix(seed))); - String blobName = "master.dat"; - try (OutputStream outputStream = testContainer.createOutput(blobName + "-temp")) { - outputStream.write(testBytes); + if (readOnly()) { + // It's readonly - so there is not much we can do here to verify it + return null; + } else { + String seed = Strings.randomBase64UUID(); + byte[] testBytes = Strings.toUTF8Bytes(seed); + BlobContainer testContainer = blobStore().blobContainer(basePath().add(testBlobPrefix(seed))); + String blobName = "master.dat"; + try (OutputStream outputStream = testContainer.createOutput(blobName + "-temp")) { + outputStream.write(testBytes); + } + // Make sure that move is supported + testContainer.move(blobName + "-temp", blobName); + return seed; } - // Make sure that move is supported - testContainer.move(blobName + "-temp", blobName); - return seed; } catch (IOException exp) { throw new RepositoryVerificationException(repositoryName, "path " + basePath() + " is not accessible on master node", exp); } @@ -639,6 +656,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep @Override public void endVerification(String seed) { + if (readOnly()) { + throw new UnsupportedOperationException("shouldn't be called"); + } try { blobStore().delete(basePath().add(testBlobPrefix(seed))); } catch (IOException exp) { @@ -649,4 +669,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep public static String testBlobPrefix(String seed) { return TESTS_FILE + seed; } + + @Override + public boolean readOnly() { + return readOnly; + } } diff --git a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java index 42a27c1dcd4..922c4878466 100644 --- a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java @@ -126,17 +126,6 @@ public class URLRepository extends BlobStoreRepository { } } - @Override - public String startVerification() { - //TODO: #7831 Add check that URL exists and accessible - return null; - } - - @Override - public void endVerification(String seed) { - throw new UnsupportedOperationException("shouldn't be called"); - } - /** * Makes sure that the url is white listed or if it points to the local file system it matches one on of the root path in path.repo */ @@ -168,4 +157,9 @@ public class URLRepository extends BlobStoreRepository { throw new RepositoryException(repositoryName, "unsupported url protocol [" + protocol + "] from URL [" + url + "]"); } + @Override + public boolean readOnly() { + return true; + } + } diff --git a/core/src/main/java/org/elasticsearch/rest/RestModule.java b/core/src/main/java/org/elasticsearch/rest/RestModule.java index a5d6e1be7c5..e7949172d0a 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestModule.java +++ b/core/src/main/java/org/elasticsearch/rest/RestModule.java @@ -19,11 +19,11 @@ package org.elasticsearch.rest; -import com.google.common.collect.Lists; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.action.RestActionModule; +import java.util.ArrayList; import java.util.List; /** @@ -32,7 +32,7 
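The read-only support added above follows one consistent pattern: read the flag once from the repository settings, fail mutating operations fast, and skip write-based verification entirely. A condensed sketch of that guard logic; IllegalStateException stands in for the patch's RepositoryException, and the blob-store bodies are elided:

```java
public class ReadOnlyRepositorySketch {

    private final boolean readOnly;

    public ReadOnlyRepositorySketch(boolean readOnly) {
        // in the patch: repositorySettings.settings().getAsBoolean("readonly", false)
        this.readOnly = readOnly;
    }

    public boolean readOnly() {
        return readOnly;
    }

    public void initializeSnapshot(String snapshotName) {
        if (readOnly()) {
            // fail fast before any blob is written
            throw new IllegalStateException("cannot create snapshot in a readonly repository");
        }
        // ... write the snapshot metadata blob ...
    }

    public String startVerification() {
        if (readOnly()) {
            return null; // nothing will ever be written, so there is nothing to verify
        }
        // ... write a seeded test blob, move it into place, return the seed ...
        return "seed";
    }
}
```

With the guards hoisted into BlobStoreRepository, URLRepository above can drop its bespoke startVerification/endVerification overrides and simply return true from readOnly().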
@@ import java.util.List; public class RestModule extends AbstractModule { private final Settings settings; - private List<Class<? extends BaseRestHandler>> restPluginsActions = Lists.newArrayList(); + private List<Class<? extends BaseRestHandler>> restPluginsActions = new ArrayList<>(); public void addRestAction(Class<? extends BaseRestHandler> restAction) { restPluginsActions.add(restAction); diff --git a/core/src/main/java/org/elasticsearch/rest/RestResponse.java b/core/src/main/java/org/elasticsearch/rest/RestResponse.java index 80ad0e16a37..7946785bc97 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestResponse.java +++ b/core/src/main/java/org/elasticsearch/rest/RestResponse.java @@ -19,12 +19,15 @@ package org.elasticsearch.rest; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * @@ -58,7 +61,7 @@ public abstract class RestResponse { for (String key : headerKeySet) { List<String> values = customHeaders.get(key); if (values == null) { - values = Lists.newArrayList(); + values = new ArrayList<>(); customHeaders.put(key, values); } values.addAll(ex.getHeader(key)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/RestActionModule.java b/core/src/main/java/org/elasticsearch/rest/action/RestActionModule.java index 0764ca2f536..02bde5f74d5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/RestActionModule.java +++ b/core/src/main/java/org/elasticsearch/rest/action/RestActionModule.java @@ -19,8 +19,6 @@ package org.elasticsearch.rest.action; -import com.google.common.collect.Lists; - import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.rest.BaseRestHandler; @@ -67,10 +65,10 @@ import org.elasticsearch.rest.action.admin.indices.open.RestOpenIndexAction; import org.elasticsearch.rest.action.admin.indices.optimize.RestOptimizeAction; import org.elasticsearch.rest.action.admin.indices.recovery.RestRecoveryAction; import org.elasticsearch.rest.action.admin.indices.refresh.RestRefreshAction; -import org.elasticsearch.rest.action.admin.indices.shards.RestIndicesShardStoresAction; import org.elasticsearch.rest.action.admin.indices.segments.RestIndicesSegmentsAction; import org.elasticsearch.rest.action.admin.indices.settings.RestGetSettingsAction; import org.elasticsearch.rest.action.admin.indices.settings.RestUpdateSettingsAction; +import org.elasticsearch.rest.action.admin.indices.shards.RestIndicesShardStoresAction; import org.elasticsearch.rest.action.admin.indices.stats.RestIndicesStatsAction; import org.elasticsearch.rest.action.admin.indices.template.delete.RestDeleteIndexTemplateAction; import org.elasticsearch.rest.action.admin.indices.template.get.RestGetIndexTemplateAction; @@ -83,7 +81,20 @@ import org.elasticsearch.rest.action.admin.indices.warmer.delete.RestDeleteWarme import org.elasticsearch.rest.action.admin.indices.warmer.get.RestGetWarmerAction; import org.elasticsearch.rest.action.admin.indices.warmer.put.RestPutWarmerAction; import org.elasticsearch.rest.action.bulk.RestBulkAction; -import org.elasticsearch.rest.action.cat.*; +import org.elasticsearch.rest.action.cat.AbstractCatAction; +import org.elasticsearch.rest.action.cat.RestAliasAction; +import 
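The RestResponse hunk above keeps the classic get-or-create step for a Map<String, List<String>> of headers. If the codebase could assume Java 8, computeIfAbsent collapses that step to one call; a sketch with invented header names and values:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HeaderMapSketch {
    public static void main(String[] args) {
        Map<String, List<String>> customHeaders = new HashMap<>();

        // The patch's Java 7 get-or-create step:
        List<String> values = customHeaders.get("Warning");
        if (values == null) {
            values = new ArrayList<>();
            customHeaders.put("Warning", values);
        }
        values.add("some warning");

        // The equivalent single call once Java 8 is available:
        customHeaders.computeIfAbsent("Location", k -> new ArrayList<>()).add("/idx/type/1");

        System.out.println(customHeaders);
    }
}
```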
org.elasticsearch.rest.action.cat.RestAllocationAction; +import org.elasticsearch.rest.action.cat.RestCatAction; +import org.elasticsearch.rest.action.cat.RestFielddataAction; +import org.elasticsearch.rest.action.cat.RestHealthAction; +import org.elasticsearch.rest.action.cat.RestIndicesAction; +import org.elasticsearch.rest.action.cat.RestMasterAction; +import org.elasticsearch.rest.action.cat.RestNodeAttrsAction; +import org.elasticsearch.rest.action.cat.RestNodesAction; +import org.elasticsearch.rest.action.cat.RestPluginsAction; +import org.elasticsearch.rest.action.cat.RestSegmentsAction; +import org.elasticsearch.rest.action.cat.RestShardsAction; +import org.elasticsearch.rest.action.cat.RestThreadPoolAction; import org.elasticsearch.rest.action.delete.RestDeleteAction; import org.elasticsearch.rest.action.explain.RestExplainAction; import org.elasticsearch.rest.action.fieldstats.RestFieldStatsAction; @@ -110,13 +121,14 @@ import org.elasticsearch.rest.action.termvectors.RestMultiTermVectorsAction; import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction; import org.elasticsearch.rest.action.update.RestUpdateAction; +import java.util.ArrayList; import java.util.List; /** * */ public class RestActionModule extends AbstractModule { - private List<Class<? extends BaseRestHandler>> restPluginsActions = Lists.newArrayList(); + private List<Class<? extends BaseRestHandler>> restPluginsActions = new ArrayList<>(); public RestActionModule(List<Class<? extends BaseRestHandler>> restPluginsActions) { this.restPluginsActions = restPluginsActions; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java index edfab2da1c1..57ceb21f41e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.rest.action.admin.indices.analyze; -import com.google.common.collect.Lists; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.client.Client; @@ -29,11 +28,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestToXContentListener; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -95,7 +98,7 @@ public class RestAnalyzeAction extends BaseRestHandler { } else if ("text".equals(currentFieldName) && token == XContentParser.Token.VALUE_STRING) { analyzeRequest.text(parser.text()); } else if ("text".equals(currentFieldName) && token == XContentParser.Token.START_ARRAY) { - List<String> texts = Lists.newArrayList(); + List<String> texts = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token.isValue() == false) { throw new 
IllegalArgumentException(currentFieldName + " array element should only contain text"); @@ -110,7 +113,7 @@ public class RestAnalyzeAction extends BaseRestHandler { } else if ("tokenizer".equals(currentFieldName) && token == XContentParser.Token.VALUE_STRING) { analyzeRequest.tokenizer(parser.text()); } else if (("token_filters".equals(currentFieldName) || "filters".equals(currentFieldName)) && token == XContentParser.Token.START_ARRAY) { - List<String> filters = Lists.newArrayList(); + List<String> filters = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token.isValue() == false) { throw new IllegalArgumentException(currentFieldName + " array element should only contain token filter's name"); @@ -119,7 +122,7 @@ public class RestAnalyzeAction extends BaseRestHandler { } analyzeRequest.tokenFilters(filters.toArray(Strings.EMPTY_ARRAY)); } else if ("char_filters".equals(currentFieldName) && token == XContentParser.Token.START_ARRAY) { - List<String> charFilters = Lists.newArrayList(); + List<String> charFilters = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token.isValue() == false) { throw new IllegalArgumentException(currentFieldName + " array element should only contain char filter's name"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java index 9d58f8f74df..4c9b3de5c79 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.rest.action.cat; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; -import org.elasticsearch.action.admin.indices.recovery.ShardRecoveryResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; @@ -116,19 +115,19 @@ public class RestRecoveryAction extends AbstractCatAction { Table t = getTableWithHeader(request); - for (String index : response.shardResponses().keySet()) { + for (String index : response.shardRecoveryStates().keySet()) { - List<ShardRecoveryResponse> shardRecoveryResponses = response.shardResponses().get(index); - if (shardRecoveryResponses.size() == 0) { + List<RecoveryState> shardRecoveryStates = response.shardRecoveryStates().get(index); + if (shardRecoveryStates.size() == 0) { continue; } // Sort ascending by shard id for readability - CollectionUtil.introSort(shardRecoveryResponses, new Comparator<ShardRecoveryResponse>() { + CollectionUtil.introSort(shardRecoveryStates, new Comparator<RecoveryState>() { @Override - public int compare(ShardRecoveryResponse o1, ShardRecoveryResponse o2) { - int id1 = o1.recoveryState().getShardId().id(); - int id2 = o2.recoveryState().getShardId().id(); + public int compare(RecoveryState o1, RecoveryState o2) { + int id1 = o1.getShardId().id(); + int id2 = o2.getShardId().id(); if (id1 < id2) { return -1; } else if (id1 > id2) { @@ -139,12 +138,10 @@ public class RestRecoveryAction extends AbstractCatAction { } }); - for (ShardRecoveryResponse shardResponse : shardRecoveryResponses) { - - RecoveryState state = shardResponse.recoveryState(); + for (RecoveryState state: shardRecoveryStates) { t.startRow(); 
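The RestAnalyzeAction hunks above all share one parsing shape: on START_ARRAY, pull tokens until END_ARRAY and reject any element that is not a scalar. The same loop sketched with Jackson's streaming parser standing in for Elasticsearch's XContentParser; the request body is invented for the example:

```java
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class AnalyzeBodyParseSketch {
    public static void main(String[] args) throws IOException {
        String body = "{\"filters\":[\"lowercase\",\"asciifolding\"]}";
        JsonParser parser = new JsonFactory().createParser(body);

        List<String> filters = new ArrayList<>();
        while (parser.nextToken() != null) {
            if (parser.getCurrentToken() == JsonToken.START_ARRAY) {
                // Same shape as the loops above: consume until END_ARRAY and
                // reject anything that is not a plain scalar value.
                while (parser.nextToken() != JsonToken.END_ARRAY) {
                    if (!parser.getCurrentToken().isScalarValue()) {
                        throw new IllegalArgumentException("array element should only contain a name");
                    }
                    filters.add(parser.getText());
                }
            }
        }
        System.out.println(filters); // [lowercase, asciifolding]
    }
}
```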
t.addCell(index); - t.addCell(shardResponse.getShardId()); + t.addCell(state.getShardId().id()); t.addCell(state.getTimer().time()); t.addCell(state.getType().toString().toLowerCase(Locale.ROOT)); t.addCell(state.getStage().toString().toLowerCase(Locale.ROOT)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java index 972a1bc0110..734fb340090 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java @@ -21,7 +21,11 @@ package org.elasticsearch.rest.action.cat; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.action.admin.indices.segments.*; +import org.elasticsearch.action.admin.indices.segments.IndexSegments; +import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; +import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; +import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest; +import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; @@ -29,7 +33,10 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.engine.Segment; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; @@ -120,8 +127,8 @@ public class RestSegmentsAction extends AbstractCatAction { for (Segment segment : segments) { table.startRow(); - table.addCell(shardSegment.getIndex()); - table.addCell(shardSegment.getShardId()); + table.addCell(shardSegment.getShardRouting().getIndex()); + table.addCell(shardSegment.getShardRouting().getId()); table.addCell(shardSegment.getShardRouting().primary() ? 
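The new RecoveryState comparator above spells the three-way comparison out as an if/else ladder; since Java 7, Integer.compare expresses the same ordering in one line. A self-contained sketch with a hypothetical State class standing in for RecoveryState:

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class ShardIdSortSketch {

    // Hypothetical stand-in for RecoveryState, exposing only the shard id.
    static class State {
        final int shardId;
        State(int shardId) { this.shardId = shardId; }
    }

    public static void main(String[] args) {
        List<State> states = new ArrayList<>();
        states.add(new State(2));
        states.add(new State(0));
        states.add(new State(1));

        // Equivalent to the if/else ladder in the comparator above:
        Collections.sort(states, new Comparator<State>() {
            @Override
            public int compare(State o1, State o2) {
                return Integer.compare(o1.shardId, o2.shardId);
            }
        });

        for (State state : states) {
            System.out.print(state.shardId + " "); // 0 1 2
        }
    }
}
```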
"p" : "r"); table.addCell(nodes.get(shardSegment.getShardRouting().currentNodeId()).getHostAddress()); table.addCell(shardSegment.getShardRouting().currentNodeId()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java index 69f06cef1f1..209ab686ce5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java @@ -50,9 +50,6 @@ public class RestDeleteAction extends BaseRestHandler { @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { DeleteRequest deleteRequest = new DeleteRequest(request.param("index"), request.param("type"), request.param("id")); - - deleteRequest.operationThreaded(true); - deleteRequest.routing(request.param("routing")); deleteRequest.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing deleteRequest.timeout(request.paramAsTime("timeout", DeleteRequest.DEFAULT_TIMEOUT)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index 3d3ecdfa880..d0d0fe68a13 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ -70,7 +70,6 @@ public class RestIndexAction extends BaseRestHandler { @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { IndexRequest indexRequest = new IndexRequest(request.param("index"), request.param("type"), request.param("id")); - indexRequest.operationThreaded(true); indexRequest.routing(request.param("routing")); indexRequest.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing indexRequest.timestamp(request.param("timestamp")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java index 3e6eb713529..e1c62049843 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java @@ -68,7 +68,7 @@ public class RestTable { public static RestResponse buildTextPlainResponse(Table table, RestChannel channel) throws IOException { RestRequest request = channel.request(); - boolean verbose = request.paramAsBoolean("v", true); + boolean verbose = request.paramAsBoolean("v", false); List<DisplayHeader> headers = buildDisplayHeaders(table, request); int[] width = buildWidths(table, request, verbose, headers); diff --git a/core/src/main/java/org/elasticsearch/script/ScriptModule.java b/core/src/main/java/org/elasticsearch/script/ScriptModule.java index f5de92f67d7..1f06d9713e3 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -19,7 +19,6 @@ package org.elasticsearch.script; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; @@ -30,6 +29,7 @@ import org.elasticsearch.script.expression.ExpressionScriptEngineService; import 
org.elasticsearch.script.groovy.GroovyScriptEngineService; import org.elasticsearch.script.mustache.MustacheScriptEngineService; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -41,11 +41,11 @@ public class ScriptModule extends AbstractModule { private final Settings settings; - private final List<Class<? extends ScriptEngineService>> scriptEngines = Lists.newArrayList(); + private final List<Class<? extends ScriptEngineService>> scriptEngines = new ArrayList<>(); private final Map<String, Class<? extends NativeScriptFactory>> scripts = Maps.newHashMap(); - private final List<ScriptContext.Plugin> customScriptContexts = Lists.newArrayList(); + private final List<ScriptContext.Plugin> customScriptContexts = new ArrayList<>(); public ScriptModule(Settings settings) { this.settings = settings; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java index b94f657de94..ed1ba6cb9ec 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java @@ -19,8 +19,6 @@ package org.elasticsearch.search.aggregations; -import com.google.common.collect.Lists; - import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.client.Requests; import org.elasticsearch.common.bytes.BytesArray; @@ -29,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -54,7 +53,7 @@ public abstract class AggregationBuilder<B extends AggregationBuilder<B>> extend @SuppressWarnings("unchecked") public B subAggregation(AbstractAggregationBuilder aggregation) { if (aggregations == null) { - aggregations = Lists.newArrayList(); + aggregations = new ArrayList<>(); } aggregations.add(aggregation); return (B) this; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index d4ed3b1af42..167d2d9d09a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.aggregations; import com.google.common.collect.ImmutableMap; - import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; @@ -130,6 +129,8 @@ public class AggregationPhase implements SearchPhase { context.searcher().search(query, globalsCollector); } catch (Exception e) { throw new QueryPhaseExecutionException(context, "Failed to execute global aggregators", e); + } finally { + context.clearReleasables(SearchContext.Lifetime.COLLECTION); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java index 1f32c0a1687..90c1dee9e50 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java @@ -19,8 +19,6 @@ package org.elasticsearch.search.aggregations; import com.google.common.collect.ImmutableList; -import 
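The AggregationPhase change above is a classic leak fix: the collection-scoped releasables must be cleared even when the global-aggregation search throws, so the cleanup moves into a finally block. The shape of the fix, with a hypothetical Releasable in place of the SearchContext machinery:

```java
import java.util.ArrayList;
import java.util.List;

public class ReleasableCleanupSketch {

    interface Releasable {
        void release();
    }

    private final List<Releasable> collectionScoped = new ArrayList<>();

    void executeGlobalAggregators() {
        try {
            search(); // may throw at any point
        } catch (Exception e) {
            throw new RuntimeException("Failed to execute global aggregators", e);
        } finally {
            // Mirrors context.clearReleasables(Lifetime.COLLECTION): without
            // the finally block, a failing search would leak these resources.
            for (Releasable releasable : collectionScoped) {
                releasable.release();
            }
            collectionScoped.clear();
        }
    }

    void search() {
        // stand-in for context.searcher().search(query, globalsCollector)
    }
}
```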
com.google.common.collect.Lists; - import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -36,6 +34,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams; import org.elasticsearch.search.aggregations.support.AggregationPath; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -228,7 +227,7 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, St if (size == 0) { pipelineAggregators = ImmutableList.of(); } else { - pipelineAggregators = Lists.newArrayListWithCapacity(size); + pipelineAggregators = new ArrayList<>(size); for (int i = 0; i < size; i++) { BytesReference type = in.readBytesReference(); PipelineAggregator pipelineAggregator = PipelineAggregatorStreams.stream(type).readResult(in); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java index ceefcae41b6..e90547eddf4 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java @@ -22,9 +22,7 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; - import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -43,6 +41,7 @@ import java.util.List; import java.util.Map; import static com.google.common.collect.Maps.newHashMap; +import static org.elasticsearch.common.util.CollectionUtils.eagerTransform; /** * An internal implementation of {@link Aggregations}. 
@@ -84,7 +83,7 @@ public class InternalAggregations implements Aggregations, ToXContent, Streamabl */ @Override public List<Aggregation> asList() { - return Lists.transform(aggregations, SUPERTYPE_CAST); + return eagerTransform(aggregations, SUPERTYPE_CAST); } /** @@ -215,7 +214,7 @@ public class InternalAggregations implements Aggregations, ToXContent, Streamabl aggregations = ImmutableList.of(); aggregationsAsMap = ImmutableMap.of(); } else { - aggregations = Lists.newArrayListWithCapacity(size); + aggregations = new ArrayList<>(size); for (int i = 0; i < size; i++) { BytesReference type = in.readBytesReference(); InternalAggregation aggregation = AggregationStreams.stream(type).readResult(in); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java index 625ef0bdc68..781d47f68eb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java @@ -19,8 +19,6 @@ package org.elasticsearch.search.aggregations.bucket.filters; -import com.google.common.collect.Lists; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Query; import org.apache.lucene.search.Weight; @@ -38,6 +36,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -113,7 +112,7 @@ public class FiltersAggregator extends BucketsAggregator { @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { - List<InternalFilters.Bucket> buckets = Lists.newArrayListWithCapacity(filters.length); + List<InternalFilters.Bucket> buckets = new ArrayList<>(filters.length); for (int i = 0; i < keys.length; i++) { long bucketOrd = bucketOrd(owningBucketOrdinal, i); InternalFilters.Bucket bucket = new InternalFilters.Bucket(keys[i], bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed); @@ -132,7 +131,7 @@ public class FiltersAggregator extends BucketsAggregator { @Override public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); - List<InternalFilters.Bucket> buckets = Lists.newArrayListWithCapacity(filters.length); + List<InternalFilters.Bucket> buckets = new ArrayList<>(filters.length); for (int i = 0; i < keys.length; i++) { InternalFilters.Bucket bucket = new InternalFilters.Bucket(keys[i], 0, subAggs, keyed); buckets.add(bucket); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java index f5209330009..e415a025a4c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFilters.java @@ -19,8 +19,6 @@ package org.elasticsearch.search.aggregations.bucket.filters; -import com.google.common.collect.Lists; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -118,7 +116,7 @@ public class InternalFilters extends 
InternalMultiBucketAggregation<InternalFilt Bucket reduce(List<Bucket> buckets, ReduceContext context) { Bucket reduced = null; - List<InternalAggregations> aggregationsList = Lists.newArrayListWithCapacity(buckets.size()); + List<InternalAggregations> aggregationsList = new ArrayList<>(buckets.size()); for (Bucket bucket : buckets) { if (reduced == null) { reduced = new Bucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed); @@ -233,7 +231,7 @@ public class InternalFilters extends InternalMultiBucketAggregation<InternalFilt protected void doReadFrom(StreamInput in) throws IOException { keyed = in.readBoolean(); int size = in.readVInt(); - List<Bucket> buckets = Lists.newArrayListWithCapacity(size); + List<Bucket> buckets = new ArrayList<>(size); for (int i = 0; i < size; i++) { Bucket bucket = new Bucket(keyed); bucket.readFrom(in); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index d7d3993508d..da3bc286ff9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.search.aggregations.bucket.histogram; -import com.google.common.collect.Lists; - import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.io.stream.StreamInput; @@ -42,6 +40,7 @@ import org.elasticsearch.search.aggregations.support.format.ValueFormatterStream import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.ListIterator; @@ -464,7 +463,9 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter // maintains order } else if (order == InternalOrder.KEY_DESC) { // we just need to reverse here... 
- reducedBuckets = Lists.reverse(reducedBuckets); + List<B> reverse = new ArrayList<>(reducedBuckets); + Collections.reverse(reverse); + reducedBuckets = reverse; } else { // sorted by sub-aggregation, need to fall back to a costly n*log(n) sort CollectionUtil.introSort(reducedBuckets, order.comparator()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java index e15aa57538d..c4f0c7600e7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.bucket.range; -import com.google.common.collect.Lists; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilderException; import java.io.IOException; +import java.util.ArrayList; import java.util.List; /** @@ -61,7 +61,7 @@ public abstract class AbstractRangeBuilder<B extends AbstractRangeBuilder<B>> ex } } - protected List<Range> ranges = Lists.newArrayList(); + protected List<Range> ranges = new ArrayList<>(); protected AbstractRangeBuilder(String name, String type) { super(name, type); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java index 73567183414..5303d7f3247 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.search.aggregations.bucket.range; -import com.google.common.collect.Lists; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -167,7 +165,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan Bucket reduce(List<Bucket> ranges, ReduceContext context) { long docCount = 0; - List<InternalAggregations> aggregationsList = Lists.newArrayListWithCapacity(ranges.size()); + List<InternalAggregations> aggregationsList = new ArrayList<>(ranges.size()); for (Bucket range : ranges) { docCount += range.docCount; aggregationsList.add(range.aggregations); @@ -315,7 +313,7 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan formatter = ValueFormatterStreams.readOptional(in); keyed = in.readBoolean(); int size = in.readVInt(); - List<B> ranges = Lists.newArrayListWithCapacity(size); + List<B> ranges = new ArrayList<>(size); for (int i = 0; i < size; i++) { String key = in.readOptionalString(); ranges.add(getFactory().createBucket(key, in.readDouble(), in.readDouble(), in.readVLong(), InternalAggregations.readAggregations(in), keyed, formatter)); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index f8e5782b757..125fca43500 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.search.aggregations.bucket.range; -import com.google.common.collect.Lists; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.InPlaceMergeSorter; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; @@ -198,7 +196,7 @@ public class RangeAggregator extends BucketsAggregator { @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { - List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = Lists.newArrayListWithCapacity(ranges.length); + List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = new ArrayList<>(ranges.length); for (int i = 0; i < ranges.length; i++) { Range range = ranges[i]; final long bucketOrd = subBucketOrdinal(owningBucketOrdinal, i); @@ -213,7 +211,7 @@ public class RangeAggregator extends BucketsAggregator { @Override public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); - List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = Lists.newArrayListWithCapacity(ranges.length); + List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = new ArrayList<>(ranges.length); for (int i = 0; i < ranges.length; i++) { Range range = ranges[i]; org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket = diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceBuilder.java index 2b03503d0ee..e6649fa6a90 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.aggregations.bucket.range.geodistance; -import com.google.common.collect.Lists; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.unit.DistanceUnit; @@ -29,6 +28,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilderException; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Locale; @@ -80,7 +80,7 @@ public class GeoDistanceBuilder extends AggregationBuilder<GeoDistanceBuilder> { private GeoDistance distanceType; private GeoPoint point; - private List<Range> ranges = Lists.newArrayList(); + private List<Range> ranges = new ArrayList<>(); /** * Sole constructor. 
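[Annotation] The InternalHistogram hunk above swaps Guava's Lists.reverse for an explicit copy reversed with Collections.reverse. Lists.reverse returns a live view backed by the original list, while the replacement produces an independent ArrayList; a small self-contained illustration of the difference, with illustrative names:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public final class ReverseCopyExample {
        public static void main(String[] args) {
            List<String> buckets = new ArrayList<>(Arrays.asList("a", "b", "c"));

            // Copy first, then reverse in place: the original order is kept
            // and the reversed list can be sorted or mutated independently.
            List<String> reversed = new ArrayList<>(buckets);
            Collections.reverse(reversed);

            System.out.println(buckets);  // [a, b, c]
            System.out.println(reversed); // [c, b, a]
        }
    }

The surrounding hunks make the same kind of one-for-one substitution for construction: Lists.newArrayList() becomes new ArrayList<>(), and Lists.newArrayListWithCapacity(size) becomes new ArrayList<>(size), which presizes the backing array for the fill loop that follows.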
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java index afe3acf0a19..e110cc17288 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java @@ -145,8 +145,8 @@ public class GeoDistanceParser implements Aggregator.Parser { + currentFieldName + "].", parser.getTokenLocation()); } } else { - throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].", - parser.getTokenLocation()); + throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "]: [" + + currentFieldName + "].", parser.getTokenLocation()); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java index cb14b0df4c2..fcccba073c9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java @@ -19,8 +19,6 @@ package org.elasticsearch.search.aggregations.pipeline; -import com.google.common.collect.Lists; - import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; @@ -33,6 +31,8 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import static org.elasticsearch.common.util.CollectionUtils.eagerTransform; + public abstract class SiblingPipelineAggregator extends PipelineAggregator { protected SiblingPipelineAggregator() { // for Serialisation @@ -54,7 +54,7 @@ public abstract class SiblingPipelineAggregator extends PipelineAggregator { for (int i = 0; i < buckets.size(); i++) { InternalMultiBucketAggregation.InternalBucket bucket = (InternalMultiBucketAggregation.InternalBucket) buckets.get(i); InternalAggregation aggToAdd = doReduce(bucket.getAggregations(), reduceContext); - List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), + List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); aggs.add(aggToAdd); InternalMultiBucketAggregation.InternalBucket newBucket = multiBucketsAgg.createBucket(new InternalAggregations(aggs), @@ -66,7 +66,7 @@ public abstract class SiblingPipelineAggregator extends PipelineAggregator { } else if (aggregation instanceof InternalSingleBucketAggregation) { InternalSingleBucketAggregation singleBucketAgg = (InternalSingleBucketAggregation) aggregation; InternalAggregation aggToAdd = doReduce(singleBucketAgg.getAggregations(), reduceContext); - List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(singleBucketAgg.getAggregations().asList(), + List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(singleBucketAgg.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); aggs.add(aggToAdd); return singleBucketAgg.create(new InternalAggregations(aggs)); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java index 3b2a4a5a09b..06559645821 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java @@ -20,8 +20,6 @@ package org.elasticsearch.search.aggregations.pipeline.bucketscript; import com.google.common.base.Function; -import com.google.common.collect.Lists; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.script.CompiledScript; @@ -50,6 +48,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.elasticsearch.common.util.CollectionUtils.eagerTransform; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; public class BucketScriptPipelineAggregator extends PipelineAggregator { @@ -135,7 +134,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator { throw new AggregationExecutionException("series_arithmetic script for reducer [" + name() + "] must return a Number"); } - List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), FUNCTION)); + List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), FUNCTION)); aggs.add(new InternalSimpleValue(name(), ((Number) returned).doubleValue(), formatter, new ArrayList<PipelineAggregator>(), metaData())); InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(new InternalAggregations(aggs), diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java index 9070cc6aafb..cafefdba2fe 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java @@ -19,8 +19,6 @@ package org.elasticsearch.search.aggregations.pipeline.cumulativesum; -import com.google.common.collect.Lists; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -43,6 +41,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import static org.elasticsearch.common.util.CollectionUtils.eagerTransform; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; public class CumulativeSumPipelineAggregator extends PipelineAggregator { @@ -89,7 +88,7 @@ public class CumulativeSumPipelineAggregator extends PipelineAggregator { for (InternalHistogram.Bucket bucket : buckets) { Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], GapPolicy.INSERT_ZEROS); sum += thisBucketValue; - List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), + List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); aggs.add(new InternalSimpleValue(name(), sum, formatter, new ArrayList<PipelineAggregator>(), metaData())); 
InternalHistogram.Bucket newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java index eeb3192598e..41b904fef88 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java @@ -19,8 +19,6 @@ package org.elasticsearch.search.aggregations.pipeline.derivative; -import com.google.common.collect.Lists; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationExecutionException; @@ -44,6 +42,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import static org.elasticsearch.common.util.CollectionUtils.eagerTransform; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; public class DerivativePipelineAggregator extends PipelineAggregator { @@ -101,7 +100,7 @@ public class DerivativePipelineAggregator extends PipelineAggregator { if (xAxisUnits != null) { xDiff = (thisBucketKey - lastBucketKey) / xAxisUnits; } - List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), + List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); aggs.add(new InternalDerivative(name(), gradient, xDiff, formatter, new ArrayList<PipelineAggregator>(), metaData())); InternalHistogram.Bucket newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations( diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java index 5b7a8675540..b1dd92d2122 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java @@ -21,8 +21,6 @@ package org.elasticsearch.search.aggregations.pipeline.movavg; import com.google.common.base.Function; import com.google.common.collect.EvictingQueue; -import com.google.common.collect.Lists; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.Aggregation; @@ -51,6 +49,7 @@ import java.util.List; import java.util.ListIterator; import java.util.Map; +import static org.elasticsearch.common.util.CollectionUtils.eagerTransform; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; public class MovAvgPipelineAggregator extends PipelineAggregator { @@ -135,7 +134,7 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { if (model.hasValue(values.size())) { double movavg = model.next(values); - List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); + List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); aggs.add(new 
InternalSimpleValue(name(), movavg, formatter, new ArrayList<PipelineAggregator>(), metaData())); newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations( aggs), bucket.getKeyed(), bucket.getFormatter()); @@ -176,7 +175,7 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { InternalHistogram.Bucket bucket = (InternalHistogram.Bucket) newBuckets.get(lastValidPosition + i + 1); // Get the existing aggs in the bucket so we don't clobber data - aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); + aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList<PipelineAggregator>(), metaData())); InternalHistogram.Bucket newBucket = factory.createBucket(newKey, 0, new InternalAggregations( diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregator.java index 3517a621427..e89813caabb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregator.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.pipeline.serialdiff; import com.google.common.collect.EvictingQueue; -import com.google.common.collect.Lists; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -29,7 +28,10 @@ import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation.Type; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; -import org.elasticsearch.search.aggregations.pipeline.*; +import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams; @@ -38,8 +40,9 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; +import static org.elasticsearch.common.util.CollectionUtils.eagerTransform; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; +import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; public class SerialDiffPipelineAggregator extends PipelineAggregator { @@ -111,7 +114,7 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator { if (!Double.isNaN(thisBucketValue) && !Double.isNaN(lagValue)) { double diff = thisBucketValue - lagValue; - List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); + List<InternalAggregation> aggs = new 
ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); aggs.add(new InternalSimpleValue(name(), diff, formatter, new ArrayList<PipelineAggregator>(), metaData())); newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations( aggs), bucket.getKeyed(), bucket.getFormatter()); diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 0eb4c5cc882..90ce07b87ff 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -22,8 +22,6 @@ package org.elasticsearch.search.builder; import com.carrotsearch.hppc.ObjectFloatHashMap; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; - import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.support.QuerySourceBuilder; import org.elasticsearch.action.support.ToXContentToBytes; @@ -37,7 +35,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.fetch.source.FetchSourceContext; @@ -50,7 +47,11 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; /** * A search source builder allowing to easily build search source. 
Simple @@ -360,7 +361,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { */ public SearchSourceBuilder sort(SortBuilder sort) { if (sorts == null) { - sorts = Lists.newArrayList(); + sorts = new ArrayList<>(); } sorts.add(sort); return this; @@ -380,7 +381,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { */ public SearchSourceBuilder aggregation(AbstractAggregationBuilder aggregation) { if (aggregations == null) { - aggregations = Lists.newArrayList(); + aggregations = new ArrayList<>(); } aggregations.add(aggregation); return this; @@ -599,7 +600,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { */ public SearchSourceBuilder scriptField(String name, Script script) { if (scriptFields == null) { - scriptFields = Lists.newArrayList(); + scriptFields = new ArrayList<>(); } scriptFields.add(new ScriptField(name, script)); return this; diff --git a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java index 7faaea72ef5..74e263220e4 100644 --- a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java @@ -21,8 +21,6 @@ package org.elasticsearch.search.controller; import com.carrotsearch.hppc.IntArrayList; import com.carrotsearch.hppc.ObjectObjectHashMap; -import com.google.common.collect.Lists; - import org.apache.lucene.index.Term; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.FieldDoc; @@ -65,6 +63,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.elasticsearch.common.util.CollectionUtils.eagerTransform; + /** * */ @@ -411,7 +411,7 @@ public class SearchPhaseController extends AbstractComponent { if (aggregations != null) { List<SiblingPipelineAggregator> pipelineAggregators = firstResult.pipelineAggregators(); if (pipelineAggregators != null) { - List<InternalAggregation> newAggs = new ArrayList<>(Lists.transform(aggregations.asList(), PipelineAggregator.AGGREGATION_TRANFORM_FUNCTION)); + List<InternalAggregation> newAggs = new ArrayList<>(eagerTransform(aggregations.asList(), PipelineAggregator.AGGREGATION_TRANFORM_FUNCTION)); for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) { InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext(bigArrays, scriptService)); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 81f13c32727..f9156866200 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -45,20 +45,12 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.internal.SourceFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.SearchPhase; -import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase; -import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase; import 
org.elasticsearch.search.fetch.innerhits.InnerHitsFetchSubPhase; -import org.elasticsearch.search.fetch.matchedqueries.MatchedQueriesFetchSubPhase; -import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase; import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.fetch.source.FetchSourceSubPhase; -import org.elasticsearch.search.fetch.version.VersionFetchSubPhase; -import org.elasticsearch.search.highlight.HighlightPhase; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHitField; import org.elasticsearch.search.internal.InternalSearchHits; @@ -73,7 +65,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.common.xcontent.XContentFactory.contentBuilder; /** @@ -146,7 +137,7 @@ public class FetchPhase implements SearchPhase { fieldNames.add(fieldType.names().indexName()); } else { if (extractFieldNames == null) { - extractFieldNames = newArrayList(); + extractFieldNames = new ArrayList<>(); } extractFieldNames.add(fieldName); } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/explain/ExplainFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/explain/ExplainFetchSubPhase.java index 026e421a589..1c0eeaa0ef9 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/explain/ExplainFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/explain/ExplainFetchSubPhase.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.fetch.explain; import com.google.common.collect.ImmutableMap; import org.apache.lucene.search.Explanation; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -68,6 +67,8 @@ public class ExplainFetchSubPhase implements FetchSubPhase { hitContext.hit().explanation(explanation); } catch (IOException e) { throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().type() + "#" + hitContext.hit().id() + "]", e); + } finally { + context.clearReleasables(SearchContext.Lifetime.COLLECTION); } } } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsContext.java index 9f1bfc08ed7..c352e6a945e 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsContext.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.search.fetch.fielddata; -import com.google.common.collect.Lists; import org.elasticsearch.search.fetch.FetchSubPhaseContext; +import java.util.ArrayList; import java.util.List; /** @@ -40,7 +40,7 @@ public class FieldDataFieldsContext extends FetchSubPhaseContext { } } - private List<FieldDataField> fields = Lists.newArrayList(); + private List<FieldDataField> fields = new ArrayList<>(); public FieldDataFieldsContext() { } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java index b639a387238..75b4b4f912d 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java +++ 
b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java @@ -135,7 +135,11 @@ public final class InnerHitsContext { } else { topDocsCollector = TopScoreDocCollector.create(topN); } - context.searcher().search(q, topDocsCollector); + try { + context.searcher().search(q, topDocsCollector); + } finally { + clearReleasables(Lifetime.COLLECTION); + } return topDocsCollector.topDocs(from(), size()); } } @@ -306,7 +310,11 @@ public final class InnerHitsContext { } else { topDocsCollector = TopScoreDocCollector.create(topN); } - context.searcher().search( q, topDocsCollector); + try { + context.searcher().search(q, topDocsCollector); + } finally { + clearReleasables(Lifetime.COLLECTION); + } return topDocsCollector.topDocs(from(), size()); } } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsSubSearchContext.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsSubSearchContext.java new file mode 100644 index 00000000000..35ab2e1bba7 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsSubSearchContext.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.fetch.innerhits; + +import org.elasticsearch.search.internal.SubSearchContext; + +public class InnerHitsSubSearchContext { + private final String name; + private final SubSearchContext subSearchContext; + + public InnerHitsSubSearchContext(String name, SubSearchContext subSearchContext) { + this.name = name; + this.subSearchContext = subSearchContext; + } + + public String getName() { + return name; + } + + public SubSearchContext getSubSearchContext() { + return subSearchContext; + } +} diff --git a/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java index ac4dfa5f7e3..75e62d63f9d 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java @@ -19,8 +19,6 @@ package org.elasticsearch.search.fetch.matchedqueries; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; - import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; @@ -33,6 +31,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext.Lifetime; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -63,7 +62,7 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase { @Override public void hitExecute(SearchContext context, HitContext hitContext) { - List<String> matchedQueries = Lists.newArrayListWithCapacity(2); + List<String> matchedQueries = new ArrayList<>(2); try { addMatchedQueries(hitContext, context.parsedQuery().namedFilters(), matchedQueries); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsContext.java index 27a7b2a125a..9cf680d228a 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsContext.java @@ -19,9 +19,9 @@ package org.elasticsearch.search.fetch.script; -import com.google.common.collect.Lists; import org.elasticsearch.script.SearchScript; +import java.util.ArrayList; import java.util.List; /** @@ -53,7 +53,7 @@ public class ScriptFieldsContext { } } - private List<ScriptField> fields = Lists.newArrayList(); + private List<ScriptField> fields = new ArrayList<>(); public ScriptFieldsContext() { } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java index 79fb54c0ee3..c082859a0ed 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java @@ -26,11 +26,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; - /** * A builder for search highlighting. Settings can control how large fields * are summarized to show only selected snippets ("fragments") containing search terms. 
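[Annotation] A recurring change in this diff, visible in AggregationPhase and ExplainFetchSubPhase above, in the InnerHitsContext hunks just before this new file, and in QueryPhase below, is that COLLECTION-scoped releasables are now cleared at each search call site instead of inside ContextIndexSearcher, whose cleanup-wrapping overrides are deleted in a later hunk. A minimal sketch of the pattern each call site now follows; the wrapper method is hypothetical, while SearchContext, Lifetime, and clearReleasables are the real types used in these hunks:

    import org.apache.lucene.search.Collector;
    import org.apache.lucene.search.Query;
    import org.elasticsearch.search.internal.SearchContext;

    import java.io.IOException;

    final class ReleasableCleanupSketch {

        // The finally block runs whether the search completes or throws,
        // so collection-lifetime resources are freed on every code path.
        static void searchAndClear(SearchContext context, Query query, Collector collector) throws IOException {
            try {
                context.searcher().search(query, collector);
            } finally {
                context.clearReleasables(SearchContext.Lifetime.COLLECTION);
            }
        }
    }

Moving the try/finally out of ContextIndexSearcher also lets its rewrite and createNormalizedWeight methods shed their catch-and-convert wrappers, as the ContextIndexSearcher hunks below show.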
@@ -87,7 +86,7 @@ public class HighlightBuilder implements ToXContent { */ public HighlightBuilder field(String name) { if (fields == null) { - fields = newArrayList(); + fields = new ArrayList<>(); } fields.add(new Field(name)); return this; @@ -103,7 +102,7 @@ public class HighlightBuilder implements ToXContent { */ public HighlightBuilder field(String name, int fragmentSize) { if (fields == null) { - fields = newArrayList(); + fields = new ArrayList<>(); } fields.add(new Field(name).fragmentSize(fragmentSize)); return this; @@ -120,7 +119,7 @@ public class HighlightBuilder implements ToXContent { */ public HighlightBuilder field(String name, int fragmentSize, int numberOfFragments) { if (fields == null) { - fields = newArrayList(); + fields = new ArrayList<>(); } fields.add(new Field(name).fragmentSize(fragmentSize).numOfFragments(numberOfFragments)); return this; @@ -138,7 +137,7 @@ public class HighlightBuilder implements ToXContent { */ public HighlightBuilder field(String name, int fragmentSize, int numberOfFragments, int fragmentOffset) { if (fields == null) { - fields = newArrayList(); + fields = new ArrayList<>(); } fields.add(new Field(name).fragmentSize(fragmentSize).numOfFragments(numberOfFragments) .fragmentOffset(fragmentOffset)); @@ -147,7 +146,7 @@ public class HighlightBuilder implements ToXContent { public HighlightBuilder field(Field field) { if (fields == null) { - fields = newArrayList(); + fields = new ArrayList<>(); } fields.add(field); return this; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java index 802122f648d..74723c7e3f5 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java @@ -19,9 +19,7 @@ package org.elasticsearch.search.highlight; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; - import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.XContentParser; @@ -31,11 +29,10 @@ import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Set; -import static com.google.common.collect.Lists.newArrayList; - /** * <pre> * highlight : { @@ -78,7 +75,7 @@ public class HighlighterParseElement implements SearchParseElement { public SearchContextHighlight parse(XContentParser parser, IndexQueryParserService queryParserService) throws IOException { XContentParser.Token token; String topLevelFieldName = null; - final List<Tuple<String, SearchContextHighlight.FieldOptions.Builder>> fieldsOptions = newArrayList(); + final List<Tuple<String, SearchContextHighlight.FieldOptions.Builder>> fieldsOptions = new ArrayList<>(); final SearchContextHighlight.FieldOptions.Builder globalOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder() .preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(false).highlightFilter(false) @@ -92,13 +89,13 @@ public class HighlighterParseElement implements SearchParseElement { topLevelFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_ARRAY) { if ("pre_tags".equals(topLevelFieldName) || "preTags".equals(topLevelFieldName)) { - List<String> preTagsList = 
Lists.newArrayList(); + List<String> preTagsList = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { preTagsList.add(parser.text()); } globalOptionsBuilder.preTags(preTagsList.toArray(new String[preTagsList.size()])); } else if ("post_tags".equals(topLevelFieldName) || "postTags".equals(topLevelFieldName)) { - List<String> postTagsList = Lists.newArrayList(); + List<String> postTagsList = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { postTagsList.add(parser.text()); } @@ -184,7 +181,7 @@ public class HighlighterParseElement implements SearchParseElement { throw new IllegalArgumentException("Highlighter global preTags are set, but global postTags are not set"); } - final List<SearchContextHighlight.Field> fields = Lists.newArrayList(); + final List<SearchContextHighlight.Field> fields = new ArrayList<>(); // now, go over and fill all fieldsOptions with default values from the global state for (final Tuple<String, SearchContextHighlight.FieldOptions.Builder> tuple : fieldsOptions) { fields.add(new SearchContextHighlight.Field(tuple.v1(), tuple.v2().merge(globalOptions).build())); @@ -202,13 +199,13 @@ public class HighlighterParseElement implements SearchParseElement { fieldName = parser.currentName(); } else if (token == XContentParser.Token.START_ARRAY) { if ("pre_tags".equals(fieldName) || "preTags".equals(fieldName)) { - List<String> preTagsList = Lists.newArrayList(); + List<String> preTagsList = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { preTagsList.add(parser.text()); } fieldOptionsBuilder.preTags(preTagsList.toArray(new String[preTagsList.size()])); } else if ("post_tags".equals(fieldName) || "postTags".equals(fieldName)) { - List<String> postTagsList = Lists.newArrayList(); + List<String> postTagsList = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { postTagsList.add(parser.text()); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 2f55bf8f541..369d7616bcc 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -19,18 +19,14 @@ package org.elasticsearch.search.internal; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermContext; import org.apache.lucene.search.*; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.search.dfs.AggregatedDfs; -import org.elasticsearch.search.internal.SearchContext.Lifetime; import java.io.IOException; -import java.util.List; /** * Context-aware extension of {@link IndexSearcher}. @@ -42,14 +38,11 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { * AssertingIndexSearcher. 
*/ private final IndexSearcher in; - private final SearchContext searchContext; - private AggregatedDfs aggregatedDfs; public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) { super(searcher.reader()); in = searcher.searcher(); - this.searchContext = searchContext; setSimilarity(searcher.searcher().getSimilarity(true)); setQueryCache(searchContext.indexShard().indexService().cache().query()); setQueryCachingPolicy(searchContext.indexShard().getQueryCachingPolicy()); @@ -65,46 +58,23 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { @Override public Query rewrite(Query original) throws IOException { - try { - return in.rewrite(original); - } catch (Throwable t) { - searchContext.clearReleasables(Lifetime.COLLECTION); - throw ExceptionsHelper.convertToElastic(t); - } + return in.rewrite(original); } @Override public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException { // During tests we prefer to use the wrapped IndexSearcher, because then we use the AssertingIndexSearcher // it is hacky, because if we perform a dfs search, we don't use the wrapped IndexSearcher... - try { + if (aggregatedDfs != null && needsScores) { // if scores are needed and we have dfs data then use it - if (aggregatedDfs != null && needsScores) { - return super.createNormalizedWeight(query, needsScores); - } - return in.createNormalizedWeight(query, needsScores); - } catch (Throwable t) { - searchContext.clearReleasables(Lifetime.COLLECTION); - throw ExceptionsHelper.convertToElastic(t); + return super.createNormalizedWeight(query, needsScores); } + return in.createNormalizedWeight(query, needsScores); } @Override public Explanation explain(Query query, int doc) throws IOException { - try { - return in.explain(query, doc); - } finally { - searchContext.clearReleasables(Lifetime.COLLECTION); - } - } - - @Override - protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException { - try { - super.search(leaves, weight, collector); - } finally { - searchContext.clearReleasables(Lifetime.COLLECTION); - } + return in.explain(query, doc); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index a3015b94bab..fdaac5e96aa 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -21,14 +21,21 @@ package org.elasticsearch.search.internal; import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; - import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.Sort; import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.common.*; +import org.elasticsearch.common.HasContext; +import org.elasticsearch.common.HasContextAndHeaders; +import org.elasticsearch.common.HasHeaders; +import org.elasticsearch.common.Nullable; +import 
org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.search.Queries; @@ -67,7 +74,12 @@ import org.elasticsearch.search.scan.ScanContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * @@ -567,7 +579,7 @@ public class DefaultSearchContext extends SearchContext { @Override public List<String> fieldNames() { if (fieldNames == null) { - fieldNames = Lists.newArrayList(); + fieldNames = new ArrayList<>(); } return fieldNames; } diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java index f2ed2676f7c..7a349cfbbb9 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.internal; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.apache.lucene.search.Explanation; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; @@ -47,6 +46,7 @@ import org.elasticsearch.search.internal.InternalSearchHits.StreamContext.ShardT import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; +import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -431,8 +431,8 @@ public class InternalSearchHit implements SearchHit { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - List<SearchHitField> metaFields = Lists.newArrayList(); - List<SearchHitField> otherFields = Lists.newArrayList(); + List<SearchHitField> metaFields = new ArrayList<>(); + List<SearchHitField> otherFields = new ArrayList<>(); if (fields != null && !fields.isEmpty()) { for (SearchHitField field : fields.values()) { if (field.values().isEmpty()) { diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 781d13a3b86..a7e45d18fb6 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.internal; import com.google.common.collect.Iterables; import com.google.common.collect.Multimap; import com.google.common.collect.MultimapBuilder; - import org.apache.lucene.search.Collector; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; @@ -46,7 +45,6 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.Scroll; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.SearchContextAggregations; import org.elasticsearch.search.dfs.DfsSearchResult; diff --git a/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java 
index a1a6fd0abff..4596375d119 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.internal; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.apache.lucene.search.Filter; import org.apache.lucene.search.Sort; import org.apache.lucene.util.Counter; @@ -36,6 +35,7 @@ import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; +import java.util.ArrayList; import java.util.List; /** @@ -233,7 +233,7 @@ public class SubSearchContext extends FilteredSearchContext { @Override public List<String> fieldNames() { if (fieldNames == null) { - fieldNames = Lists.newArrayList(); + fieldNames = new ArrayList<>(); } return fieldNames; } diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 8f21f460130..06451af2be9 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -20,24 +20,8 @@ package org.elasticsearch.search.query; import com.google.common.collect.ImmutableMap; - import org.apache.lucene.queries.MinDocQuery; -import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.Collector; -import org.apache.lucene.search.FieldDoc; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.MultiCollector; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.Sort; -import org.apache.lucene.search.TimeLimitingCollector; -import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; -import org.apache.lucene.search.TotalHitCountCollector; -import org.apache.lucene.search.Weight; +import org.apache.lucene.search.*; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.Lucene; @@ -173,8 +157,8 @@ public class QueryPhase implements SearchPhase { // skip to the desired doc and stop collecting after ${size} matches if (scrollContext.lastEmittedDoc != null) { BooleanQuery bq = new BooleanQuery(); - bq.add(query, Occur.MUST); - bq.add(new MinDocQuery(lastEmittedDoc.doc + 1), Occur.FILTER); + bq.add(query, BooleanClause.Occur.MUST); + bq.add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER); query = bq; } searchContext.terminateAfter(numDocs); @@ -264,13 +248,15 @@ public class QueryPhase implements SearchPhase { } try { - searcher.search(query, collector); + searchContext.searcher().search(query, collector); } catch (TimeLimitingCollector.TimeExceededException e) { assert timeoutSet : "TimeExceededException thrown even though timeout wasn't set"; searchContext.queryResult().searchTimedOut(true); } catch (Lucene.EarlyTerminationException e) { assert terminateAfterSet : "EarlyTerminationException thrown even though terminateAfter wasn't set"; searchContext.queryResult().terminatedEarly(true); + } finally { + searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION); } if (terminateAfterSet && 
searchContext.queryResult().terminatedEarly() == null) { searchContext.queryResult().terminatedEarly(false); diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java index 39da982e13f..2f4dcb37acb 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java @@ -20,8 +20,6 @@ package org.elasticsearch.search.sort; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; - import org.apache.lucene.search.Filter; import org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.Sort; @@ -44,6 +42,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SubSearchContext; import java.io.IOException; +import java.util.ArrayList; import java.util.List; /** @@ -80,7 +79,7 @@ public class SortParseElement implements SearchParseElement { @Override public void parse(XContentParser parser, SearchContext context) throws Exception { XContentParser.Token token = parser.currentToken(); - List<SortField> sortFields = Lists.newArrayListWithCapacity(2); + List<SortField> sortFields = new ArrayList<>(2); if (token == XContentParser.Token.START_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.START_OBJECT) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/context/CategoryContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/context/CategoryContextMapping.java index 975b42a0f01..ad075ac9b38 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/context/CategoryContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/context/CategoryContextMapping.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.suggest.context; import com.google.common.base.Joiner; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import org.apache.lucene.analysis.PrefixAnalyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.index.IndexableField; @@ -36,7 +35,11 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; /** * The {@link CategoryContextMapping} is used to define a {@link ContextMapping} that @@ -165,7 +168,7 @@ public class CategoryContextMapping extends ContextMapping { } else if (token == Token.VALUE_BOOLEAN) { return new FieldConfig(fieldName, null, Collections.singleton(parser.text())); } else if (token == Token.START_ARRAY) { - ArrayList<String> values = Lists.newArrayList(); + ArrayList<String> values = new ArrayList<>(); while((token = parser.nextToken()) != Token.END_ARRAY) { values.add(parser.text()); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/context/ContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/context/ContextMapping.java index 43956c99dc9..30aaab69153 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/context/ContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/context/ContextMapping.java @@ -20,9 +20,7 @@ package org.elasticsearch.search.suggest.context; import com.google.common.collect.Iterables; -import 
com.google.common.collect.Lists; import com.google.common.collect.Maps; - import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester; import org.apache.lucene.util.automaton.Automata; @@ -39,7 +37,12 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.SortedMap; /** * A {@link ContextMapping} is used to define a context that may be used @@ -300,7 +303,7 @@ } } - List<ContextQuery> queries = Lists.newArrayListWithExpectedSize(mappings.size()); + List<ContextQuery> queries = new ArrayList<>(mappings.size()); for (ContextMapping mapping : mappings.values()) { queries.add(querySet.get(mapping.name)); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/context/GeolocationContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/context/GeolocationContextMapping.java index 22975ff83f3..a02868aa49e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/context/GeolocationContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/context/GeolocationContextMapping.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest.context; import com.carrotsearch.hppc.IntHashSet; -import com.google.common.collect.Lists; import org.apache.lucene.analysis.PrefixAnalyzer.PrefixTokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.index.DocValuesType; @@ -43,7 +42,13 @@ import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; /** * The {@link GeolocationContextMapping} allows taking GeoInformation into account @@ -231,7 +236,7 @@ } } else { // otherwise it's a list of locations - ArrayList<String> result = Lists.newArrayList(); + ArrayList<String> result = new ArrayList<>(); while (token != Token.END_ARRAY) { result.add(GeoUtils.parseGeoPoint(parser).geohash()); token = parser.nextToken(); //infinite loop without this line diff --git a/core/src/main/java/org/elasticsearch/search/warmer/IndexWarmersMetaData.java b/core/src/main/java/org/elasticsearch/search/warmer/IndexWarmersMetaData.java index f2a996d2566..336f849172a 100644 --- a/core/src/main/java/org/elasticsearch/search/warmer/IndexWarmersMetaData.java +++ b/core/src/main/java/org/elasticsearch/search/warmer/IndexWarmersMetaData.java @@ -21,8 +21,6 @@ package org.elasticsearch.search.warmer; import com.google.common.base.Objects; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; - import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable; @@ -31,7 +29,11 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import
org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; @@ -260,7 +262,7 @@ public class IndexWarmersMetaData extends AbstractDiffable<IndexMetaData.Custom> @Override public IndexMetaData.Custom mergeWith(IndexMetaData.Custom other) { IndexWarmersMetaData second = (IndexWarmersMetaData) other; - List<Entry> entries = Lists.newArrayList(); + List<Entry> entries = new ArrayList<>(); entries.addAll(entries()); for (Entry secondEntry : second.entries()) { boolean found = false; diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index d8c6fdc8b07..8738de1d948 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -25,15 +25,31 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; - import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.*; -import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.metadata.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ProcessedClusterStateUpdateTask; +import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.RestoreInProgress.ShardRestoreStatus; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.TimeoutClusterStateUpdateTask; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; +import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; +import org.elasticsearch.cluster.metadata.RepositoriesMetaData; +import org.elasticsearch.cluster.metadata.SnapshotId; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RestoreSource; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.settings.ClusterDynamicSettings; @@ -50,20 +66,28 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.StoreRecoveryService; import org.elasticsearch.index.shard.ShardId; +import 
org.elasticsearch.index.shard.StoreRecoveryService; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CopyOnWriteArrayList; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newHashMap; import static com.google.common.collect.Sets.newHashSet; import static org.elasticsearch.cluster.metadata.IndexMetaData.*; @@ -350,7 +374,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis Settings normalizedChangeSettings = Settings.settingsBuilder().put(changeSettings).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX).build(); IndexMetaData.Builder builder = IndexMetaData.builder(indexMetaData); Map<String, String> settingsMap = newHashMap(indexMetaData.settings().getAsMap()); - List<String> simpleMatchPatterns = newArrayList(); + List<String> simpleMatchPatterns = new ArrayList<>(); for (String ignoredSetting : ignoreSettings) { if (!Regex.isSimpleMatchPattern(ignoredSetting)) { if (UNREMOVABLE_SETTINGS.contains(ignoredSetting)) { @@ -515,7 +539,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis final RestoreInProgress restore = currentState.custom(RestoreInProgress.TYPE); if (restore != null) { int changedCount = 0; - final List<RestoreInProgress.Entry> entries = newArrayList(); + final List<RestoreInProgress.Entry> entries = new ArrayList<>(); for (RestoreInProgress.Entry entry : restore.entries()) { Map<ShardId, ShardRestoreStatus> shards = null; @@ -577,7 +601,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis final RestoreInfo restoreInfo = entry.getValue().v1(); final Map<ShardId, ShardRestoreStatus> shards = entry.getValue().v2(); RoutingTable routingTable = newState.getRoutingTable(); - final List<ShardId> waitForStarted = newArrayList(); + final List<ShardId> waitForStarted = new ArrayList<>(); for (Map.Entry<ShardId, ShardRestoreStatus> shard : shards.entrySet()) { if (shard.getValue().state() == RestoreInProgress.State.SUCCESS ) { ShardId shardId = shard.getKey(); @@ -711,7 +735,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis if (!shard.getValue().state().completed()) { if (!event.state().metaData().hasIndex(shard.getKey().getIndex())) { if (shardsToFail == null) { - shardsToFail = newArrayList(); + shardsToFail = new ArrayList<>(); } shardsToFail.add(shard.getKey()); } diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 03b516f7cdc..b6083630f4f 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -21,7 +21,12 @@ 
package org.elasticsearch.snapshots; import com.google.common.collect.ImmutableMap; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -43,10 +48,17 @@ import org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; @@ -54,7 +66,6 @@ import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newHashMap; import static org.elasticsearch.cluster.SnapshotsInProgress.completed; @@ -506,7 +517,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent<SnapshotSh final SnapshotsInProgress snapshots = currentState.custom(SnapshotsInProgress.TYPE); if (snapshots != null) { int changedCount = 0; - final List<SnapshotsInProgress.Entry> entries = newArrayList(); + final List<SnapshotsInProgress.Entry> entries = new ArrayList<>(); for (SnapshotsInProgress.Entry entry : snapshots.entries()) { final Map<ShardId, SnapshotsInProgress.ShardSnapshotStatus> shards = newHashMap(); boolean updated = false; diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index da796d1b502..79f6e863f6a 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -25,10 +25,21 @@ import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.ProcessedClusterStateUpdateTask; +import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.SnapshotsInProgress.ShardSnapshotStatus; import org.elasticsearch.cluster.SnapshotsInProgress.State; 
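The substitution repeated throughout this patch replaces Guava's Lists factory methods with plain JDK constructors; since Java 7's diamond operator the Guava helpers no longer save any typing. A minimal sketch of the pattern (class and variable names are illustrative only, not from this patch):

    import java.util.ArrayList;
    import java.util.List;

    class ListCreationSketch {
        // before (Guava): Lists.newArrayList() and Lists.newArrayListWithCapacity(2)
        List<String> values = new ArrayList<>();  // replaces Lists.newArrayList()
        List<String> sized = new ArrayList<>(2);  // replaces Lists.newArrayListWithCapacity(2)
    }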
-import org.elasticsearch.cluster.metadata.*; +import org.elasticsearch.cluster.TimeoutClusterStateUpdateTask; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.metadata.RepositoriesMetaData; +import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; @@ -51,10 +62,13 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Sets.newHashSet; import static org.elasticsearch.cluster.SnapshotsInProgress.completed; @@ -134,7 +148,7 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic for (SnapshotId snapshotId : snapshotIds) { snapshotSet.add(repository.readSnapshot(snapshotId)); } - ArrayList<Snapshot> snapshotList = newArrayList(snapshotSet); + ArrayList<Snapshot> snapshotList = new ArrayList<>(snapshotSet); CollectionUtil.timSort(snapshotList); return ImmutableList.copyOf(snapshotList); } @@ -146,7 +160,7 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic * @return list of snapshots */ public List<Snapshot> currentSnapshots(String repositoryName) { - List<Snapshot> snapshotList = newArrayList(); + List<Snapshot> snapshotList = new ArrayList<>(); List<SnapshotsInProgress.Entry> entries = currentSnapshots(repositoryName, null); for (SnapshotsInProgress.Entry entry : entries) { snapshotList.add(inProgressSnapshot(entry)); @@ -524,7 +538,7 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic return currentState; } boolean changed = false; - ArrayList<SnapshotsInProgress.Entry> entries = newArrayList(); + ArrayList<SnapshotsInProgress.Entry> entries = new ArrayList<>(); for (final SnapshotsInProgress.Entry snapshot : snapshots.entries()) { SnapshotsInProgress.Entry updatedSnapshot = snapshot; boolean snapshotChanged = false; @@ -596,7 +610,7 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic SnapshotsInProgress snapshots = currentState.custom(SnapshotsInProgress.TYPE); if (snapshots != null) { boolean changed = false; - ArrayList<SnapshotsInProgress.Entry> entries = newArrayList(); + ArrayList<SnapshotsInProgress.Entry> entries = new ArrayList<>(); for (final SnapshotsInProgress.Entry snapshot : snapshots.entries()) { SnapshotsInProgress.Entry updatedSnapshot = snapshot; if (snapshot.state() == State.STARTED) { @@ -763,8 +777,8 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic try { final Repository repository = repositoriesService.repository(snapshotId.getRepository()); logger.trace("[{}] finalizing snapshot in repository, state: [{}], failure[{}]", snapshotId, entry.state(), failure); - ArrayList<ShardSearchFailure> failures = newArrayList(); - ArrayList<SnapshotShardFailure> shardFailures = newArrayList(); + ArrayList<ShardSearchFailure> failures = new ArrayList<>(); + ArrayList<SnapshotShardFailure> shardFailures = new ArrayList<>(); for 
(Map.Entry<ShardId, ShardSnapshotStatus> shardStatus : entry.shards().entrySet()) { ShardId shardId = shardStatus.getKey(); ShardSnapshotStatus status = shardStatus.getValue(); @@ -797,7 +811,7 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic SnapshotsInProgress snapshots = currentState.custom(SnapshotsInProgress.TYPE); if (snapshots != null) { boolean changed = false; - ArrayList<SnapshotsInProgress.Entry> entries = newArrayList(); + ArrayList<SnapshotsInProgress.Entry> entries = new ArrayList<>(); for (SnapshotsInProgress.Entry entry : snapshots.entries()) { if (entry.snapshotId().equals(snapshotId)) { changed = true; diff --git a/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java b/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java index 2b8caf8f055..2aa6818c878 100644 --- a/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java +++ b/core/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java @@ -21,6 +21,7 @@ package org.elasticsearch.transport; import java.lang.reflect.Constructor; +import java.util.concurrent.Callable; /** * @@ -28,20 +29,19 @@ import java.lang.reflect.Constructor; public class RequestHandlerRegistry<Request extends TransportRequest> { private final String action; - private final Constructor<Request> requestConstructor; private final TransportRequestHandler<Request> handler; private final boolean forceExecution; private final String executor; + private final Callable<Request> requestFactory; RequestHandlerRegistry(String action, Class<Request> request, TransportRequestHandler<Request> handler, String executor, boolean forceExecution) { + this(action, new ReflectionFactory<>(request), handler, executor, forceExecution); + } + + public RequestHandlerRegistry(String action, Callable<Request> requestFactory, TransportRequestHandler<Request> handler, String executor, boolean forceExecution) { this.action = action; - try { - this.requestConstructor = request.getDeclaredConstructor(); - } catch (NoSuchMethodException e) { - throw new IllegalStateException("failed to create constructor (does it have a default constructor?) for request " + request, e); - } - this.requestConstructor.setAccessible(true); + this.requestFactory = requestFactory; assert newRequest() != null; this.handler = handler; this.forceExecution = forceExecution; @@ -54,7 +54,7 @@ public class RequestHandlerRegistry<Request extends TransportRequest> { public Request newRequest() { try { - return requestConstructor.newInstance(); + return requestFactory.call(); } catch (Exception e) { throw new IllegalStateException("failed to instantiate request ", e); } @@ -71,4 +71,22 @@ public class RequestHandlerRegistry<Request extends TransportRequest> { public String getExecutor() { return executor; } + + private final static class ReflectionFactory<Request> implements Callable<Request> { + private final Constructor<Request> requestConstructor; + + public ReflectionFactory(Class<Request> request) { + try { + this.requestConstructor = request.getDeclaredConstructor(); + } catch (NoSuchMethodException e) { + throw new IllegalStateException("failed to create constructor (does it have a default constructor?) 
for request " + request, e); + } + this.requestConstructor.setAccessible(true); + } + + @Override + public Request call() throws Exception { + return requestConstructor.newInstance(); + } + } } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index b70589ce52d..40fa908c2b3 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -22,8 +22,6 @@ package org.elasticsearch.transport; import com.google.common.collect.ImmutableMap; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.settings.ClusterDynamicSettings; -import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -35,12 +33,21 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.util.concurrent.*; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; @@ -399,6 +406,18 @@ public class TransportService extends AbstractLifecycleComponent<TransportServic registerRequestHandler(action, request, executor, false, handler); } + /** + * Registers a new request handler + * @param action The action the request handler is associated with + * @param requestFactory a callable to be used construct new instances for streaming + * @param executor The executor the request handling will be executed on + * @param handler The handler itself that implements the request handling + */ + public <Request extends TransportRequest> void registerRequestHandler(String action, Callable<Request> requestFactory, String executor, TransportRequestHandler<Request> handler) { + RequestHandlerRegistry<Request> reg = new RequestHandlerRegistry<>(action, requestFactory, handler, executor, false); + registerRequestHandler(reg); + } + /** * Registers a new request handler * @param action The action the request handler is associated with @@ -408,8 +427,12 @@ public class TransportService extends AbstractLifecycleComponent<TransportServic * @param handler The handler itself that implements the request handling */ public <Request extends TransportRequest> void registerRequestHandler(String action, Class<Request> request, String executor, boolean forceExecution, TransportRequestHandler<Request> handler) { + RequestHandlerRegistry<Request> 
reg = new RequestHandlerRegistry<>(action, request, handler, executor, forceExecution); + registerRequestHandler(reg); + } + + protected <Request extends TransportRequest> void registerRequestHandler(RequestHandlerRegistry<Request> reg) { synchronized (requestHandlerMutex) { - RequestHandlerRegistry<Request> reg = new RequestHandlerRegistry<>(action, request, handler, executor, forceExecution); RequestHandlerRegistry replaced = requestHandlers.get(reg.getAction()); requestHandlers = MapBuilder.newMapBuilder(requestHandlers).put(reg.getAction(), reg).immutableMap(); if (replaced != null) { diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index f2f21a1d1ca..3cc7b779019 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -20,11 +20,14 @@ package org.elasticsearch.tribe; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.ClusterStateNonMasterUpdateTask; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -52,6 +55,7 @@ import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; /** * The tribe service holds a list of node clients connected to a list of tribe members, and uses their @@ -115,7 +119,7 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> { private final String onConflict; private final Set<String> droppedIndices = ConcurrentCollections.newConcurrentSet(); - private final List<Node> nodes = Lists.newCopyOnWriteArrayList(); + private final List<Node> nodes = new CopyOnWriteArrayList<>(); @Inject public TribeService(Settings settings, ClusterService clusterService, DiscoveryService discoveryService) { diff --git a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help index 73757794508..6bce1dd6f5b 100644 --- a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help +++ b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help @@ -8,31 +8,26 @@ SYNOPSIS DESCRIPTION - This command installs an elasticsearch plugin + This command installs an elasticsearch plugin. It can be used as follows: - The argument can be a <name> of one of the official plugins, or refer to a github repository + Officially supported or commercial plugins require just the plugin name: - The notation of just specifying a plugin name, downloads an officially supported plugin. + plugin install analysis-icu + plugin install shield - The notation of 'elasticsearch/plugin/version' allows to easily download a commercial elastic plugin. 
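For context on the TransportService change above: the new overload takes a Callable request factory, so the registry no longer needs reflective access to a no-arg constructor. A minimal usage sketch, assuming a hypothetical EchoRequest that extends TransportRequest (the anonymous classes match the pre-lambda style used elsewhere in this codebase):

    transportService.registerRequestHandler("internal:example/echo",
            new Callable<EchoRequest>() {
                @Override
                public EchoRequest call() {
                    return new EchoRequest(); // constructed directly, no reflection needed
                }
            },
            ThreadPool.Names.SAME,
            new TransportRequestHandler<EchoRequest>() {
                @Override
                public void messageReceived(EchoRequest request, TransportChannel channel) throws Exception {
                    channel.sendResponse(TransportResponse.Empty.INSTANCE);
                }
            });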
+ Plugins from GitHub require 'username/repository' or 'username/repository/version': - The notation of 'groupId/artifactId/version' refers to community plugins using maven central or sonatype + plugin install lmenezes/elasticsearch-kopf + plugin install lmenezes/elasticsearch-kopf/1.5.7 - The notation of 'username/repository' refers to a github repository. + Plugins from Maven Central or Sonatype require 'groupId/artifactId/version': - The argument can be an valid <url> which points to a download or file location for the plugin to be loaded from. + plugin install org.elasticsearch/elasticsearch-mapper-attachments/2.6.0 -EXAMPLES + Plugins can be installed from a custom URL or file location as follows: - plugin install analysis-kuromoji - - plugin install elasticsearch/shield/latest - - plugin install lmenezes/elasticsearch-kopf - - plugin install http://download.elasticsearch.org/elasticsearch/elasticsearch-analysis-kuromoji/elasticsearch-analysis-kuromoji-2.7.0.zip - - plugin install file:/path/to/plugin/elasticsearch-analysis-kuromoji-2.7.0.zip + plugin install http://some.domain.name/my-plugin-1.0.0.zip + plugin install file:/path/to/my-plugin-1.0.0.zip OFFICIAL PLUGINS diff --git a/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java b/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java index dd31d4f352f..c22638268ed 100644 --- a/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java @@ -115,6 +115,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.Callable; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @@ -401,7 +402,7 @@ public class IndicesRequestIT extends ESIntegTestCase { @Test public void testOptimize() { - String optimizeShardAction = OptimizeAction.NAME + "[s]"; + String optimizeShardAction = OptimizeAction.NAME + "[n]"; interceptTransportActions(optimizeShardAction); OptimizeRequest optimizeRequest = new OptimizeRequest(randomIndicesOrAliases()); @@ -425,7 +426,7 @@ public class IndicesRequestIT extends ESIntegTestCase { @Test public void testClearCache() { - String clearCacheAction = ClearIndicesCacheAction.NAME + "[s]"; + String clearCacheAction = ClearIndicesCacheAction.NAME + "[n]"; interceptTransportActions(clearCacheAction); ClearIndicesCacheRequest clearIndicesCacheRequest = new ClearIndicesCacheRequest(randomIndicesOrAliases()); @@ -437,7 +438,7 @@ public class IndicesRequestIT extends ESIntegTestCase { @Test public void testRecovery() { - String recoveryAction = RecoveryAction.NAME + "[s]"; + String recoveryAction = RecoveryAction.NAME + "[n]"; interceptTransportActions(recoveryAction); RecoveryRequest recoveryRequest = new RecoveryRequest(randomIndicesOrAliases()); @@ -449,7 +450,7 @@ public class IndicesRequestIT extends ESIntegTestCase { @Test public void testSegments() { - String segmentsAction = IndicesSegmentsAction.NAME + "[s]"; + String segmentsAction = IndicesSegmentsAction.NAME + "[n]"; interceptTransportActions(segmentsAction); IndicesSegmentsRequest segmentsRequest = new IndicesSegmentsRequest(randomIndicesOrAliases()); @@ -461,7 +462,7 @@ public class IndicesRequestIT extends ESIntegTestCase { @Test public void testIndicesStats() { - String indicesStats = IndicesStatsAction.NAME + "[s]"; + String indicesStats = IndicesStatsAction.NAME + "[n]";
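The "[s]" to "[n]" suffix changes in these tests reflect the new TransportBroadcastByNodeAction exercised below: these broadcast actions now fan out one child request per node holding shards instead of one per shard. Roughly, the fan-out groups shards by their current node, as in this sketch (mirroring the grouping done later in testResultAggregation; shardIt is the shards iterator obtained from the cluster state):

    // group shards by the node currently holding them; a single node-level
    // request is then sent per map entry rather than one per shard
    Map<String, List<ShardRouting>> shardsByNode = new HashMap<>();
    for (ShardRouting shard : shardIt.asUnordered()) {
        if (!shardsByNode.containsKey(shard.currentNodeId())) {
            shardsByNode.put(shard.currentNodeId(), new ArrayList<ShardRouting>());
        }
        shardsByNode.get(shard.currentNodeId()).add(shard);
    }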
interceptTransportActions(indicesStats); IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest().indices(randomIndicesOrAliases()); @@ -890,6 +891,11 @@ public class IndicesRequestIT extends ESIntegTestCase { super.registerRequestHandler(action, request, executor, forceExecution, new InterceptingRequestHandler(action, handler)); } + @Override + public <Request extends TransportRequest> void registerRequestHandler(String action, Callable<Request> requestFactory, String executor, TransportRequestHandler<Request> handler) { + super.registerRequestHandler(action, requestFactory, executor, new InterceptingRequestHandler(action, handler)); + } + private class InterceptingRequestHandler implements TransportRequestHandler { private final TransportRequestHandler requestHandler; diff --git a/core/src/test/java/org/elasticsearch/action/RejectionActionIT.java b/core/src/test/java/org/elasticsearch/action/RejectionActionIT.java index ff094e09a4c..3c59f677f55 100644 --- a/core/src/test/java/org/elasticsearch/action/RejectionActionIT.java +++ b/core/src/test/java/org/elasticsearch/action/RejectionActionIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.action; -import com.google.common.collect.Lists; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; @@ -65,7 +64,7 @@ public class RejectionActionIT extends ESIntegTestCase { int numberOfAsyncOps = randomIntBetween(200, 700); final CountDownLatch latch = new CountDownLatch(numberOfAsyncOps); - final CopyOnWriteArrayList<Object> responses = Lists.newCopyOnWriteArrayList(); + final CopyOnWriteArrayList<Object> responses = new CopyOnWriteArrayList<>(); for (int i = 0; i < numberOfAsyncOps; i++) { client().prepareSearch("test") .setSearchType(SearchType.QUERY_THEN_FETCH) diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index 0966212fe3a..091462897ab 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.indices.template.put; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.elasticsearch.Version; @@ -33,12 +32,12 @@ import org.elasticsearch.indices.InvalidIndexTemplateException; import org.elasticsearch.test.ESTestCase; import org.junit.Test; +import java.util.ArrayList; import java.util.List; import java.util.Map; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.Matchers.contains; public class MetaDataIndexTemplateServiceTests extends ESTestCase { @Test @@ -89,7 +88,7 @@ public class MetaDataIndexTemplateServiceTests extends ESTestCase { ); MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, null); - final List<Throwable> throwables = Lists.newArrayList(); + final List<Throwable> throwables = new ArrayList<>(); service.putTemplate(request, new MetaDataIndexTemplateService.PutListener() { @Override public void onResponse(MetaDataIndexTemplateService.PutResponse response) { diff --git 
a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index 41fb1d919b0..b1fd92aa914 100644 --- a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.support; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; @@ -30,7 +29,12 @@ import org.elasticsearch.test.ESTestCase; import org.junit.Before; import org.junit.Test; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; @@ -71,7 +75,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } }; - ArrayList<ActionFilter> actionFiltersByOrder = Lists.newArrayList(filters); + ArrayList<ActionFilter> actionFiltersByOrder = new ArrayList<>(filters); Collections.sort(actionFiltersByOrder, new Comparator<ActionFilter>() { @Override public int compare(ActionFilter o1, ActionFilter o2) { @@ -79,7 +83,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } }); - List<ActionFilter> expectedActionFilters = Lists.newArrayList(); + List<ActionFilter> expectedActionFilters = new ArrayList<>(); boolean errorExpected = false; for (ActionFilter filter : actionFiltersByOrder) { RequestTestFilter testFilter = (RequestTestFilter) filter; @@ -101,7 +105,7 @@ public class TransportActionFilterChainTests extends ESTestCase { assertThat("shouldn't get here if an error is not expected " + t.getMessage(), errorExpected, equalTo(true)); } - List<RequestTestFilter> testFiltersByLastExecution = Lists.newArrayList(); + List<RequestTestFilter> testFiltersByLastExecution = new ArrayList<>(); for (ActionFilter actionFilter : actionFilters.filters()) { testFiltersByLastExecution.add((RequestTestFilter) actionFilter); } @@ -112,7 +116,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } }); - ArrayList<RequestTestFilter> finalTestFilters = Lists.newArrayList(); + ArrayList<RequestTestFilter> finalTestFilters = new ArrayList<>(); for (ActionFilter filter : testFiltersByLastExecution) { RequestTestFilter testFilter = (RequestTestFilter) filter; finalTestFilters.add(testFilter); @@ -153,7 +157,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } }; - ArrayList<ActionFilter> actionFiltersByOrder = Lists.newArrayList(filters); + ArrayList<ActionFilter> actionFiltersByOrder = new ArrayList<>(filters); Collections.sort(actionFiltersByOrder, new Comparator<ActionFilter>() { @Override public int compare(ActionFilter o1, ActionFilter o2) { @@ -161,7 +165,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } }); - List<ActionFilter> expectedActionFilters = Lists.newArrayList(); + List<ActionFilter> expectedActionFilters = new ArrayList<>(); boolean errorExpected = false; for (ActionFilter filter : actionFiltersByOrder) { ResponseTestFilter testFilter = (ResponseTestFilter) filter; @@ -183,7 +187,7 @@ public class TransportActionFilterChainTests extends ESTestCase { assertThat("shouldn't 
get here if an error is not expected " + t.getMessage(), errorExpected, equalTo(true)); } - List<ResponseTestFilter> testFiltersByLastExecution = Lists.newArrayList(); + List<ResponseTestFilter> testFiltersByLastExecution = new ArrayList<>(); for (ActionFilter actionFilter : actionFilters.filters()) { testFiltersByLastExecution.add((ResponseTestFilter) actionFilter); } @@ -194,7 +198,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } }); - ArrayList<ResponseTestFilter> finalTestFilters = Lists.newArrayList(); + ArrayList<ResponseTestFilter> finalTestFilters = new ArrayList<>(); for (ActionFilter filter : testFiltersByLastExecution) { ResponseTestFilter testFilter = (ResponseTestFilter) filter; finalTestFilters.add(testFilter); diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java new file mode 100644 index 00000000000..c6e2f2cf942 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -0,0 +1,422 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.support.broadcast.node; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlock; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.TestClusterService; +import org.elasticsearch.test.transport.CapturingTransport; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseOptions; +import org.elasticsearch.transport.TransportService; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.object.HasToString.hasToString; + +public class TransportBroadcastByNodeActionTests extends ESTestCase { + + private static final String TEST_INDEX = "test-index"; + private static final String TEST_CLUSTER = "test-cluster"; + private static ThreadPool THREAD_POOL; + + private TestClusterService clusterService; + private CapturingTransport transport; + private TransportService transportService; + + private TestTransportBroadcastByNodeAction action; + + public static class Request extends BroadcastRequest<Request> { + public Request() { + } + + public Request(String[] indices) { + super(indices); + } + } + + public static class Response extends BroadcastResponse { + public Response() { + } + + public Response(int totalShards, int successfulShards, int 
failedShards, List<ShardOperationFailedException> shardFailures) { + super(totalShards, successfulShards, failedShards, shardFailures); + } + } + + class TestTransportBroadcastByNodeAction extends TransportBroadcastByNodeAction<Request, Response, TransportBroadcastByNodeAction.EmptyResult> { + private final Map<ShardRouting, Object> shards = new HashMap<>(); + + public TestTransportBroadcastByNodeAction(Settings settings, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Class<Request> request, String executor) { + super(settings, "indices:admin/test", THREAD_POOL, TransportBroadcastByNodeActionTests.this.clusterService, transportService, actionFilters, indexNameExpressionResolver, request, executor); + } + + @Override + protected EmptyResult readShardResult(StreamInput in) throws IOException { + return EmptyResult.readEmptyResultFrom(in); + } + + @Override + protected Response newResponse(Request request, int totalShards, int successfulShards, int failedShards, List<EmptyResult> emptyResults, List<ShardOperationFailedException> shardFailures) { + return new Response(totalShards, successfulShards, failedShards, shardFailures); + } + + @Override + protected Request readRequestFrom(StreamInput in) throws IOException { + final Request request = new Request(); + request.readFrom(in); + return request; + } + + @Override + protected EmptyResult shardOperation(Request request, ShardRouting shardRouting) { + if (rarely()) { + shards.put(shardRouting, Boolean.TRUE); + return EmptyResult.INSTANCE; + } else { + ElasticsearchException e = new ElasticsearchException("operation failed"); + shards.put(shardRouting, e); + throw e; + } + } + + @Override + protected ShardsIterator shards(ClusterState clusterState, Request request, String[] concreteIndices) { + return clusterState.routingTable().allShards(new String[]{TEST_INDEX}); + } + + @Override + protected ClusterBlockException checkGlobalBlock(ClusterState state, Request request) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + + @Override + protected ClusterBlockException checkRequestBlock(ClusterState state, Request request, String[] concreteIndices) { + return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, concreteIndices); + } + + public Map<ShardRouting, Object> getResults() { + return shards; + } + } + + class MyResolver extends IndexNameExpressionResolver { + public MyResolver() { + super(Settings.EMPTY); + } + + @Override + public String[] concreteIndices(ClusterState state, IndicesRequest request) { + return request.indices(); + } + } + + @BeforeClass + public static void startThreadPool() { + THREAD_POOL = new ThreadPool(TransportBroadcastByNodeActionTests.class.getSimpleName()); + } + + @Before + public void setUp() throws Exception { + super.setUp(); + transport = new CapturingTransport(); + clusterService = new TestClusterService(THREAD_POOL); + transportService = new TransportService(transport, THREAD_POOL); + transportService.start(); + setClusterState(clusterService, TEST_INDEX); + action = new TestTransportBroadcastByNodeAction( + Settings.EMPTY, + transportService, + new ActionFilters(new HashSet<ActionFilter>()), + new MyResolver(), + Request.class, + ThreadPool.Names.SAME + ); + } + + void setClusterState(TestClusterService clusterService, String index) { + int numberOfNodes = randomIntBetween(3, 5); + DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); + IndexRoutingTable.Builder 
indexRoutingTable = IndexRoutingTable.builder(index); + + int shardIndex = -1; + for (int i = 0; i < numberOfNodes; i++) { + final DiscoveryNode node = newNode(i); + discoBuilder = discoBuilder.put(node); + int numberOfShards = randomIntBetween(0, 10); + for (int j = 0; j < numberOfShards; j++) { + final ShardId shardId = new ShardId(index, ++shardIndex); + ShardRouting shard = TestShardRouting.newShardRouting(index, shardId.getId(), node.id(), true, ShardRoutingState.STARTED, 1); + IndexShardRoutingTable.Builder indexShard = new IndexShardRoutingTable.Builder(shardId); + indexShard.addShard(shard); + indexRoutingTable.addIndexShard(indexShard.build()); + } + } + discoBuilder.localNodeId(newNode(0).id()); + discoBuilder.masterNodeId(newNode(numberOfNodes - 1).id()); + ClusterState.Builder stateBuilder = ClusterState.builder(new ClusterName(TEST_CLUSTER)); + stateBuilder.nodes(discoBuilder); + stateBuilder.routingTable(RoutingTable.builder().add(indexRoutingTable.build()).build()); + ClusterState clusterState = stateBuilder.build(); + clusterService.setState(clusterState); + } + + static DiscoveryNode newNode(int nodeId) { + return new DiscoveryNode("node_" + nodeId, DummyTransportAddress.INSTANCE, Version.CURRENT); + } + + @AfterClass + public static void destroyThreadPool() { + ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS); + // the static field must be set to null to be eligible for collection + THREAD_POOL = null; + } + + public void testGlobalBlock() { + Request request = new Request(new String[]{TEST_INDEX}); + PlainActionFuture<Response> listener = new PlainActionFuture<>(); + + ClusterBlocks.Builder block = ClusterBlocks.builder() + .addGlobalBlock(new ClusterBlock(1, "", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL)); + clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block)); + try { + action.new AsyncAction(request, listener).start(); + fail("expected ClusterBlockException"); + } catch (ClusterBlockException expected) { + + } + } + + public void testRequestBlock() { + Request request = new Request(new String[]{TEST_INDEX}); + PlainActionFuture<Response> listener = new PlainActionFuture<>(); + + ClusterBlocks.Builder block = ClusterBlocks.builder() + .addIndexBlock(TEST_INDEX, new ClusterBlock(1, "test-block", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL)); + clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block)); + try { + action.new AsyncAction(request, listener).start(); + fail("expected ClusterBlockException"); + } catch (ClusterBlockException expected) { + + } + } + + public void testOneRequestIsSentToEachNodeHoldingAShard() { + Request request = new Request(new String[]{TEST_INDEX}); + PlainActionFuture<Response> listener = new PlainActionFuture<>(); + + action.new AsyncAction(request, listener).start(); + Map<String, List<CapturingTransport.CapturedRequest>> capturedRequests = transport.capturedRequestsByTargetNode(); + + ShardsIterator shardIt = clusterService.state().routingTable().allShards(new String[]{TEST_INDEX}); + Set<String> set = new HashSet<>(); + for (ShardRouting shard : shardIt.asUnordered()) { + set.add(shard.currentNodeId()); + } + + // check a request was sent to the right number of nodes + assertEquals(set.size(), capturedRequests.size()); + + // check requests were sent to the right nodes + assertEquals(set, capturedRequests.keySet()); + for (Map.Entry<String, List<CapturingTransport.CapturedRequest>> entry : capturedRequests.entrySet()) { + // check one
request was sent to each node + assertEquals(1, entry.getValue().size()); + } + } + + public void testOperationExecution() throws Exception { + ShardsIterator shardIt = clusterService.state().routingTable().allShards(new String[]{TEST_INDEX}); + Set<ShardRouting> shards = new HashSet<>(); + String nodeId = shardIt.asUnordered().iterator().next().currentNodeId(); + for (ShardRouting shard : shardIt.asUnordered()) { + if (nodeId.equals(shard.currentNodeId())) { + shards.add(shard); + } + } + final TransportBroadcastByNodeAction.BroadcastByNodeTransportRequestHandler handler = + action.new BroadcastByNodeTransportRequestHandler(); + + TestTransportChannel channel = new TestTransportChannel(); + + handler.messageReceived(action.new NodeRequest(nodeId, new Request(), new ArrayList<>(shards)), channel); + + // check the operation was executed only on the expected shards + assertEquals(shards, action.getResults().keySet()); + + TransportResponse response = channel.getCapturedResponse(); + assertTrue(response instanceof TransportBroadcastByNodeAction.NodeResponse); + TransportBroadcastByNodeAction.NodeResponse nodeResponse = (TransportBroadcastByNodeAction.NodeResponse)response; + + // check the operation was executed on the correct node + assertEquals("node id", nodeId, nodeResponse.getNodeId()); + + int successfulShards = 0; + int failedShards = 0; + for (Object result : action.getResults().values()) { + if (!(result instanceof ElasticsearchException)) { + successfulShards++; + } else { + failedShards++; + } + } + + // check the operation results + assertEquals("successful shards", successfulShards, nodeResponse.getSuccessfulShards()); + assertEquals("total shards", action.getResults().size(), nodeResponse.getTotalShards()); + assertEquals("failed shards", failedShards, nodeResponse.getExceptions().size()); + List<BroadcastShardOperationFailedException> exceptions = nodeResponse.getExceptions(); + for (BroadcastShardOperationFailedException exception : exceptions) { + assertThat(exception.getMessage(), is("operation indices:admin/test failed")); + assertThat(exception, hasToString(containsString("operation failed"))); + } + } + + public void testResultAggregation() throws ExecutionException, InterruptedException { + Request request = new Request(new String[]{TEST_INDEX}); + PlainActionFuture<Response> listener = new PlainActionFuture<>(); + + action.new AsyncAction(request, listener).start(); + Map<String, List<CapturingTransport.CapturedRequest>> capturedRequests = transport.capturedRequestsByTargetNode(); + transport.clear(); + + ShardsIterator shardIt = clusterService.state().getRoutingTable().allShards(new String[]{TEST_INDEX}); + Map<String, List<ShardRouting>> map = new HashMap<>(); + for (ShardRouting shard : shardIt.asUnordered()) { + if (!map.containsKey(shard.currentNodeId())) { + map.put(shard.currentNodeId(), new ArrayList<ShardRouting>()); + } + map.get(shard.currentNodeId()).add(shard); + } + + int totalShards = 0; + int totalSuccessfulShards = 0; + int totalFailedShards = 0; + for (Map.Entry<String, List<CapturingTransport.CapturedRequest>> entry : capturedRequests.entrySet()) { + List<BroadcastShardOperationFailedException> exceptions = new ArrayList<>(); + long requestId = entry.getValue().get(0).requestId; + if (rarely()) { + // simulate node failure + totalShards += map.get(entry.getKey()).size(); + totalFailedShards += map.get(entry.getKey()).size(); + transport.handleResponse(requestId, new Exception()); + } else { + List<ShardRouting> shards = map.get(entry.getKey()); + 
List<TransportBroadcastByNodeAction.EmptyResult> shardResults = new ArrayList<>(); + for (ShardRouting shard : shards) { + totalShards++; + if (rarely()) { + // simulate operation failure + totalFailedShards++; + exceptions.add(new BroadcastShardOperationFailedException(shard.shardId(), "operation indices:admin/test failed")); + } else { + shardResults.add(TransportBroadcastByNodeAction.EmptyResult.INSTANCE); + } + } + totalSuccessfulShards += shardResults.size(); + TransportBroadcastByNodeAction.NodeResponse nodeResponse = action.new NodeResponse(entry.getKey(), shards.size(), shardResults, exceptions); + transport.handleResponse(requestId, nodeResponse); + } + } + + Response response = listener.get(); + assertEquals("total shards", totalShards, response.getTotalShards()); + assertEquals("successful shards", totalSuccessfulShards, response.getSuccessfulShards()); + assertEquals("failed shards", totalFailedShards, response.getFailedShards()); + assertEquals("accumulated exceptions", totalFailedShards, response.getShardFailures().length); + } + + public class TestTransportChannel implements TransportChannel { + private TransportResponse capturedResponse; + + public TransportResponse getCapturedResponse() { + return capturedResponse; + } + + @Override + public String action() { + return null; + } + + @Override + public String getProfileName() { + return ""; + } + + @Override + public void sendResponse(TransportResponse response) throws IOException { + capturedResponse = response; + } + + @Override + public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException { + } + + @Override + public void sendResponse(Throwable error) throws IOException { + } + } +} \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ShardReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/ShardReplicationTests.java index d08840b0771..d7fb2ddd54d 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/ShardReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/ShardReplicationTests.java @@ -661,7 +661,6 @@ public class ShardReplicationTests extends ESTestCase { public AtomicInteger processedOnReplicas = new AtomicInteger(); Request() { - this.operationThreaded(randomBoolean()); } Request(ShardId shardId) { diff --git a/core/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java b/core/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java index ad2d1a79eda..e7ef3259199 100644 --- a/core/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java +++ b/core/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java @@ -19,7 +19,6 @@ package org.elasticsearch.benchmark.recovery; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; -import org.elasticsearch.action.admin.indices.recovery.ShardRecoveryResponse; import org.elasticsearch.bootstrap.BootstrapForTesting; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -30,6 +29,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.SizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.node.Node; import org.elasticsearch.test.BackgroundIndexer; import 
org.elasticsearch.transport.TransportModule; @@ -128,12 +128,12 @@ public class ReplicaRecoveryBenchmark { long currentTime = System.currentTimeMillis(); long currentDocs = indexer.totalIndexedDocs(); RecoveryResponse recoveryResponse = client1.admin().indices().prepareRecoveries(INDEX_NAME).setActiveOnly(true).get(); - List<ShardRecoveryResponse> indexRecoveries = recoveryResponse.shardResponses().get(INDEX_NAME); + List<RecoveryState> indexRecoveries = recoveryResponse.shardRecoveryStates().get(INDEX_NAME); long translogOps; long bytes; if (indexRecoveries.size() > 0) { - translogOps = indexRecoveries.get(0).recoveryState().getTranslog().recoveredOperations(); - bytes = recoveryResponse.shardResponses().get(INDEX_NAME).get(0).recoveryState().getIndex().recoveredBytes(); + translogOps = indexRecoveries.get(0).getTranslog().recoveredOperations(); + bytes = recoveryResponse.shardRecoveryStates().get(INDEX_NAME).get(0).getIndex().recoveredBytes(); } else { bytes = lastBytes = 0; translogOps = lastTranslogOps = 0; diff --git a/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java b/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java index 464c67ae84d..cd06b7b0579 100644 --- a/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java +++ b/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java @@ -20,8 +20,6 @@ package org.elasticsearch.benchmark.search.aggregations; import com.carrotsearch.hppc.ObjectScatterSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import com.google.common.collect.Lists; - import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -41,6 +39,7 @@ import org.elasticsearch.node.Node; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; +import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Random; @@ -208,7 +207,7 @@ public class SubAggregationSearchCollectModeBenchmark { COUNT = client.prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(); System.out.println("--> Number of docs in index: " + COUNT); - List<StatsResult> stats = Lists.newArrayList(); + List<StatsResult> stats = new ArrayList<>(); stats.add(runTest("0000", new SubAggCollectionMode[] {SubAggCollectionMode.DEPTH_FIRST,SubAggCollectionMode.DEPTH_FIRST, SubAggCollectionMode.DEPTH_FIRST, SubAggCollectionMode.DEPTH_FIRST})); stats.add(runTest("0001", new SubAggCollectionMode[] {SubAggCollectionMode.DEPTH_FIRST,SubAggCollectionMode.DEPTH_FIRST, SubAggCollectionMode.DEPTH_FIRST, SubAggCollectionMode.BREADTH_FIRST})); stats.add(runTest("0010", new SubAggCollectionMode[] {SubAggCollectionMode.DEPTH_FIRST,SubAggCollectionMode.DEPTH_FIRST, SubAggCollectionMode.BREADTH_FIRST, SubAggCollectionMode.DEPTH_FIRST})); diff --git a/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java b/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java index e278feb3644..7240ee308f2 100644 --- 
a/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java +++ b/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java @@ -20,8 +20,6 @@ package org.elasticsearch.benchmark.search.aggregations; import com.carrotsearch.hppc.ObjectScatterSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import com.google.common.collect.Lists; - import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -42,6 +40,7 @@ import org.elasticsearch.node.Node; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; +import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Random; @@ -235,7 +234,7 @@ public class TermsAggregationSearchBenchmark { System.out.println("--> Number of docs in index: " + COUNT); - List<StatsResult> stats = Lists.newArrayList(); + List<StatsResult> stats = new ArrayList<>(); stats.add(terms("terms_agg_s", Method.AGGREGATION, "s_value", null)); stats.add(terms("terms_agg_s_dv", Method.AGGREGATION, "s_value_dv", null)); stats.add(terms("terms_agg_map_s", Method.AGGREGATION, "s_value", "map")); diff --git a/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/TimeDataHistogramAggregationBenchmark.java b/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/TimeDataHistogramAggregationBenchmark.java index 96203ccba19..5b3984d19cb 100644 --- a/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/TimeDataHistogramAggregationBenchmark.java +++ b/core/src/test/java/org/elasticsearch/benchmark/search/aggregations/TimeDataHistogramAggregationBenchmark.java @@ -19,8 +19,6 @@ package org.elasticsearch.benchmark.search.aggregations; -import com.google.common.collect.Lists; - import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; @@ -44,6 +42,7 @@ import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.aggregations.AggregationBuilders; import java.io.IOException; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -163,7 +162,7 @@ public class TimeDataHistogramAggregationBenchmark { setMapping(ACCEPTABLE_OVERHEAD_RATIO, MEMORY_FORMAT); warmUp("hist_l", "l_value", MATCH_PERCENTAGE); - List<StatsResult> stats = Lists.newArrayList(); + List<StatsResult> stats = new ArrayList<>(); stats.add(measureAgg("hist_l", "l_value", MATCH_PERCENTAGE)); NodesStatsResponse nodeStats = client.admin().cluster().prepareNodesStats(nodes[0].settings().get("name")).clear() diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index db464c31516..15818283f72 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.bwcompat; import com.google.common.base.Predicate; import com.google.common.util.concurrent.ListenableFuture; + import 
org.apache.lucene.index.IndexWriter; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; @@ -40,6 +41,7 @@ import org.elasticsearch.common.util.MultiDataPathUpgrader; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.engine.EngineConfig; +import org.elasticsearch.index.mapper.string.StringFieldMapperPositionIncrementGapTests; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.indices.recovery.RecoverySettings; @@ -330,6 +332,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { assertNewReplicasWork(indexName); assertUpgradeWorks(indexName, isLatestLuceneVersion(version)); assertDeleteByQueryWorked(indexName, version); + assertPositionIncrementGapDefaults(indexName, version); unloadIndex(indexName); } @@ -433,7 +436,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { // #10067: create-bwc-index.py deleted any doc with long_sort:[10-20] void assertDeleteByQueryWorked(String indexName, Version version) throws Exception { - if (version.onOrBefore(Version.V_1_0_0_Beta2)) { + if (version.onOrBefore(Version.V_1_0_0_Beta2) || version.onOrAfter(Version.V_2_0_0_beta1)) { // TODO: remove this once #10262 is fixed return; } @@ -442,6 +445,14 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { assertEquals(0, searchReq.get().getHits().getTotalHits()); } + void assertPositionIncrementGapDefaults(String indexName, Version version) throws Exception { + if (version.before(Version.V_2_0_0_beta1)) { + StringFieldMapperPositionIncrementGapTests.assertGapIsZero(client(), indexName, "doc"); + } else { + StringFieldMapperPositionIncrementGapTests.assertGapIsOneHundred(client(), indexName, "doc"); + } + } + void assertUpgradeWorks(String indexName, boolean alreadyLatest) throws Exception { if (alreadyLatest == false) { UpgradeIT.assertNotUpgraded(client(), indexName); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index 2130ea57267..14bcfc79076 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -43,18 +43,15 @@ import java.net.URISyntaxException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.SortedSet; import java.util.TreeSet; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.*; @ClusterScope(scope = Scope.TEST) public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { @@ -144,7 +141,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { } private List<String> listRepoVersions(String prefix) throws Exception { - List<String> repoVersions = newArrayList(); + List<String> repoVersions = new ArrayList<>(); Path repoFiles = 
reposRoot(); try (DirectoryStream<Path> stream = Files.newDirectoryStream(repoFiles, prefix + "-*.zip")) { for (Path entry : stream) { diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 8805b895a00..388a60a80e9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -170,7 +170,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { infoService.addListener(listener); ClusterInfo info = listener.get(); assertNotNull("info should not be null", info); - Map<String, DiskUsage> usages = info.getNodeDiskUsages(); + Map<String, DiskUsage> usages = info.getNodeLeastAvailableDiskUsages(); Map<String, Long> shardSizes = info.shardSizes; assertNotNull(usages); assertNotNull(shardSizes); @@ -203,14 +203,14 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { infoService.updateOnce(); ClusterInfo info = listener.get(); assertNotNull("failed to collect info", info); - assertThat("some usages are populated", info.getNodeDiskUsages().size(), Matchers.equalTo(2)); + assertThat("some usages are populated", info.getNodeLeastAvailableDiskUsages().size(), Matchers.equalTo(2)); assertThat("some shard sizes are populated", info.shardSizes.size(), greaterThan(0)); MockTransportService mockTransportService = (MockTransportService) internalCluster().getInstance(TransportService.class, internalTestCluster.getMasterName()); final AtomicBoolean timeout = new AtomicBoolean(false); - final Set<String> blockedActions = ImmutableSet.of(NodesStatsAction.NAME, NodesStatsAction.NAME + "[n]", IndicesStatsAction.NAME, IndicesStatsAction.NAME + "[s]"); + final Set<String> blockedActions = ImmutableSet.of(NodesStatsAction.NAME, NodesStatsAction.NAME + "[n]", IndicesStatsAction.NAME, IndicesStatsAction.NAME + "[n]"); // drop all outgoing stats requests to force a timeout. for (DiscoveryNode node : internalTestCluster.clusterService().state().getNodes()) { mockTransportService.addDelegate(node, new MockTransportService.DelegateTransport(mockTransportService.original()) { @@ -237,7 +237,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { // node info will time out both on the request level and on the count down latch. this means + // it is likely to update the node disk usage based on the one response that came back from the local + // node. - assertThat(info.getNodeDiskUsages().size(), greaterThanOrEqualTo(1)); + assertThat(info.getNodeLeastAvailableDiskUsages().size(), greaterThanOrEqualTo(1)); // indices is guaranteed to time out on the latch, not updating anything.
assertThat(info.shardSizes.size(), greaterThan(1)); @@ -258,7 +258,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { infoService.updateOnce(); info = listener.get(); assertNotNull("info should not be null", info); - assertThat(info.getNodeDiskUsages().size(), equalTo(0)); + assertThat(info.getNodeLeastAvailableDiskUsages().size(), equalTo(0)); assertThat(info.shardSizes.size(), equalTo(0)); // check we recover @@ -267,7 +267,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { infoService.updateOnce(); info = listener.get(); assertNotNull("info should not be null", info); - assertThat(info.getNodeDiskUsages().size(), equalTo(2)); + assertThat(info.getNodeLeastAvailableDiskUsages().size(), equalTo(2)); assertThat(info.shardSizes.size(), greaterThan(0)); } diff --git a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java index df9c1883dd8..04ae502e654 100644 --- a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java @@ -19,16 +19,25 @@ package org.elasticsearch.cluster; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.test.ESTestCase; import org.junit.Test; +import java.util.HashMap; +import java.util.Map; + import static org.hamcrest.Matchers.equalTo; public class DiskUsageTests extends ESTestCase { @Test public void diskUsageCalcTest() { - DiskUsage du = new DiskUsage("node1", "n1", 100, 40); + DiskUsage du = new DiskUsage("node1", "n1", "random", 100, 40); assertThat(du.getFreeDiskAsPercentage(), equalTo(40.0)); assertThat(du.getUsedDiskAsPercentage(), equalTo(100.0 - 40.0)); assertThat(du.getFreeBytes(), equalTo(40L)); @@ -37,19 +46,19 @@ public class DiskUsageTests extends ESTestCase { // Test that DiskUsage handles invalid numbers, as reported by some // filesystems (ZFS & NTFS) - DiskUsage du2 = new DiskUsage("node1", "n1", 100, 101); + DiskUsage du2 = new DiskUsage("node1", "n1","random", 100, 101); assertThat(du2.getFreeDiskAsPercentage(), equalTo(101.0)); assertThat(du2.getFreeBytes(), equalTo(101L)); assertThat(du2.getUsedBytes(), equalTo(-1L)); assertThat(du2.getTotalBytes(), equalTo(100L)); - DiskUsage du3 = new DiskUsage("node1", "n1", -1, -1); + DiskUsage du3 = new DiskUsage("node1", "n1", "random",-1, -1); assertThat(du3.getFreeDiskAsPercentage(), equalTo(100.0)); assertThat(du3.getFreeBytes(), equalTo(-1L)); assertThat(du3.getUsedBytes(), equalTo(0L)); assertThat(du3.getTotalBytes(), equalTo(-1L)); - DiskUsage du4 = new DiskUsage("node1", "n1", 0, 0); + DiskUsage du4 = new DiskUsage("node1", "n1","random", 0, 0); assertThat(du4.getFreeDiskAsPercentage(), equalTo(100.0)); assertThat(du4.getFreeBytes(), equalTo(0L)); assertThat(du4.getUsedBytes(), equalTo(0L)); @@ -62,7 +71,7 @@ public class DiskUsageTests extends ESTestCase { for (int i = 1; i < iters; i++) { long total = between(Integer.MIN_VALUE, Integer.MAX_VALUE); long free = between(Integer.MIN_VALUE, Integer.MAX_VALUE); - DiskUsage du = new DiskUsage("random", "random", total, free); + DiskUsage du = new DiskUsage("random", "random", "random", total, free); if (total == 0) { assertThat(du.getFreeBytes(), 
equalTo(free)); assertThat(du.getTotalBytes(), equalTo(0L)); @@ -78,4 +87,52 @@ public class DiskUsageTests extends ESTestCase { } } } + + public void testFillDiskUsage() { + Map<String, DiskUsage> newLeastAvaiableUsages = new HashMap<>(); + Map<String, DiskUsage> newMostAvaiableUsages = new HashMap<>(); + FsInfo.Path[] node1FSInfo = new FsInfo.Path[] { + new FsInfo.Path("/middle", "/dev/sda", 100, 90, 80), + new FsInfo.Path("/least", "/dev/sdb", 200, 190, 70), + new FsInfo.Path("/most", "/dev/sdc", 300, 290, 280), + }; + FsInfo.Path[] node2FSInfo = new FsInfo.Path[] { + new FsInfo.Path("/least_most", "/dev/sda", 100, 90, 80), + }; + + FsInfo.Path[] node3FSInfo = new FsInfo.Path[] { + new FsInfo.Path("/least", "/dev/sda", 100, 90, 70), + new FsInfo.Path("/most", "/dev/sda", 100, 90, 80), + }; + NodeStats[] nodeStats = new NodeStats[] { + new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT), 0, + null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null), + new NodeStats(new DiscoveryNode("node_2", DummyTransportAddress.INSTANCE, Version.CURRENT), 0, + null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null), + new NodeStats(new DiscoveryNode("node_3", DummyTransportAddress.INSTANCE, Version.CURRENT), 0, + null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null) + }; + InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvaiableUsages, newMostAvaiableUsages); + DiskUsage leastNode_1 = newLeastAvaiableUsages.get("node_1"); + DiskUsage mostNode_1 = newMostAvaiableUsages.get("node_1"); + assertDiskUsage(mostNode_1, node1FSInfo[2]); + assertDiskUsage(leastNode_1, node1FSInfo[1]); + + DiskUsage leastNode_2 = newLeastAvaiableUsages.get("node_2"); + DiskUsage mostNode_2 = newMostAvaiableUsages.get("node_2"); + assertDiskUsage(leastNode_2, node2FSInfo[0]); + assertDiskUsage(mostNode_2, node2FSInfo[0]); + + DiskUsage leastNode_3 = newLeastAvaiableUsages.get("node_3"); + DiskUsage mostNode_3 = newMostAvaiableUsages.get("node_3"); + assertDiskUsage(leastNode_3, node3FSInfo[0]); + assertDiskUsage(mostNode_3, node3FSInfo[1]); + } + + private void assertDiskUsage(DiskUsage usage, FsInfo.Path path) { + assertEquals(usage.toString(), usage.getPath(), path.getPath()); + assertEquals(usage.toString(), usage.getTotalBytes(), path.getTotal().bytes()); + assertEquals(usage.toString(), usage.getFreeBytes(), path.getAvailable().bytes()); + + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index 33ae26e6ebe..80ced595922 100644 --- a/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ b/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -63,9 +63,9 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { ClusterService clusterService, ThreadPool threadPool) { super(settings, nodeSettingsService, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool); this.clusterName = ClusterName.clusterNameFromSettings(settings); - stats[0] = MockDiskUsagesIT.makeStats("node_t1", new DiskUsage("node_t1", "n1", 100, 100)); - stats[1] = MockDiskUsagesIT.makeStats("node_t2", new DiskUsage("node_t2", "n2", 100, 100)); - stats[2] = MockDiskUsagesIT.makeStats("node_t3", new DiskUsage("node_t3", "n3", 100, 100)); + stats[0] = MockDiskUsagesIT.makeStats("node_t1", new 
DiskUsage("node_t1", "n1", "_na_", 100, 100)); + stats[1] = MockDiskUsagesIT.makeStats("node_t2", new DiskUsage("node_t2", "n2", "_na_", 100, 100)); + stats[2] = MockDiskUsagesIT.makeStats("node_t3", new DiskUsage("node_t3", "n3", "_na_", 100, 100)); } public void setN1Usage(String nodeName, DiskUsage newUsage) { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTest.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTest.java index 2cbc364bafe..9a0cbb2bcca 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTest.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTest.java @@ -243,4 +243,25 @@ public class RoutingTableTest extends ESAllocationTestCase { fail("Calling with non-existing index should be ignored at the moment"); } } + + public void testAllShardsForMultipleIndices() { + assertThat(this.emptyRoutingTable.allShards(new String[0]).size(), is(0)); + + assertThat(this.testRoutingTable.allShards(new String[]{TEST_INDEX_1}).size(), is(this.shardsPerIndex)); + + initPrimaries(); + assertThat(this.testRoutingTable.allShards(new String[]{TEST_INDEX_1}).size(), is(this.shardsPerIndex)); + + startInitializingShards(TEST_INDEX_1); + assertThat(this.testRoutingTable.allShards(new String[]{TEST_INDEX_1}).size(), is(this.shardsPerIndex)); + + startInitializingShards(TEST_INDEX_2); + assertThat(this.testRoutingTable.allShards(new String[]{TEST_INDEX_1, TEST_INDEX_2}).size(), is(this.totalNumberOfShards)); + + try { + this.testRoutingTable.allShards(new String[]{TEST_INDEX_1, "not_exists"}); + } catch (IndexNotFoundException e) { + fail("Calling with non-existing index should be ignored at the moment"); + } + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java index b24744179d6..a35e9f4af43 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.google.common.collect.Lists; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -34,6 +33,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllo import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.test.ESAllocationTestCase; import org.hamcrest.Matcher; import org.hamcrest.Matchers; @@ -390,7 +390,7 @@ public class AddIncrementallyTests extends ESAllocationTestCase { private ClusterState removeNodes(ClusterState clusterState, AllocationService service, int numNodes) { logger.info("Removing [{}] nodes", numNodes); DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterState.nodes()); - ArrayList<DiscoveryNode> discoveryNodes = Lists.newArrayList(clusterState.nodes()); + ArrayList<DiscoveryNode> discoveryNodes = CollectionUtils.iterableAsArrayList(clusterState.nodes()); Collections.shuffle(discoveryNodes, getRandom()); for (DiscoveryNode node : discoveryNodes) { nodes.remove(node.id()); diff --git 
a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java index e512fcdfbd3..e6a0ec4bc97 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java @@ -59,7 +59,7 @@ public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase { AllocationService strategy = createAllocationService(Settings.EMPTY, new ClusterInfoService() { @Override public ClusterInfo getClusterInfo() { - return new ClusterInfo(Collections.EMPTY_MAP, Collections.EMPTY_MAP) { + return new ClusterInfo() { @Override public Long getShardSize(ShardRouting shardRouting) { if (shardRouting.index().equals("test") && shardRouting.shardId().getId() == 0) { @@ -118,7 +118,7 @@ public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase { final AllocationService allocation = createAllocationService(Settings.EMPTY, new ClusterInfoService() { @Override public ClusterInfo getClusterInfo() { - return new ClusterInfo(Collections.EMPTY_MAP, Collections.EMPTY_MAP) { + return new ClusterInfo() { @Override public Long getShardSize(ShardRouting shardRouting) { if (shardRouting.index().equals("test") && shardRouting.shardId().getId() == 0) { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java index 56510384246..4dd88501ec2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java @@ -66,7 +66,7 @@ public class RebalanceAfterActiveTests extends ESAllocationTestCase { new ClusterInfoService() { @Override public ClusterInfo getClusterInfo() { - return new ClusterInfo(Collections.EMPTY_MAP, Collections.EMPTY_MAP) { + return new ClusterInfo() { @Override public Long getShardSize(ShardRouting shardRouting) { if (shardRouting.index().equals("test")) { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java index b2bd29d21b9..a6b0e414f59 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java @@ -25,20 +25,20 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESAllocationTestCase; import org.junit.Test; +import java.util.ArrayList; 
import java.util.List; import java.util.Set; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Sets.newHashSet; import static org.elasticsearch.cluster.routing.ShardRoutingState.*; import static org.elasticsearch.cluster.routing.allocation.RoutingNodesUtils.numberOfShardsOfType; @@ -239,7 +239,7 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { logger.info("Adding " + (numberOfIndices / 2) + " nodes"); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(); - List<DiscoveryNode> nodes = newArrayList(); + List<DiscoveryNode> nodes = new ArrayList<>(); for (int i = 0; i < (numberOfIndices / 2); i++) { nodesBuilder.put(newNode("node" + i)); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 41e995c367d..c202f46f688 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -41,6 +41,7 @@ import org.elasticsearch.test.ESAllocationTestCase; import org.elasticsearch.test.gateway.NoopGatewayAllocator; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.junit.Test; import java.util.*; @@ -65,15 +66,15 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.8).build(); Map<String, DiskUsage> usages = new HashMap<>(); - usages.put("node1", new DiskUsage("node1", "node1", 100, 10)); // 90% used - usages.put("node2", new DiskUsage("node2", "node2", 100, 35)); // 65% used - usages.put("node3", new DiskUsage("node3", "node3", 100, 60)); // 40% used - usages.put("node4", new DiskUsage("node4", "node4", 100, 80)); // 20% used + usages.put("node1", new DiskUsage("node1", "node1", "_na_", 100, 10)); // 90% used + usages.put("node2", new DiskUsage("node2", "node2", "_na_", 100, 35)); // 65% used + usages.put("node3", new DiskUsage("node3", "node3", "_na_", 100, 60)); // 40% used + usages.put("node4", new DiskUsage("node4", "node4", "_na_", 100, 80)); // 20% used Map<String, Long> shardSizes = new HashMap<>(); shardSizes.put("[test][0][p]", 10L); // 10 bytes shardSizes.put("[test][0][r]", 10L); - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -92,7 +93,6 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // noop } }; - AllocationService strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") @@ -259,16 +259,16 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "9b").build(); Map<String, DiskUsage> usages = new HashMap<>(); - usages.put("node1", new 
DiskUsage("node1", "n1", 100, 10)); // 90% used - usages.put("node2", new DiskUsage("node2", "n2", 100, 10)); // 90% used - usages.put("node3", new DiskUsage("node3", "n3", 100, 60)); // 40% used - usages.put("node4", new DiskUsage("node4", "n4", 100, 80)); // 20% used - usages.put("node5", new DiskUsage("node5", "n5", 100, 85)); // 15% used + usages.put("node1", new DiskUsage("node1", "n1", "_na_", 100, 10)); // 90% used + usages.put("node2", new DiskUsage("node2", "n2", "_na_", 100, 10)); // 90% used + usages.put("node3", new DiskUsage("node3", "n3", "_na_", 100, 60)); // 40% used + usages.put("node4", new DiskUsage("node4", "n4", "_na_", 100, 80)); // 20% used + usages.put("node5", new DiskUsage("node5", "n5", "_na_", 100, 85)); // 15% used Map<String, Long> shardSizes = new HashMap<>(); shardSizes.put("[test][0][p]", 10L); // 10 bytes shardSizes.put("[test][0][r]", 10L); - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -329,8 +329,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { logger.info("--> nodeWithoutPrimary: {}", nodeWithoutPrimary); // Make node without the primary now habitable to replicas - usages.put(nodeWithoutPrimary, new DiskUsage(nodeWithoutPrimary, "", 100, 35)); // 65% used - final ClusterInfo clusterInfo2 = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); + usages.put(nodeWithoutPrimary, new DiskUsage(nodeWithoutPrimary, "", "_na_", 100, 35)); // 65% used + final ClusterInfo clusterInfo2 = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); cis = new ClusterInfoService() { @Override public ClusterInfo getClusterInfo() { @@ -524,12 +524,12 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "71%").build(); Map<String, DiskUsage> usages = new HashMap<>(); - usages.put("node1", new DiskUsage("node1", "n1", 100, 31)); // 69% used - usages.put("node2", new DiskUsage("node2", "n2", 100, 1)); // 99% used + usages.put("node1", new DiskUsage("node1", "n1", "_na_", 100, 31)); // 69% used + usages.put("node2", new DiskUsage("node2", "n2", "_na_", 100, 1)); // 99% used Map<String, Long> shardSizes = new HashMap<>(); shardSizes.put("[test][0][p]", 10L); // 10 bytes - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -590,13 +590,13 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.85).build(); Map<String, DiskUsage> usages = new HashMap<>(); - usages.put("node2", new DiskUsage("node2", "node2", 100, 50)); // 50% used - usages.put("node3", new DiskUsage("node3", "node3", 100, 0)); // 100% used + usages.put("node2", new DiskUsage("node2", "node2", "_na_", 100, 50)); 
// 50% used + usages.put("node3", new DiskUsage("node3", "node3", "_na_", 100, 0)); // 100% used Map<String, Long> shardSizes = new HashMap<>(); shardSizes.put("[test][0][p]", 10L); // 10 bytes shardSizes.put("[test][0][r]", 10L); // 10 bytes - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -661,8 +661,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY); Map<String, DiskUsage> usages = new HashMap<>(); - usages.put("node2", new DiskUsage("node2", "n2", 100, 50)); // 50% used - usages.put("node3", new DiskUsage("node3", "n3", 100, 0)); // 100% used + usages.put("node2", new DiskUsage("node2", "n2", "_na_", 100, 50)); // 50% used + usages.put("node3", new DiskUsage("node3", "n3", "_na_", 100, 0)); // 100% used DiskUsage node1Usage = decider.averageUsage(rn, usages); assertThat(node1Usage.getTotalBytes(), equalTo(100L)); @@ -675,10 +675,10 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY); Map<String, DiskUsage> usages = new HashMap<>(); - usages.put("node2", new DiskUsage("node2", "n2", 100, 50)); // 50% used - usages.put("node3", new DiskUsage("node3", "n3", 100, 0)); // 100% used + usages.put("node2", new DiskUsage("node2", "n2", "_na_", 100, 50)); // 50% used + usages.put("node3", new DiskUsage("node3", "n3", "_na_", 100, 0)); // 100% used - Double after = decider.freeDiskPercentageAfterShardAssigned(new DiskUsage("node2", "n2", 100, 30), 11L); + Double after = decider.freeDiskPercentageAfterShardAssigned(new DiskUsage("node2", "n2", "_na_", 100, 30), 11L); assertThat(after, equalTo(19.0)); } @@ -691,16 +691,16 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, 0.8).build(); Map<String, DiskUsage> usages = new HashMap<>(); - usages.put("node1", new DiskUsage("node1", "n1", 100, 40)); // 60% used - usages.put("node2", new DiskUsage("node2", "n2", 100, 40)); // 60% used - usages.put("node2", new DiskUsage("node3", "n3", 100, 40)); // 60% used + usages.put("node1", new DiskUsage("node1", "n1", "_na_", 100, 40)); // 60% used + usages.put("node2", new DiskUsage("node2", "n2", "_na_", 100, 40)); // 60% used + usages.put("node2", new DiskUsage("node3", "n3", "_na_", 100, 40)); // 60% used Map<String, Long> shardSizes = new HashMap<>(); shardSizes.put("[test][0][p]", 14L); // 14 bytes shardSizes.put("[test][0][r]", 14L); shardSizes.put("[test2][0][p]", 1L); // 1 bytes shardSizes.put("[test2][0][r]", 1L); - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -797,13 +797,13 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // We have an index with 2 primary shards each taking 40 bytes. 
Each node has 100 bytes available Map<String, DiskUsage> usages = new HashMap<>(); - usages.put("node1", new DiskUsage("node1", "n1", 100, 20)); // 80% used - usages.put("node2", new DiskUsage("node2", "n2", 100, 100)); // 0% used + usages.put("node1", new DiskUsage("node1", "n1", "_na_", 100, 20)); // 80% used + usages.put("node2", new DiskUsage("node2", "n2", "_na_", 100, 100)); // 0% used Map<String, Long> shardSizes = new HashMap<>(); shardSizes.put("[test][0][p]", 40L); shardSizes.put("[test][1][p]", 40L); - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes)); DiskThresholdDecider diskThresholdDecider = new DiskThresholdDecider(diskSettings); MetaData metaData = MetaData.builder() diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index 0be13948e42..9d3b4a60e34 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -20,15 +20,15 @@ package org.elasticsearch.cluster.routing.allocation.decider; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterInfo; -import org.elasticsearch.cluster.ClusterInfoService; -import org.elasticsearch.cluster.EmptyClusterInfoService; +import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.RoutingNode; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.ShardRoutingHelper; -import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.node.settings.NodeSettingsService; @@ -76,11 +76,11 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { applySettings.onRefreshSettings(newSettings); assertThat("high threshold bytes should be unset", - decider.getFreeBytesThresholdHigh(), equalTo(ByteSizeValue.parseBytesSizeValue("0b", "test"))); + decider.getFreeBytesThresholdHigh(), equalTo(ByteSizeValue.parseBytesSizeValue("0b", "test"))); assertThat("high threshold percentage should be changed", decider.getFreeDiskThresholdHigh(), equalTo(30.0d)); assertThat("low threshold bytes should be set to 500mb", - decider.getFreeBytesThresholdLow(), equalTo(ByteSizeValue.parseBytesSizeValue("500mb", "test"))); + decider.getFreeBytesThresholdLow(), equalTo(ByteSizeValue.parseBytesSizeValue("500mb", "test"))); assertThat("low threshold bytes should be unset", decider.getFreeDiskThresholdLow(), equalTo(0.0d)); assertThat("reroute interval should be changed to 30 seconds", @@ -89,13 +89,120 @@ public class 
DiskThresholdDeciderUnitTests extends ESTestCase { assertFalse("relocations should now be disabled", decider.isIncludeRelocations()); } + public void testCanAllocateUsesMaxAvailableSpace() { + NodeSettingsService nss = new NodeSettingsService(Settings.EMPTY); + ClusterInfoService cis = EmptyClusterInfoService.INSTANCE; + DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); + + ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT); + DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT); + + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); + + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() + .put(node_0) + .put(node_1) + ).build(); + + // actual test -- after all that bloat :) + Map<String, DiskUsage> leastAvailableUsages = new HashMap<>(); + leastAvailableUsages.put("node_0", new DiskUsage("node_0", "node_0", "_na_", 100, 0)); // all full + leastAvailableUsages.put("node_1", new DiskUsage("node_1", "node_1", "_na_", 100, 0)); // all full + + Map<String, DiskUsage> mostAvailableUsage = new HashMap<>(); + mostAvailableUsage.put("node_0", new DiskUsage("node_0", "node_0", "_na_", 100, randomIntBetween(20, 100))); // 20 - 99 percent since after allocation there must be at least 10% left and shard is 10byte + mostAvailableUsage.put("node_1", new DiskUsage("node_1", "node_1", "_na_", 100, randomIntBetween(0, 10))); // this is weird and smells like a bug! it should be up to 20%? 
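+ // note: per this test's name, canAllocate is expected to consult the *most* available usage per node, so node_0 (20-100 bytes free on its best path) should get a YES below while node_1 (at most 10 bytes free) gets a NO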
+ + Map<String, Long> shardSizes = new HashMap<>(); + shardSizes.put("[test][0][p]", 10L); // 10 bytes + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(leastAvailableUsages), Collections.unmodifiableMap(mostAvailableUsage), Collections.unmodifiableMap(shardSizes)); + RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, new AllocationDecider[]{decider}), clusterState.getRoutingNodes(), clusterState.nodes(), clusterInfo); + assertEquals(mostAvailableUsage.toString(), Decision.YES, decider.canAllocate(test_0, new RoutingNode("node_0", node_0), allocation)); + assertEquals(mostAvailableUsage.toString(), Decision.NO, decider.canAllocate(test_0, new RoutingNode("node_1", node_1), allocation)); + } + + + public void testCanRemainUsesLeastAvailableSpace() { + NodeSettingsService nss = new NodeSettingsService(Settings.EMPTY); + ClusterInfoService cis = EmptyClusterInfoService.INSTANCE; + DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); + + + DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT); + DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT); + + ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRoutingHelper.initialize(test_0, node_0.getId()); + ShardRoutingHelper.moveToStarted(test_0); + + + ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRoutingHelper.initialize(test_1, node_1.getId()); + ShardRoutingHelper.moveToStarted(test_1); + + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); + + logger.info("--> adding two nodes"); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() + .put(node_0) + .put(node_1) + ).build(); + + // actual test -- after all that bloat :) + Map<String, DiskUsage> leastAvailableUsages = new HashMap<>(); + leastAvailableUsages.put("node_0", new DiskUsage("node_0", "node_0", "_na_", 100, 10)); // 90% used + leastAvailableUsages.put("node_1", new DiskUsage("node_1", "node_1", "_na_", 100, 9)); // 91% used + + Map<String, DiskUsage> mostAvailableUsage = new HashMap<>(); + mostAvailableUsage.put("node_0", new DiskUsage("node_0", "node_0", "_na_", 100, 90)); // 10% used + mostAvailableUsage.put("node_1", new DiskUsage("node_1", "node_1", "_na_", 100, 90)); // 10% used + + Map<String, Long> shardSizes = new HashMap<>(); + shardSizes.put("[test][0][p]", 10L); // 10 bytes + shardSizes.put("[test][1][p]", 10L); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(leastAvailableUsages), Collections.unmodifiableMap(mostAvailableUsage), Collections.unmodifiableMap(shardSizes)); + RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, new AllocationDecider[]{decider}), clusterState.getRoutingNodes(), clusterState.nodes(), clusterInfo); + assertEquals(Decision.YES, decider.canRemain(test_0, new RoutingNode("node_0", node_0), allocation)); + 
assertEquals(Decision.NO, decider.canRemain(test_1, new RoutingNode("node_1", node_1), allocation)); + try { + decider.canRemain(test_0, new RoutingNode("node_1", node_1), allocation); + fail("not allocated on this node"); + } catch (IllegalArgumentException ex) { + // not allocated on that node + } + try { + decider.canRemain(test_1, new RoutingNode("node_0", node_0), allocation); + fail("not allocated on this node"); + } catch (IllegalArgumentException ex) { + // not allocated on that node + } + } + + public void testShardSizeAndRelocatingSize() { Map<String, Long> shardSizes = new HashMap<>(); shardSizes.put("[test][0][r]", 10L); shardSizes.put("[test][1][r]", 100L); shardSizes.put("[test][2][r]", 1000L); shardSizes.put("[other][0][p]", 10000L); - ClusterInfo info = new ClusterInfo(Collections.EMPTY_MAP, shardSizes); + ClusterInfo info = new ClusterInfo(Collections.EMPTY_MAP, Collections.EMPTY_MAP, shardSizes); ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_0, "node1"); ShardRoutingHelper.moveToStarted(test_0); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java index 8df9117a03e..a90259826a4 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java @@ -23,7 +23,11 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterInfo; +import org.elasticsearch.cluster.ClusterInfoService; +import org.elasticsearch.cluster.DiskUsage; +import org.elasticsearch.cluster.InternalClusterInfoService; +import org.elasticsearch.cluster.MockInternalClusterInfoService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.common.settings.Settings; @@ -34,16 +38,14 @@ import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; import java.util.Collection; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; -import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newHashMap; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.*; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class MockDiskUsagesIT extends ESIntegTestCase { @@ -80,9 +82,9 @@ public class MockDiskUsagesIT extends ESIntegTestCase { // Start with all nodes at 50% usage final MockInternalClusterInfoService cis = (MockInternalClusterInfoService) internalCluster().getInstance(ClusterInfoService.class, internalCluster().getMasterName()); - cis.setN1Usage(nodes.get(0), new DiskUsage(nodes.get(0), "n1", 100, 50)); - cis.setN2Usage(nodes.get(1), new DiskUsage(nodes.get(1), "n2", 100, 50)); - cis.setN3Usage(nodes.get(2), new 
DiskUsage(nodes.get(2), "n3", 100, 50)); + cis.setN1Usage(nodes.get(0), new DiskUsage(nodes.get(0), "n1", "_na_", 100, 50)); + cis.setN2Usage(nodes.get(1), new DiskUsage(nodes.get(1), "n2", "_na_", 100, 50)); + cis.setN3Usage(nodes.get(2), new DiskUsage(nodes.get(2), "n3", "_na_", 100, 50)); client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder() .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, randomFrom("20b", "80%")) @@ -101,12 +103,12 @@ public class MockDiskUsagesIT extends ESIntegTestCase { @Override public void run() { ClusterInfo info = cis.getClusterInfo(); - logger.info("--> got: {} nodes", info.getNodeDiskUsages().size()); - assertThat(info.getNodeDiskUsages().size(), greaterThan(0)); + logger.info("--> got: {} nodes", info.getNodeLeastAvailableDiskUsages().size()); + assertThat(info.getNodeLeastAvailableDiskUsages().size(), greaterThan(0)); } }); - final List<String> realNodeNames = newArrayList(); + final List<String> realNodeNames = new ArrayList<>(); ClusterStateResponse resp = client().admin().cluster().prepareState().get(); Iterator<RoutingNode> iter = resp.getState().getRoutingNodes().iterator(); while (iter.hasNext()) { @@ -117,9 +119,9 @@ public class MockDiskUsagesIT extends ESIntegTestCase { } // Update the disk usages so one node has now passed the high watermark - cis.setN1Usage(realNodeNames.get(0), new DiskUsage(nodes.get(0), "n1", 100, 50)); - cis.setN2Usage(realNodeNames.get(1), new DiskUsage(nodes.get(1), "n2", 100, 50)); - cis.setN3Usage(realNodeNames.get(2), new DiskUsage(nodes.get(2), "n3", 100, 0)); // nothing free on node3 + cis.setN1Usage(realNodeNames.get(0), new DiskUsage(nodes.get(0), "n1", "_na_", 100, 50)); + cis.setN2Usage(realNodeNames.get(1), new DiskUsage(nodes.get(1), "n2", "_na_", 100, 50)); + cis.setN3Usage(realNodeNames.get(2), new DiskUsage(nodes.get(2), "n3", "_na_", 100, 0)); // nothing free on node3 // Retrieve the count of shards on each node final Map<String, Integer> nodesToShardCount = newHashMap(); @@ -142,9 +144,9 @@ public class MockDiskUsagesIT extends ESIntegTestCase { }); // Update the disk usages so one node is now back under the high watermark - cis.setN1Usage(realNodeNames.get(0), new DiskUsage(nodes.get(0), "n1", 100, 50)); - cis.setN2Usage(realNodeNames.get(1), new DiskUsage(nodes.get(1), "n2", 100, 50)); - cis.setN3Usage(realNodeNames.get(2), new DiskUsage(nodes.get(2), "n3", 100, 50)); // node3 has free space now + cis.setN1Usage(realNodeNames.get(0), new DiskUsage(nodes.get(0), "n1", "_na_", 100, 50)); + cis.setN2Usage(realNodeNames.get(1), new DiskUsage(nodes.get(1), "n2", "_na_", 100, 50)); + cis.setN3Usage(realNodeNames.get(2), new DiskUsage(nodes.get(2), "n3", "_na_", 100, 50)); // node3 has free space now // Retrieve the count of shards on each node nodesToShardCount.clear(); diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java index 9c2d02617dc..b7670eaafe8 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java @@ -33,7 +33,6 @@ import org.junit.Test; import java.util.Collection; import java.util.Collections; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; 
import static org.hamcrest.Matchers.equalTo; diff --git a/core/src/test/java/org/elasticsearch/common/StringsTests.java b/core/src/test/java/org/elasticsearch/common/StringsTests.java index 584428541e4..07465acaaae 100644 --- a/core/src/test/java/org/elasticsearch/common/StringsTests.java +++ b/core/src/test/java/org/elasticsearch/common/StringsTests.java @@ -29,6 +29,8 @@ public class StringsTests extends ESTestCase { assertEquals("fooBar", Strings.toCamelCase("foo_bar")); assertEquals("fooBarFooBar", Strings.toCamelCase("foo_bar_foo_bar")); assertEquals("fooBar", Strings.toCamelCase("foo_bar_")); + assertEquals("_foo", Strings.toCamelCase("_foo")); + assertEquals("_fooBar", Strings.toCamelCase("_foo_bar_")); } public void testSubstring() { diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java index 49ca12e0f84..63e0d50f39b 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java @@ -19,10 +19,8 @@ package org.elasticsearch.common.lucene.index; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; - import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -49,13 +47,13 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; -import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -143,7 +141,7 @@ public class FreqTermsEnumTests extends ESTestCase { // now go over each doc, build the relevant references and filter reader = DirectoryReader.open(iw, true); - List<Term> filterTerms = Lists.newArrayList(); + List<Term> filterTerms = new ArrayList<>(); for (int docId = 0; docId < reader.maxDoc(); docId++) { Document doc = reader.document(docId); addFreqs(doc, referenceAll); @@ -207,7 +205,7 @@ public class FreqTermsEnumTests extends ESTestCase { private void assertAgainstReference(FreqTermsEnum termsEnum, Map<String, FreqHolder> reference, boolean docFreq, boolean totalTermFreq) throws Exception { int cycles = randomIntBetween(1, 5); for (int i = 0; i < cycles; i++) { - List<String> terms = Lists.newArrayList(Arrays.asList(this.terms)); + List<String> terms = new ArrayList<>(Arrays.asList(this.terms)); Collections.shuffle(terms, getRandom()); for (String term : terms) { diff --git a/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java b/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java index 142d60871aa..0f90b8c3728 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java @@ -19,19 +19,19 @@ package org.elasticsearch.common.settings.loader; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; import org.junit.Test; import static 
org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; /** * */ public class JsonSettingsLoaderTests extends ESTestCase { - @Test public void testSimpleJsonSettings() throws Exception { String json = "/org/elasticsearch/common/settings/loader/test-settings.json"; @@ -50,4 +50,17 @@ public class JsonSettingsLoaderTests extends ESTestCase { assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1")); assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2")); } + + public void testDuplicateKeysThrowsException() { + String json = "{\"foo\":\"bar\",\"foo\":\"baz\"}"; + try { + settingsBuilder() + .loadFromSource(json) + .build(); + fail("expected exception"); + } catch (SettingsException e) { + assertEquals(e.getCause().getClass(), ElasticsearchParseException.class); + assertTrue(e.toString().contains("duplicate settings key [foo] found at line number [1], column number [13], previous value [bar], current value [baz]")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/common/settings/loader/PropertiesSettingsLoaderTests.java b/core/src/test/java/org/elasticsearch/common/settings/loader/PropertiesSettingsLoaderTests.java new file mode 100644 index 00000000000..7a1897fbaf9 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/settings/loader/PropertiesSettingsLoaderTests.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.settings.loader; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.nio.charset.Charset; + +public class PropertiesSettingsLoaderTests extends ESTestCase { + public void testDuplicateKeyFromStringThrowsException() throws IOException { + PropertiesSettingsLoader loader = new PropertiesSettingsLoader(); + try { + loader.load("foo=bar\nfoo=baz"); + fail("expected exception"); + } catch (ElasticsearchParseException e) { + assertEquals(e.getMessage(), "duplicate settings key [foo] found, previous value [bar], current value [baz]"); + } + } + + public void testDuplicateKeysFromBytesThrowsException() throws IOException { + PropertiesSettingsLoader loader = new PropertiesSettingsLoader(); + try { + loader.load("foo=bar\nfoo=baz".getBytes(Charset.defaultCharset())); + fail("expected exception"); + } catch (ElasticsearchParseException e) { + assertEquals(e.getMessage(), "duplicate settings key [foo] found, previous value [bar], current value [baz]"); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java b/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java index 49b5444a52b..60bf80a6e9d 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.settings.loader; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; @@ -31,7 +32,6 @@ import static org.hamcrest.Matchers.equalTo; * */ public class YamlSettingsLoaderTests extends ESTestCase { - @Test public void testSimpleYamlSettings() throws Exception { String yaml = "/org/elasticsearch/common/settings/loader/test-settings.yml"; @@ -66,4 +66,17 @@ public class YamlSettingsLoaderTests extends ESTestCase { .loadFromStream(yaml, getClass().getResourceAsStream(yaml)) .build(); } -} \ No newline at end of file + + public void testDuplicateKeysThrowsException() { + String yaml = "foo: bar\nfoo: baz"; + try { + settingsBuilder() + .loadFromSource(yaml) + .build(); + fail("expected exception"); + } catch (SettingsException e) { + assertEquals(e.getCause().getClass(), ElasticsearchParseException.class); + assertTrue(e.toString().contains("duplicate settings key [foo] found at line number [2], column number [6], previous value [bar], current value [baz]")); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java b/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java index 09c4b800568..d277e3a240c 100644 --- a/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java @@ -28,8 +28,16 @@ import org.apache.lucene.util.Counter; import org.elasticsearch.test.ESTestCase; import org.junit.Test; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.SortedSet; +import java.util.TreeSet; +import static org.elasticsearch.common.util.CollectionUtils.eagerPartition; import static org.hamcrest.Matchers.equalTo; import static 
org.hamcrest.Matchers.is; @@ -129,4 +137,51 @@ public class CollectionUtilsTests extends ESTestCase { } + public void testEmptyPartition() { + assertEquals( + Collections.emptyList(), + eagerPartition(Collections.emptyList(), 1) + ); + } + + public void testSimplePartition() { + assertEquals( + Arrays.asList( + Arrays.asList(1, 2), + Arrays.asList(3, 4), + Arrays.asList(5) + ), + eagerPartition(Arrays.asList(1, 2, 3, 4, 5), 2) + ); + } + + public void testSingletonPartition() { + assertEquals( + Arrays.asList( + Arrays.asList(1), + Arrays.asList(2), + Arrays.asList(3), + Arrays.asList(4), + Arrays.asList(5) + ), + eagerPartition(Arrays.asList(1, 2, 3, 4, 5), 1) + ); + } + + public void testOversizedPartition() { + assertEquals( + Arrays.asList(Arrays.asList(1, 2, 3, 4, 5)), + eagerPartition(Arrays.asList(1, 2, 3, 4, 5), 15) + ); + } + + public void testPerfectPartition() { + assertEquals( + Arrays.asList( + Arrays.asList(1, 2, 3, 4, 5, 6), + Arrays.asList(7, 8, 9, 10, 11, 12) + ), + eagerPartition(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12), 6) + ); + } } diff --git a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java index fc05a79423d..f91697884c0 100644 --- a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java @@ -83,7 +83,7 @@ public class MultiDataPathUpgraderTests extends ESTestCase { ShardStateMetaData.FORMAT.write(new ShardStateMetaData(metaStateVersion, true, uuid), metaStateVersion, shardDataPaths); } final Path path = randomFrom(shardDataPaths); - ShardPath targetPath = new ShardPath(path, path, uuid, new ShardId("foo", 0)); + ShardPath targetPath = new ShardPath(false, path, path, uuid, new ShardId("foo", 0)); MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); helper.upgrade(shardId, targetPath); assertFalse(helper.needsUpgrading(shardId)); @@ -177,7 +177,7 @@ public class MultiDataPathUpgraderTests extends ESTestCase { } logger.info("--> injecting index [{}] into multiple data paths", indexName); OldIndexBackwardsCompatibilityIT.copyIndex(logger, src, indexName, multiDataPath); - final ShardPath shardPath = new ShardPath(nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], IndexMetaData.INDEX_UUID_NA_VALUE, new ShardId(indexName, 0)); + final ShardPath shardPath = new ShardPath(false, nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], IndexMetaData.INDEX_UUID_NA_VALUE, new ShardId(indexName, 0)); logger.info("{}", FileSystemUtils.files(shardPath.resolveIndex())); diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java index ef1c0a9d7ec..520b619caaa 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.common.util.concurrent; -import com.google.common.collect.Lists; import org.elasticsearch.common.Priority; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -26,9 +25,16 @@ import 
org.elasticsearch.threadpool.ThreadPool; import org.junit.Test; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.concurrent.*; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.PriorityBlockingQueue; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; @@ -42,7 +48,7 @@ public class PrioritizedExecutorsTests extends ESTestCase { @Test public void testPriorityQueue() throws Exception { PriorityBlockingQueue<Priority> queue = new PriorityBlockingQueue<>(); - List<Priority> priorities = Lists.newArrayList(Priority.values()); + List<Priority> priorities = Arrays.asList(Priority.values()); Collections.shuffle(priorities); for (Priority priority : priorities) { diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java index d273f7c22b6..9089ec51ba9 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java @@ -19,19 +19,31 @@ package org.elasticsearch.common.xcontent.builder; -import com.google.common.collect.Lists; - import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.FastCharArrayWriter; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentGenerator; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import org.junit.Test; import java.io.IOException; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.TimeZone; import static org.elasticsearch.common.xcontent.XContentBuilder.FieldCaseConversion.CAMELCASE; import static org.elasticsearch.common.xcontent.XContentBuilder.FieldCaseConversion.UNDERSCORE; @@ -142,7 +154,7 @@ public class XContentBuilderTests extends ESTestCase { @Test public void testOverloadedList() throws Exception { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - builder.startObject().field("test", Lists.newArrayList("1", "2")).endObject(); + builder.startObject().field("test", Arrays.asList("1", "2")).endObject(); assertThat(builder.string(), equalTo("{\"test\":[\"1\",\"2\"]}")); } diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index cc293375a2c..55204365688 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ 
b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -42,7 +42,11 @@ import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BytesTransportRequest; +import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; import org.hamcrest.Matchers; import org.junit.Test; @@ -112,7 +116,7 @@ public class ZenDiscoveryIT extends ESIntegTestCase { createIndex("test"); ensureSearchable("test"); RecoveryResponse r = client().admin().indices().prepareRecoveries("test").get(); - int numRecoveriesBeforeNewMaster = r.shardResponses().get("test").size(); + int numRecoveriesBeforeNewMaster = r.shardRecoveryStates().get("test").size(); final String oldMaster = internalCluster().getMasterName(); internalCluster().stopCurrentMasterNode(); @@ -127,7 +131,7 @@ public class ZenDiscoveryIT extends ESIntegTestCase { ensureSearchable("test"); r = client().admin().indices().prepareRecoveries("test").get(); - int numRecoveriesAfterNewMaster = r.shardResponses().get("test").size(); + int numRecoveriesAfterNewMaster = r.shardRecoveryStates().get("test").size(); assertThat(numRecoveriesAfterNewMaster, equalTo(numRecoveriesBeforeNewMaster)); } diff --git a/core/src/test/java/org/elasticsearch/document/BulkIT.java b/core/src/test/java/org/elasticsearch/document/BulkIT.java index 3c13b3fe31e..78746847840 100644 --- a/core/src/test/java/org/elasticsearch/document/BulkIT.java +++ b/core/src/test/java/org/elasticsearch/document/BulkIT.java @@ -195,7 +195,7 @@ public class BulkIT extends ESIntegTestCase { bulkResponse = client().prepareBulk() .add(client().prepareUpdate("test", "type", "e1").setDoc("field", "2").setVersion(10)) // INTERNAL .add(client().prepareUpdate("test", "type", "e1").setDoc("field", "3").setVersion(20).setVersionType(VersionType.FORCE)) - .add(client().prepareUpdate("test", "type", "e1").setDoc("field", "3").setVersion(20).setVersionType(VersionType.INTERNAL)).get(); + .add(client().prepareUpdate("test", "type", "e1").setDoc("field", "4").setVersion(20).setVersionType(VersionType.INTERNAL)).get(); assertThat(bulkResponse.getItems()[0].getFailureMessage(), containsString("version conflict")); assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(20l)); diff --git a/core/src/test/java/org/elasticsearch/document/ShardInfoIT.java b/core/src/test/java/org/elasticsearch/document/ShardInfoIT.java index f1c6d476eb5..529b60562bf 100644 --- a/core/src/test/java/org/elasticsearch/document/ShardInfoIT.java +++ b/core/src/test/java/org/elasticsearch/document/ShardInfoIT.java @@ -35,7 +35,10 @@ import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; /** */ @@ -146,7 +149,7 @@ public class ShardInfoIT extends ESIntegTestCase { RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("idx") 
.setActiveOnly(true) .get(); - assertThat(recoveryResponse.shardResponses().get("idx").size(), equalTo(0)); + assertThat(recoveryResponse.shardRecoveryStates().get("idx").size(), equalTo(0)); } }); } diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java index 745c98f664e..fbd8b973fad 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.gateway; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; -import org.elasticsearch.action.admin.indices.recovery.ShardRecoveryResponse; import org.elasticsearch.action.count.CountResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -99,8 +98,7 @@ public class RecoveryBackwardsCompatibilityIT extends ESBackcompatTestCase { HashMap<String, String> map = new HashMap<>(); map.put("details", "true"); final ToXContent.Params params = new ToXContent.MapParams(map); - for (ShardRecoveryResponse response : recoveryResponse.shardResponses().get("test")) { - RecoveryState recoveryState = response.recoveryState(); + for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) { final String recoverStateAsJSON = XContentHelper.toString(recoveryState, params); if (!recoveryState.getPrimary()) { RecoveryState.Index index = recoveryState.getIndex(); diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index aa229305a2f..341139ba88b 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.gateway; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; -import org.elasticsearch.action.admin.indices.recovery.ShardRecoveryResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.client.Client; @@ -400,8 +399,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { assertSyncIdsNotNull(); } RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("test").get(); - for (ShardRecoveryResponse response : recoveryResponse.shardResponses().get("test")) { - RecoveryState recoveryState = response.recoveryState(); + for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) { long recovered = 0; for (RecoveryState.File file : recoveryState.getIndex().fileDetails()) { if (file.name().startsWith("segments")) { @@ -410,7 +408,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { } if (!recoveryState.getPrimary() && (useSyncIds == false)) { logger.info("--> replica shard {} recovered from {} to {}, recovered {}, reuse {}", - response.getShardId(), recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(), + recoveryState.getShardId().getId(), recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(), recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes()); assertThat("no bytes should be recovered", recoveryState.getIndex().recoveredBytes(), equalTo(recovered)); 
assertThat("data should have been reused", recoveryState.getIndex().reusedBytes(), greaterThan(0l)); @@ -422,7 +420,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { } else { if (useSyncIds && !recoveryState.getPrimary()) { logger.info("--> replica shard {} recovered from {} to {} using sync id, recovered {}, reuse {}", - response.getShardId(), recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(), + recoveryState.getShardId().getId(), recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(), recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes()); } assertThat(recoveryState.getIndex().recoveredBytes(), equalTo(0l)); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTest.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTest.java index de923f5d3f9..e3a5861ea62 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTest.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTest.java @@ -19,27 +19,34 @@ package org.elasticsearch.http.netty; import com.google.common.base.Charsets; -import com.google.common.collect.Lists; - +import org.elasticsearch.cache.recycler.MockPageCacheRecycler; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.netty.pipelining.OrderedDownstreamChannelEvent; import org.elasticsearch.http.netty.pipelining.OrderedUpstreamMessageEvent; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.cache.recycler.MockPageCacheRecycler; import org.elasticsearch.threadpool.ThreadPool; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; -import org.jboss.netty.channel.*; -import org.jboss.netty.handler.codec.http.*; +import org.jboss.netty.channel.ChannelHandlerContext; +import org.jboss.netty.channel.ChannelPipeline; +import org.jboss.netty.channel.ChannelPipelineFactory; +import org.jboss.netty.channel.ExceptionEvent; +import org.jboss.netty.channel.MessageEvent; +import org.jboss.netty.channel.SimpleChannelUpstreamHandler; +import org.jboss.netty.handler.codec.http.DefaultHttpResponse; +import org.jboss.netty.handler.codec.http.HttpRequest; +import org.jboss.netty.handler.codec.http.HttpResponse; +import org.jboss.netty.handler.codec.http.QueryStringDecoder; import org.junit.After; import org.junit.Before; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -108,7 +115,7 @@ public class NettyHttpServerPipeliningTest extends ESTestCase { List<String> requests = Arrays.asList("/slow?sleep=1000", "/firstfast", "/secondfast", "/thirdfast", "/slow?sleep=500"); try (NettyHttpClient nettyHttpClient = new NettyHttpClient()) { Collection<HttpResponse> responses = nettyHttpClient.sendRequests(transportAddress.address(), requests.toArray(new String[]{})); - List<String> responseBodies = Lists.newArrayList(returnHttpResponseBodies(responses)); + List<String> responseBodies = new ArrayList<>(returnHttpResponseBodies(responses)); // we cannot be sure about the order of the fast 
requests, but the slow ones should have to be last assertThat(responseBodies, hasSize(5)); assertThat(responseBodies.get(3), is("/slow?sleep=500")); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java index 55879671993..1d968b75b73 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.http.netty; -import com.google.common.collect.Lists; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.http.HttpServerTransport; @@ -27,6 +26,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.jboss.netty.handler.codec.http.HttpResponse; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -36,7 +36,8 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.http.netty.NettyHttpClient.returnOpaqueIds; import static org.elasticsearch.test.ESIntegTestCase.ClusterScope; import static org.elasticsearch.test.ESIntegTestCase.Scope; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.hasSize; /** * @@ -61,7 +62,7 @@ public class NettyPipeliningDisabledIT extends ESIntegTestCase { Collection<HttpResponse> responses = nettyHttpClient.sendRequests(inetSocketTransportAddress.address(), requests.toArray(new String[]{})); assertThat(responses, hasSize(requests.size())); - List<String> opaqueIds = Lists.newArrayList(returnOpaqueIds(responses)); + List<String> opaqueIds = new ArrayList<>(returnOpaqueIds(responses)); assertResponsesOutOfOrder(opaqueIds); } diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index 09512433784..3b955b271af 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -45,12 +45,16 @@ import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; import org.junit.Test; import java.io.IOException; import java.nio.file.Path; import java.util.Collection; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; @@ -59,7 +63,6 @@ import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.*; @@ -576,7 +579,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { 
ensureGreen(IDX); int docCount = randomIntBetween(10, 100); - List<IndexRequestBuilder> builders = newArrayList(); + List<IndexRequestBuilder> builders = new ArrayList<>(); for (int i = 0; i < docCount; i++) { builders.add(client().prepareIndex(IDX, "doc", i + "").setSource("foo", "bar")); } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index 4a09eeed6b6..03986188c6c 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -42,8 +42,8 @@ import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory; import org.elasticsearch.index.settings.IndexSettingsModule; import org.elasticsearch.indices.analysis.IndicesAnalysisService; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.hamcrest.MatcherAssert; -import org.junit.Test; import java.io.BufferedWriter; import java.io.IOException; @@ -54,9 +54,7 @@ import java.nio.file.Path; import java.util.Set; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.*; /** * @@ -87,26 +85,22 @@ public class AnalysisModuleTests extends ESTestCase { } - @Test public void testSimpleConfigurationJson() { Settings settings = loadFromClasspath("/org/elasticsearch/index/analysis/test1.json"); testSimpleConfiguration(settings); } - @Test public void testSimpleConfigurationYaml() { Settings settings = loadFromClasspath("/org/elasticsearch/index/analysis/test1.yml"); testSimpleConfiguration(settings); } - @Test public void testDefaultFactoryTokenFilters() throws IOException { assertTokenFilter("keyword_repeat", KeywordRepeatFilter.class); assertTokenFilter("persian_normalization", PersianNormalizationFilter.class); assertTokenFilter("arabic_normalization", ArabicNormalizationFilter.class); } - @Test public void testVersionedAnalyzers() throws Exception { String yaml = "/org/elasticsearch/index/analysis/test1.yml"; Settings settings2 = settingsBuilder() @@ -164,7 +158,7 @@ public class AnalysisModuleTests extends ESTestCase { // html = (HtmlStripCharFilterFactory) custom2.charFilters()[1]; // assertThat(html.readAheadLimit(), equalTo(1024)); - // verify position offset gap + // verify position increment gap analyzer = analysisService.analyzer("custom6").analyzer(); assertThat(analyzer, instanceOf(CustomAnalyzer.class)); CustomAnalyzer custom6 = (CustomAnalyzer) analyzer; @@ -215,7 +209,6 @@ public class AnalysisModuleTests extends ESTestCase { // MatcherAssert.assertThat(wordList, hasItems("donau", "dampf", "schiff", "spargel", "creme", "suppe")); } - @Test public void testWordListPath() throws Exception { Settings settings = Settings.builder() .put("path.home", createTempDir().toString()) @@ -243,7 +236,6 @@ public class AnalysisModuleTests extends ESTestCase { return wordListFile; } - @Test public void testUnderscoreInAnalyzerName() { Settings settings = Settings.builder() .put("index.analysis.analyzer._invalid_name.tokenizer", "keyword") @@ -255,11 +247,10 @@ public class AnalysisModuleTests extends ESTestCase { fail("This should fail with IllegalArgumentException because the analyzers name starts with _"); } catch (ProvisionException e) { assertTrue(e.getCause() instanceof 
IllegalArgumentException); - assertThat(e.getCause().getMessage(), equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")); + assertThat(e.getCause().getMessage(), either(equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")).or(equalTo("analyzer name must not start with '_'. got \"_invalidName\""))); } } - @Test public void testUnderscoreInAnalyzerNameAlias() { Settings settings = Settings.builder() .put("index.analysis.analyzer.valid_name.tokenizer", "keyword") @@ -275,4 +266,61 @@ public class AnalysisModuleTests extends ESTestCase { assertThat(e.getCause().getMessage(), equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")); } } + + public void testBackwardCompatible() { + Settings settings = settingsBuilder() + .put("index.analysis.analyzer.custom1.tokenizer", "standard") + .put("index.analysis.analyzer.custom1.position_offset_gap", "128") + .put("index.analysis.analyzer.custom2.tokenizer", "standard") + .put("index.analysis.analyzer.custom2.position_increment_gap", "256") + .put("path.home", createTempDir().toString()) + .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, + Version.V_1_7_1)) + .build(); + AnalysisService analysisService = getAnalysisService(settings); + + Analyzer custom1 = analysisService.analyzer("custom1").analyzer(); + assertThat(custom1, instanceOf(CustomAnalyzer.class)); + assertThat(custom1.getPositionIncrementGap("custom1"), equalTo(128)); + + Analyzer custom2 = analysisService.analyzer("custom2").analyzer(); + assertThat(custom2, instanceOf(CustomAnalyzer.class)); + assertThat(custom2.getPositionIncrementGap("custom2"), equalTo(256)); + } + + public void testWithBothSettings() { + Settings settings = settingsBuilder() + .put("index.analysis.analyzer.custom.tokenizer", "standard") + .put("index.analysis.analyzer.custom.position_offset_gap", "128") + .put("index.analysis.analyzer.custom.position_increment_gap", "256") + .put("path.home", createTempDir().toString()) + .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, + Version.V_1_7_1)) + .build(); + try { + getAnalysisService(settings); + fail("Analyzer has both position_offset_gap and position_increment_gap should fail"); + } catch (ProvisionException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertThat(e.getCause().getMessage(), equalTo("Custom Analyzer [custom] defined both [position_offset_gap] and [position_increment_gap]" + + ", use only [position_increment_gap]")); + } + } + + public void testDeprecatedPositionOffsetGap() { + Settings settings = settingsBuilder() + .put("index.analysis.analyzer.custom.tokenizer", "standard") + .put("index.analysis.analyzer.custom.position_offset_gap", "128") + .put("path.home", createTempDir().toString()) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build(); + try { + getAnalysisService(settings); + fail("Analyzer should fail if it has position_offset_gap"); + } catch (ProvisionException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertThat(e.getCause().getMessage(), equalTo("Option [position_offset_gap] in Custom Analyzer [custom] " + + "has been renamed, please use [position_increment_gap] instead.")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/test1.json b/core/src/test/java/org/elasticsearch/index/analysis/test1.json index 927d0704d1f..24349635671 100644 --- 
a/core/src/test/java/org/elasticsearch/index/analysis/test1.json +++ b/core/src/test/java/org/elasticsearch/index/analysis/test1.json @@ -68,7 +68,7 @@ }, "custom6":{ "tokenizer":"standard", - "position_offset_gap": 256 + "position_increment_gap": 256 }, "czechAnalyzerWithStemmer":{ "tokenizer":"standard", diff --git a/core/src/test/java/org/elasticsearch/index/analysis/test1.yml b/core/src/test/java/org/elasticsearch/index/analysis/test1.yml index 518ef2c5f43..196e4ef871a 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/test1.yml +++ b/core/src/test/java/org/elasticsearch/index/analysis/test1.yml @@ -50,7 +50,7 @@ index : char_filter : [my_mapping] custom6 : tokenizer : standard - position_offset_gap: 256 + position_increment_gap: 256 custom7 : type : standard version: 3.6 diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 8edc47410a3..f3d45a8061c 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -29,15 +29,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexDeletionPolicy; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LiveIndexWriterConfig; -import org.apache.lucene.index.LogByteSizeMergePolicy; -import org.apache.lucene.index.MergePolicy; -import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.TieredMergePolicy; +import org.apache.lucene.index.*; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; @@ -55,7 +47,6 @@ import org.elasticsearch.common.Base64; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.uid.Versions; @@ -69,16 +60,9 @@ import org.elasticsearch.index.deletionpolicy.KeepOnlyLastDeletionPolicy; import org.elasticsearch.index.deletionpolicy.SnapshotDeletionPolicy; import org.elasticsearch.index.engine.Engine.Searcher; import org.elasticsearch.index.indexing.ShardIndexingService; -import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.Mapper.BuilderContext; -import org.elasticsearch.index.mapper.MapperBuilders; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.Mapping; -import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.internal.SourceFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.mapper.object.RootObjectMapper; @@ -107,13 +91,7 @@ import java.nio.charset.Charset; import 
java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Map; +import java.util.*; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Pattern; @@ -121,12 +99,7 @@ import java.util.regex.Pattern; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY; import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.*; public class InternalEngineTests extends ESTestCase { @@ -1984,8 +1957,8 @@ public class InternalEngineTests extends ESTestCase { } @Override - protected Tuple<DocumentMapper, Mapping> docMapper(String type) { - return new Tuple<>(docMapper, mappingUpdate); + protected DocumentMapperForType docMapper(String type) { + return new DocumentMapperForType(docMapper, mappingUpdate); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java index 3dee75021dc..9a4250f188e 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java @@ -20,12 +20,15 @@ package org.elasticsearch.index.fielddata; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import com.google.common.collect.Lists; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StringField; -import org.apache.lucene.index.*; +import org.apache.lucene.index.CompositeReaderContext; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.English; @@ -40,8 +43,16 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.junit.Test; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; import java.util.Map.Entry; +import java.util.Random; +import java.util.Set; import static org.hamcrest.Matchers.*; @@ -578,8 +589,8 @@ public class DuelFieldDataTests extends AbstractFieldDataTests { final int numValues = leftValues.count(); rightValues.setDocument(i);; assertEquals(numValues, rightValues.count()); - List<GeoPoint> leftPoints = Lists.newArrayList(); - List<GeoPoint> rightPoints = Lists.newArrayList(); + List<GeoPoint> leftPoints = new ArrayList<>(); + List<GeoPoint> rightPoints = new ArrayList<>(); for (int j = 0; j < numValues; 
++j) { GeoPoint l = leftValues.valueAt(j); leftPoints.add(new GeoPoint(l.getLat(), l.getLon())); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java new file mode 100644 index 00000000000..fccf642e9df --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.mapper.internal.UidFieldMapper; +import org.elasticsearch.test.ESSingleNodeTestCase; + +// TODO: make this a real unit test +public class DocumentParserTests extends ESSingleNodeTestCase { + + public void testTypeDisabled() throws Exception { + DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .field("enabled", false).endObject().endObject().string(); + DocumentMapper mapper = mapperParser.parse(mapping); + + BytesReference bytes = XContentFactory.jsonBuilder() + .startObject().startObject("foo") + .field("field", "1234") + .endObject().endObject().bytes(); + ParsedDocument doc = mapper.parse("test", "type", "1", bytes); + assertNull(doc.rootDoc().getField("field")); + assertNotNull(doc.rootDoc().getField(UidFieldMapper.NAME)); + } + + public void testFieldDisabled() throws Exception { + DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("foo").field("enabled", false).endObject() + .startObject("bar").field("type", "integer").endObject() + .endObject().endObject().endObject().string(); + DocumentMapper mapper = mapperParser.parse(mapping); + + BytesReference bytes = XContentFactory.jsonBuilder() + .startObject() + .field("foo", "1234") + .field("bar", 10) + .endObject().bytes(); + ParsedDocument doc = mapper.parse("test", "type", "1", bytes); + assertNull(doc.rootDoc().getField("foo")); + assertNotNull(doc.rootDoc().getField("bar")); + assertNotNull(doc.rootDoc().getField(UidFieldMapper.NAME)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 02841f7f570..f989467c0f2 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -19,16 +19,11 @@ package 
org.elasticsearch.index.mapper; import com.google.common.collect.ImmutableMap; - import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.*; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.core.IntegerFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; @@ -368,7 +363,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { // Even if the dynamic type of our new field is long, we already have a mapping for the same field // of type string so it should be mapped as a string - DocumentMapper newMapper = indexService.mapperService().documentMapperWithAutoCreate("type2").v1(); + DocumentMapper newMapper = indexService.mapperService().documentMapperWithAutoCreate("type2").getDocumentMapper(); Mapper update = parse(newMapper, indexService.mapperService().documentMapperParser(), XContentFactory.jsonBuilder().startObject().field("my_field1", 42).endObject()); Mapper myField1Mapper = null; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index bfefe436fa7..5ea01ee244e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -20,14 +20,13 @@ package org.elasticsearch.index.mapper; import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.List; @@ -231,7 +230,7 @@ public class FieldTypeLookupTests extends ESTestCase { } static List<FieldMapper> newList(FieldMapper... mapper) { - return Lists.newArrayList(mapper); + return Arrays.asList(mapper); } // this sucks how much must be overridden just do get a dummy field mapper... 
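A note on the recurring Lists.newArrayList -> Arrays.asList replacements in the surrounding hunks (FieldTypeLookupTests above, PrioritizedExecutorsTests, XContentBuilderTests, and others): the two calls are not drop-in equivalents, because Arrays.asList returns a fixed-size list backed by the varargs array rather than a growable ArrayList. The swap appears safe at these call sites, which only iterate, read, or shuffle the result (Collections.shuffle swaps elements via set(), which the array-backed view supports). Below is a minimal, self-contained sketch of the distinction; the class name is illustrative only and not part of this patch.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class AsListSemanticsSketch {
    public static void main(String[] args) {
        // Fixed-size view over the varargs array: element replacement works...
        List<String> fixed = Arrays.asList("a", "b");
        fixed.set(0, "c");
        Collections.shuffle(fixed); // also fine: shuffle only swaps elements via set()

        // ...but structural modification does not.
        try {
            fixed.add("d");
        } catch (UnsupportedOperationException expected) {
            // Arrays.asList views cannot grow or shrink
        }

        // Where later code must add or remove elements, copy into a real
        // ArrayList, which is what Guava's Lists.newArrayList returned:
        List<String> growable = new ArrayList<>(Arrays.asList("a", "b"));
        growable.add("d");
        System.out.println(fixed + " " + growable);
    }
}
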
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java index b6ecb24d42a..4e906ae82fa 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.mapper.date; -import com.google.common.collect.Lists; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; @@ -33,6 +32,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; +import java.util.Arrays; import java.util.List; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -84,7 +84,7 @@ public class DateBackwardsCompatibilityTests extends ESSingleNodeTestCase { public void testThatPre2xSupportsUnixTimestampsInAnyDateFormat() throws Exception { long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015 - List<String> dateFormats = Lists.newArrayList("dateOptionalTime", "weekDate", "tTime", "ordinalDate", "hourMinuteSecond", "hourMinute"); + List<String> dateFormats = Arrays.asList("dateOptionalTime", "weekDate", "tTime", "ordinalDate", "hourMinuteSecond", "hourMinute"); for (String format : dateFormats) { XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index 0024a63a4b5..e0c7a30673a 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.externalvalues; import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; import com.spatial4j.core.shape.Point; import org.apache.lucene.document.Field; import org.elasticsearch.common.Strings; @@ -43,6 +42,7 @@ import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import java.io.IOException; import java.nio.charset.Charset; +import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -220,7 +220,7 @@ public class ExternalMapper extends FieldMapper { @Override public Iterator<Mapper> iterator() { - return Iterators.concat(super.iterator(), Lists.newArrayList(binMapper, boolMapper, pointMapper, shapeMapper, stringMapper).iterator()); + return Iterators.concat(super.iterator(), Arrays.asList(binMapper, boolMapper, pointMapper, shapeMapper, stringMapper).iterator()); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java index f6a4c30c863..4ec0ff5211e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java @@ -38,7 +38,7 @@ import java.util.Arrays; import java.util.List; import java.util.Map; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; public class DefaultSourceMappingTests extends 
ESSingleNodeTestCase { @@ -202,7 +202,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { MapperService mapperService = createIndex("test").mapperService(); mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true, false); - DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").v1(); + DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").getDocumentMapper(); assertThat(mapper.type(), equalTo("my_type")); assertThat(mapper.sourceMapper().enabled(), equalTo(false)); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index a540dd8e6a1..f122de90202 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -20,12 +20,12 @@ package org.elasticsearch.index.mapper.string; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; - import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,11 +39,13 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.Mapper.BuilderContext; +import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.VersionUtils; import org.junit.Before; import org.junit.Test; @@ -51,9 +53,7 @@ import java.util.Arrays; import java.util.Map; import static org.elasticsearch.index.mapper.core.StringFieldMapper.Builder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.*; /** */ @@ -222,22 +222,22 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties") .startObject("field1") .field("type", "string") - .field("position_offset_gap", 1000) + .field("position_increment_gap", 1000) .endObject() .startObject("field2") .field("type", "string") - .field("position_offset_gap", 1000) + .field("position_increment_gap", 1000) .field("analyzer", "standard") .endObject() .startObject("field3") .field("type", "string") - .field("position_offset_gap", 1000) + .field("position_increment_gap", 1000) .field("analyzer", "standard") .field("search_analyzer", "simple") .endObject() .startObject("field4") .field("type", "string") - .field("position_offset_gap", 1000) + .field("position_increment_gap", 1000) .field("analyzer", "standard") .field("search_analyzer", "simple") .field("search_quote_analyzer", "simple") @@ -246,7 +246,7 @@ public class SimpleStringMappingTests extends 
ESSingleNodeTestCase { .endObject().endObject().string(); DocumentMapper mapper = parser.parse(mapping); - for (String fieldName : Lists.newArrayList("field1", "field2", "field3", "field4")) { + for (String fieldName : Arrays.asList("field1", "field2", "field3", "field4")) { Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper); assertFalse(fieldName, serializedMap.containsKey("search_quote_analyzer")); } @@ -256,12 +256,12 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties") .startObject("field1") .field("type", "string") - .field("position_offset_gap", 1000) + .field("position_increment_gap", 1000) .field("search_quote_analyzer", "simple") .endObject() .startObject("field2") .field("type", "string") - .field("position_offset_gap", 1000) + .field("position_increment_gap", 1000) .field("analyzer", "standard") .field("search_analyzer", "standard") .field("search_quote_analyzer", "simple") @@ -270,7 +270,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); mapper = parser.parse(mapping); - for (String fieldName : Lists.newArrayList("field1", "field2")) { + for (String fieldName : Arrays.asList("field1", "field2")) { Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper); assertEquals(serializedMap.get("search_quote_analyzer"), "simple"); } @@ -518,4 +518,48 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { assertTrue(mergeResult.buildConflicts()[0].contains("cannot enable norms")); } + /** + * Test that expected exceptions are thrown when creating a new index with position_offset_gap + */ + public void testPositionOffsetGapDeprecation() throws Exception { + // test deprecation exceptions on newly created indexes + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field1") + .field("type", "string") + .field("position_increment_gap", 10) + .endObject() + .startObject("field2") + .field("type", "string") + .field("position_offset_gap", 50) + .field("analyzer", "standard") + .endObject().endObject().endObject().endObject().string(); + try { + parser.parse(mapping); + fail("Mapping definition should fail with the position_offset_gap setting"); + } catch (MapperParsingException e) { + assertEquals("Mapping definition for [field2] has unsupported parameters: [position_offset_gap : 50]", e.getMessage()); + } + } + + /** + * Test backward compatibility + */ + public void testBackwardCompatible() throws Exception { + + Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.V_1_7_1)).build(); + + DocumentMapperParser parser = createIndex("backward_compatible_index", settings).mapperService().documentMapperParser(); + + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field1") + .field("type", "string") + .field("position_offset_gap", 10) + .endObject().endObject().endObject().endObject().string(); + parser.parse(mapping); + + assertThat(parser.parse(mapping).mapping().toString(), containsString("\"position_increment_gap\":10")); + } }
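The new test class below pins down the renamed setting's semantics: with the post-2.0 default gap of 100, a phrase query can only match across two values of a multi-valued field once its slop reaches the gap. A condensed sketch of that behavior, reusing the same helpers the tests themselves use (the index and type names here are illustrative, not part of the patch):

    XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
            .startObject("string")
                .field("type", "string")
                .field("position_increment_gap", 100) // the post-2.0 default, spelled out
            .endObject()
        .endObject().endObject();
    client().admin().indices().prepareCreate("gap_demo").addMapping("doc", mapping).get();
    client().prepareIndex("gap_demo", "doc", "1")
            .setSource("string", Arrays.asList("one", "two three")).setRefresh(true).get();
    // "one two" spans the two values, so it misses at slop 99 and matches at slop 100:
    assertHitCount(client().prepareSearch("gap_demo").setQuery(matchPhraseQuery("string", "one two").slop(99)).get(), 0);
    assertHitCount(client().prepareSearch("gap_demo").setQuery(matchPhraseQuery("string", "one two").slop(100)).get(), 1);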
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/StringFieldMapperPositionIncrementGapTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/StringFieldMapperPositionIncrementGapTests.java new file mode 100644 index 00000000000..61a9c3ac9bd --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/StringFieldMapperPositionIncrementGapTests.java @@ -0,0 +1,158 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper.string; + +import com.google.common.collect.ImmutableList; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import java.io.IOException; + +import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.containsString; + +/** + * Tests that position_increment_gap is read from the mapper and applies as + * expected in queries. + */ +public class StringFieldMapperPositionIncrementGapTests extends ESSingleNodeTestCase { + /** + * The default position_increment_gap should be large enough that phrase + * queries with most "sensible" slops won't match across values. + */ + public void testDefault() throws IOException { + assertGapIsOneHundred(client(), "test", "test"); + } + + /** + * Asserts that the post-2.0 default is being applied. + */ + public static void assertGapIsOneHundred(Client client, String indexName, String type) throws IOException { + testGap(client, indexName, type, 100); + + // No match across gap using default slop with default positionIncrementGap + assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two")).get(), 0); + + // Nor with small-ish values + assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(5)).get(), 0); + assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(50)).get(), 0); + + // But huge-ish values still match + assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(500)).get(), 1); + } + + public void testZero() throws IOException { + setupGapInMapping(0); + assertGapIsZero(client(), "test", "test"); + } + + /** + * Asserts that the pre-2.0 default has been applied or explicitly + * configured. + */ + public static void assertGapIsZero(Client client, String indexName, String type) throws IOException { + testGap(client, indexName, type, 0); + /* + * Phrases match across different values using default slop with pre-2.0 default + * position_increment_gap. 
+ */ + assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two")).get(), 1); + } + + public void testLargerThanDefault() throws IOException { + setupGapInMapping(10000); + testGap(client(), "test", "test", 10000); + } + + public void testSmallerThanDefault() throws IOException { + setupGapInMapping(2); + testGap(client(), "test", "test", 2); + } + + public void testNegativeIsError() throws IOException { + try { + setupGapInMapping(-1); + fail("Expected an error"); + } catch (MapperParsingException e) { + assertThat(ExceptionsHelper.detailedMessage(e), containsString("positions_increment_gap less than 0 aren't allowed")); + } + } + + /** + * Tests that the default actually defaults to the position_increment_gap + * configured in the analyzer. This behavior is very old and a little + * strange but not worth breaking, some thought. + */ + public void testDefaultDefaultsToAnalyzer() throws IOException { + XContentBuilder settings = XContentFactory.jsonBuilder().startObject().startObject("analysis").startObject("analyzer") + .startObject("gappy"); + settings.field("type", "custom"); + settings.field("tokenizer", "standard"); + settings.field("position_increment_gap", 2); + setupAnalyzer(settings, "gappy"); + testGap(client(), "test", "test", 2); + } + + /** + * Build an index named "test" with a field named "string" with the provided + * positionIncrementGap that uses the standard analyzer. + */ + private void setupGapInMapping(int positionIncrementGap) throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("string"); + mapping.field("type", "string"); + mapping.field("position_increment_gap", positionIncrementGap); + client().admin().indices().prepareCreate("test").addMapping("test", mapping).get(); + } + + /** + * Build an index named "test" with the provided settings and a field + * named "string" that uses the specified analyzer and default + * position_increment_gap. 
+ */ + private void setupAnalyzer(XContentBuilder settings, String analyzer) throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("string"); + mapping.field("type", "string"); + mapping.field("analyzer", analyzer); + client().admin().indices().prepareCreate("test").addMapping("test", mapping).setSettings(settings).get(); + } + + private static void testGap(Client client, String indexName, String type, int positionIncrementGap) throws IOException { + client.prepareIndex(indexName, type, "position_gap_test").setSource("string", ImmutableList.of("one", "two three")).setRefresh(true).get(); + + // Baseline - phrase query finds matches in the same field value + assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "two three")).get(), 1); + + if (positionIncrementGap > 0) { + // No match across gaps when slop < position gap + assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap - 1)).get(), + 0); + } + + // Match across gaps when slop >= position gap + assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap)).get(), 1); + assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap + 1)).get(), 1); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java index 92f766de6c2..559d5d1e225 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java @@ -19,25 +19,54 @@ package org.elasticsearch.index.query; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; -import org.apache.lucene.index.*; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.MultiFields; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.queries.BoostingQuery; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryWrapperFilter; +import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.search.join.ToParentBlockJoinQuery; -import org.apache.lucene.search.spans.*; +import org.apache.lucene.search.spans.FieldMaskingSpanQuery; +import org.apache.lucene.search.spans.SpanContainingQuery; +import org.apache.lucene.search.spans.SpanFirstQuery; +import 
org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.search.spans.SpanNearQuery; +import org.apache.lucene.search.spans.SpanNotQuery; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.search.spans.SpanWithinQuery; import org.apache.lucene.spatial.prefix.IntersectsPrefixTreeFilter; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; -import org.elasticsearch.action.termvectors.*; +import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse; +import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; +import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; +import org.elasticsearch.action.termvectors.TermVectorsRequest; +import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; @@ -1044,7 +1073,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { @Test public void testTermsQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(termsQuery("name.first", Lists.newArrayList("shay", "test"))).query(); + Query parsedQuery = queryParser.parse(termsQuery("name.first", Arrays.asList("shay", "test"))).query(); assertThat(parsedQuery, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) parsedQuery; BooleanClause[] clauses = booleanQuery.getClauses(); @@ -1138,7 +1167,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { @Test public void testInQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(termsQuery("name.first", Lists.newArrayList("test1", "test2", "test3"))).query(); + Query parsedQuery = queryParser.parse(termsQuery("name.first", Arrays.asList("test1", "test2", "test3"))).query(); assertThat(parsedQuery, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) parsedQuery; BooleanClause[] clauses = booleanQuery.getClauses(); @@ -2452,7 +2481,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); assertThat(((ConstantScoreQuery) parsedQuery).getQuery(), instanceOf(ToParentBlockJoinQuery.class)); - assertThat(((ConstantScoreQuery) parsedQuery).getQuery().toString(), equalTo("ToParentBlockJoinQuery (+*:* #random_access(QueryWrapperFilter(_type:__nested)))")); + assertThat(((ConstantScoreQuery) parsedQuery).getQuery().toString(), equalTo("ToParentBlockJoinQuery (+*:* #QueryWrapperFilter(_type:__nested))")); SearchContext.removeCurrent(); } diff --git a/core/src/test/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQueryTests.java index 2a74fb986e2..2577de5f1c9 100644 --- a/core/src/test/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQueryTests.java @@ -20,26 +20,12 @@ package org.elasticsearch.index.search.child; import 
com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.ObjectObjectHashMap; - import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryUtils; -import org.apache.lucene.search.QueryWrapperFilter; -import org.apache.lucene.search.TermQuery; +import org.apache.lucene.index.*; +import org.apache.lucene.search.*; import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.store.Directory; import org.apache.lucene.util.FixedBitSet; @@ -53,7 +39,6 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.TestSearchContext; import org.junit.AfterClass; @@ -65,11 +50,7 @@ import java.util.NavigableSet; import java.util.Random; import java.util.TreeSet; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.filteredQuery; -import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery; -import static org.elasticsearch.index.query.QueryBuilders.notQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.index.query.QueryBuilders.*; import static org.hamcrest.Matchers.equalTo; public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase { @@ -119,9 +100,9 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase { IndexReader indexReader = DirectoryReader.open(indexWriter.w, false); IndexSearcher searcher = new IndexSearcher(indexReader); - ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher( - SearchContext.current(), new Engine.Searcher(ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher) - )); + ((TestSearchContext) SearchContext.current()).setSearcher( + new Engine.Searcher(ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher) + ); TermQuery childQuery = new TermQuery(new Term("field1", "value" + (1 + random().nextInt(3)))); BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")))); @@ -214,7 +195,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase { Engine.Searcher engineSearcher = new Engine.Searcher( ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher ); - ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher)); + ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher); int max = numUniqueChildValues / 
4; for (int i = 0; i < max; i++) { @@ -243,7 +224,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase { engineSearcher = new Engine.Searcher( ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher ); - ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher)); + ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher); } String childValue = childValues[random().nextInt(numUniqueChildValues)]; diff --git a/core/src/test/java/org/elasticsearch/index/search/child/ChildrenQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/child/ChildrenQueryTests.java index 9e70f667502..60a98ffbe6b 100644 --- a/core/src/test/java/org/elasticsearch/index/search/child/ChildrenQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/child/ChildrenQueryTests.java @@ -22,34 +22,13 @@ import com.carrotsearch.hppc.FloatArrayList; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.ObjectObjectHashMap; import com.carrotsearch.randomizedtesting.generators.RandomInts; - import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoubleField; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.MultiCollector; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryUtils; -import org.apache.lucene.search.QueryWrapperFilter; -import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.index.*; +import org.apache.lucene.search.*; import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.store.Directory; import org.apache.lucene.util.FixedBitSet; @@ -65,7 +44,6 @@ import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionBuilder; -import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.TestSearchContext; import org.junit.AfterClass; @@ -73,19 +51,9 @@ import org.junit.BeforeClass; import org.junit.Test; import java.io.IOException; -import java.util.Locale; -import java.util.Map; -import java.util.NavigableMap; -import java.util.Random; -import java.util.TreeMap; +import java.util.*; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.filteredQuery; -import static 
org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery; -import static org.elasticsearch.index.query.QueryBuilders.notQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.index.query.QueryBuilders.typeQuery; +import static org.elasticsearch.index.query.QueryBuilders.*; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -195,7 +163,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase { Engine.Searcher engineSearcher = new Engine.Searcher( ChildrenQueryTests.class.getSimpleName(), searcher ); - ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher)); + ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher); int max = numUniqueChildValues / 4; for (int i = 0; i < max; i++) { @@ -224,7 +192,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase { engineSearcher = new Engine.Searcher( ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher ); - ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher)); + ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher); } String childValue = childValues[random().nextInt(numUniqueChildValues)]; @@ -385,7 +353,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase { // setup to read the parent/child map Engine.Searcher engineSearcher = new Engine.Searcher(ChildrenQueryTests.class.getSimpleName(), searcher); - ((TestSearchContext)context).setSearcher(new ContextIndexSearcher(context, engineSearcher)); + ((TestSearchContext)context).setSearcher(engineSearcher); // child query that returns the score as the value of "childScore" for each child document, with the parent's score determined by the score type QueryBuilder childQueryBuilder = functionScoreQuery(typeQuery("child")).add(new FieldValueFactorFunctionBuilder(CHILD_SCORE_NAME)); diff --git a/core/src/test/java/org/elasticsearch/index/search/child/ParentConstantScoreQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/child/ParentConstantScoreQueryTests.java index ac5c3a46ece..55047d85bb5 100644 --- a/core/src/test/java/org/elasticsearch/index/search/child/ParentConstantScoreQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/child/ParentConstantScoreQueryTests.java @@ -20,26 +20,12 @@ package org.elasticsearch.index.search.child; import com.carrotsearch.hppc.IntIntHashMap; import com.carrotsearch.hppc.ObjectObjectHashMap; - import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryUtils; -import org.apache.lucene.search.QueryWrapperFilter; -import 
org.apache.lucene.search.TermQuery; +import org.apache.lucene.index.*; +import org.apache.lucene.search.*; import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.store.Directory; import org.apache.lucene.util.FixedBitSet; @@ -52,7 +38,6 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.TestSearchContext; import org.junit.AfterClass; @@ -64,11 +49,7 @@ import java.util.NavigableSet; import java.util.Random; import java.util.TreeSet; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.filteredQuery; -import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery; -import static org.elasticsearch.index.query.QueryBuilders.notQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.index.query.QueryBuilders.*; /** */ @@ -173,7 +154,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTestCase { Engine.Searcher engineSearcher = new Engine.Searcher( ParentConstantScoreQuery.class.getSimpleName(), searcher ); - ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher)); + ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher); int max = numUniqueParentValues / 4; for (int i = 0; i < max; i++) { @@ -200,7 +181,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTestCase { engineSearcher = new Engine.Searcher( ParentConstantScoreQueryTests.class.getSimpleName(), searcher ); - ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher)); + ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher); } String parentValue = parentValues[random().nextInt(numUniqueParentValues)]; diff --git a/core/src/test/java/org/elasticsearch/index/search/child/ParentQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/child/ParentQueryTests.java index 836ddf3b6a8..3fd638473ce 100644 --- a/core/src/test/java/org/elasticsearch/index/search/child/ParentQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/child/ParentQueryTests.java @@ -21,30 +21,12 @@ package org.elasticsearch.index.search.child; import com.carrotsearch.hppc.FloatArrayList; import com.carrotsearch.hppc.IntIntHashMap; import com.carrotsearch.hppc.ObjectObjectHashMap; - import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.IndexSearcher; -import 
org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.MultiCollector; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryUtils; -import org.apache.lucene.search.QueryWrapperFilter; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.index.*; +import org.apache.lucene.search.*; import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.store.Directory; import org.apache.lucene.util.FixedBitSet; @@ -57,7 +39,6 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.TestSearchContext; import org.junit.AfterClass; @@ -70,11 +51,7 @@ import java.util.NavigableMap; import java.util.Random; import java.util.TreeMap; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.filteredQuery; -import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery; -import static org.elasticsearch.index.query.QueryBuilders.notQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.index.query.QueryBuilders.*; public class ParentQueryTests extends AbstractChildTestCase { @@ -175,7 +152,7 @@ public class ParentQueryTests extends AbstractChildTestCase { Engine.Searcher engineSearcher = new Engine.Searcher( ParentQueryTests.class.getSimpleName(), searcher ); - ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher)); + ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher); int max = numUniqueParentValues / 4; for (int i = 0; i < max; i++) { @@ -202,7 +179,7 @@ public class ParentQueryTests extends AbstractChildTestCase { engineSearcher = new Engine.Searcher( ParentConstantScoreQueryTests.class.getSimpleName(), searcher ); - ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher)); + ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher); } String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
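The same two-line change repeats through the four parent/child query test classes above: TestSearchContext now accepts an Engine.Searcher directly, so the ContextIndexSearcher wrapper (and its import) can go away. In isolation the new pattern is just the following (the searcher label is arbitrary; variable names are illustrative):

    // Before: setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher))
    // After: the test context wraps the engine searcher itself.
    Engine.Searcher engineSearcher = new Engine.Searcher("my-test", searcher);
    ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);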
diff --git a/core/src/test/java/org/elasticsearch/index/shard/CommitPointsTests.java b/core/src/test/java/org/elasticsearch/index/shard/CommitPointsTests.java index 5a84bb0cec1..68bcc4d2b8e 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/CommitPointsTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/CommitPointsTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.shard; import com.google.common.base.Charsets; -import com.google.common.collect.Lists; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESTestCase; @@ -28,7 +27,6 @@ import org.junit.Test; import java.util.ArrayList; -import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -41,11 +39,11 @@ public class CommitPointsTests extends ESTestCase { @Test public void testCommitPointXContent() throws Exception { - ArrayList<CommitPoint.FileInfo> indexFiles = Lists.newArrayList(); + ArrayList<CommitPoint.FileInfo> indexFiles = new ArrayList<>(); indexFiles.add(new CommitPoint.FileInfo("file1", "file1_p", 100, "ck1")); indexFiles.add(new CommitPoint.FileInfo("file2", "file2_p", 200, "ck2")); - ArrayList<CommitPoint.FileInfo> translogFiles = Lists.newArrayList(); + ArrayList<CommitPoint.FileInfo> translogFiles = new ArrayList<>(); translogFiles.add(new CommitPoint.FileInfo("t_file1", "t_file1_p", 100, null)); translogFiles.add(new CommitPoint.FileInfo("t_file2", "t_file2_p", 200, null)); diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 1efe8d7f776..656d251b1bf 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -20,19 +20,29 @@ package org.elasticsearch.index.shard; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.store.LockObtainFailedException; +import org.apache.lucene.util.Constants; import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.IndexStats; +import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterInfoService; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; @@ -57,7 +67,10 @@ import java.util.HashSet; import java.util.Set; import java.util.concurrent.ExecutionException; -import static org.elasticsearch.cluster.metadata.IndexMetaData.*; +import static org.elasticsearch.cluster.metadata.IndexMetaData.EMPTY_PARAMS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -539,4 +552,35 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertPathHasBeenCleared(startDir.toAbsolutePath().toString()); assertPathHasBeenCleared(endDir.toAbsolutePath().toString()); } + + public void testShardStats() throws IOException { + createIndex("test"); + ensureGreen(); + 
IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexService test = indicesService.indexService("test"); + IndexShard shard = test.shard(0); + ShardStats stats = new ShardStats(shard, new CommonStatsFlags()); + assertEquals(shard.shardPath().getRootDataPath().toString(), stats.getDataPath()); + assertEquals(shard.shardPath().getRootStatePath().toString(), stats.getStatePath()); + assertEquals(shard.shardPath().isCustomDataPath(), stats.isCustomDataPath()); + + // serialize and deserialize to ensure the values survive the round trip + BytesStreamOutput out = new BytesStreamOutput(); + stats.writeTo(out); + StreamInput in = StreamInput.wrap(out.bytes()); + stats = ShardStats.readShardStats(in); + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + stats.toXContent(builder, EMPTY_PARAMS); + builder.endObject(); + String xContent = builder.string(); + StringBuilder expectedSubSequence = new StringBuilder("\"shard_path\":{\"state_path\":\""); + expectedSubSequence.append(shard.shardPath().getRootStatePath().toString()); + expectedSubSequence.append("\",\"data_path\":\""); + expectedSubSequence.append(shard.shardPath().getRootDataPath().toString()); + expectedSubSequence.append("\",\"is_custom_data_path\":").append(shard.shardPath().isCustomDataPath()).append("}"); + assumeFalse("Some path weirdness on windows", Constants.WINDOWS); + assertTrue(xContent.contains(expectedSubSequence)); + } + }
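The new testShardStats above also round-trips the stats through the transport serialization to make sure the shard-path fields survive. The round-trip idiom, extracted for clarity (the helper name and shape are illustrative, not ES API):

    // Write a Streamable out and read it back, as the test does inline:
    static ShardStats roundTrip(ShardStats stats) throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        stats.writeTo(out);                           // serialize to the wire format
        StreamInput in = StreamInput.wrap(out.bytes());
        return ShardStats.readShardStats(in);         // deserialize a fresh instance
    }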
diff --git a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTest.java b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTest.java new file mode 100644 index 00000000000..d89c328d9dd --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTest.java @@ -0,0 +1,236 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.shard; + +import com.carrotsearch.randomizedtesting.annotations.Repeat; + +import org.apache.lucene.mockfile.FilterFileSystem; +import org.apache.lucene.mockfile.FilterFileSystemProvider; +import org.apache.lucene.mockfile.FilterPath; +import org.apache.lucene.util.Constants; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment.NodePath; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.test.ESTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Field; +import java.nio.file.FileStore; +import java.nio.file.FileSystem; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.FileAttributeView; +import java.nio.file.attribute.FileStoreAttributeView; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.common.settings.Settings.settingsBuilder; + +/** Separate test class from ShardPathTests because we need static (BeforeClass) setup to install mock filesystems... */ +@SuppressForbidden(reason = "ProviderMismatchException if I try to use PathUtils.getDefault instead") +public class NewPathForShardTest extends ESTestCase { + + // Sneakiness to install mock file stores so we can pretend how much free space we have on each path.data: + private static MockFileStore aFileStore = new MockFileStore("mocka"); + private static MockFileStore bFileStore = new MockFileStore("mockb"); + private static FileSystem origFileSystem; + private static String aPathPart = File.separator + 'a' + File.separator; + private static String bPathPart = File.separator + 'b' + File.separator; + + @BeforeClass + public static void installMockUsableSpaceFS() throws Exception { + // Necessary so when Environment.clinit runs, to gather all FileStores, it sees ours: + origFileSystem = FileSystems.getDefault(); + + Field field = PathUtils.class.getDeclaredField("DEFAULT"); + field.setAccessible(true); + FileSystem mock = new MockUsableSpaceFileSystemProvider().getFileSystem(getBaseTempDirForTestClass().toUri()); + field.set(null, mock); + assertEquals(mock, PathUtils.getDefaultFileSystem()); + } + + @AfterClass + public static void removeMockUsableSpaceFS() throws Exception { + Field field = PathUtils.class.getDeclaredField("DEFAULT"); + field.setAccessible(true); + field.set(null, origFileSystem); + origFileSystem = null; + aFileStore = null; + bFileStore = null; + } + + /** Mock file system that fakes usable space for each FileStore */ + @SuppressForbidden(reason = "ProviderMismatchException if I try to use PathUtils.getDefault instead") + static class MockUsableSpaceFileSystemProvider extends FilterFileSystemProvider { + + public MockUsableSpaceFileSystemProvider() { + super("mockusablespace://", FileSystems.getDefault()); + final List<FileStore> fileStores = new ArrayList<>(); + fileStores.add(aFileStore); + fileStores.add(bFileStore); + fileSystem = new FilterFileSystem(this, origFileSystem) { + @Override + public Iterable<FileStore> getFileStores() { + return fileStores; + } + 
}; + } + + @Override + public FileStore getFileStore(Path path) throws IOException { + if (path.toString().contains(aPathPart)) { + return aFileStore; + } else { + return bFileStore; + } + } + } + + static class MockFileStore extends FileStore { + + public long usableSpace; + + private final String desc; + + public MockFileStore(String desc) { + this.desc = desc; + } + + @Override + public String type() { + return "mock"; + } + + @Override + public String name() { + return desc; + } + + @Override + public String toString() { + return desc; + } + + @Override + public boolean isReadOnly() { + return false; + } + + @Override + public long getTotalSpace() throws IOException { + return usableSpace*3; + } + + @Override + public long getUsableSpace() throws IOException { + return usableSpace; + } + + @Override + public long getUnallocatedSpace() throws IOException { + return usableSpace*2; + } + + @Override + public boolean supportsFileAttributeView(Class<? extends FileAttributeView> type) { + return false; + } + + @Override + public boolean supportsFileAttributeView(String name) { + return false; + } + + @Override + public <V extends FileStoreAttributeView> V getFileStoreAttributeView(Class<V> type) { + return null; + } + + @Override + public Object getAttribute(String attribute) throws IOException { + return null; + } + } + + public void testSelectNewPathForShard() throws Exception { + assumeFalse("Consistently fails on windows ('could not remove the following files')", Constants.WINDOWS); + Path path = PathUtils.get(createTempDir().toString()); + + // Use 2 data paths: + String[] paths = new String[] {path.resolve("a").toString(), + path.resolve("b").toString()}; + + Settings settings = Settings.builder() + .put("path.home", path) + .putArray("path.data", paths).build(); + NodeEnvironment nodeEnv = new NodeEnvironment(settings, new Environment(settings)); + + // Make sure all our mocking above actually worked: + NodePath[] nodePaths = nodeEnv.nodePaths(); + assertEquals(2, nodePaths.length); + + assertEquals("mocka", nodePaths[0].fileStore.name()); + assertEquals("mockb", nodePaths[1].fileStore.name()); + + // Path a has lots of free space, but b has little, so new shard should go to a: + aFileStore.usableSpace = 100000; + bFileStore.usableSpace = 1000; + + ShardId shardId = new ShardId("index", 0); + ShardPath result = ShardPath.selectNewPathForShard(nodeEnv, shardId, Settings.EMPTY, 100, Collections.<Path,Integer>emptyMap()); + assertTrue(result.getDataPath().toString().contains(aPathPart)); + + // Test the reverse: b has lots of free space, but a has little, so new shard should go to b: + aFileStore.usableSpace = 1000; + bFileStore.usableSpace = 100000; + + shardId = new ShardId("index", 0); + result = ShardPath.selectNewPathForShard(nodeEnv, shardId, Settings.EMPTY, 100, Collections.<Path,Integer>emptyMap()); + assertTrue(result.getDataPath().toString().contains(bPathPart)); + + // Now a and b have equal usable space; we allocate two shards to the node, and each should go to different paths: + aFileStore.usableSpace = 100000; + bFileStore.usableSpace = 100000; + + Map<Path,Integer> dataPathToShardCount = new HashMap<>(); + ShardPath result1 = ShardPath.selectNewPathForShard(nodeEnv, shardId, Settings.EMPTY, 100, dataPathToShardCount); + dataPathToShardCount.put(NodeEnvironment.shardStatePathToDataPath(result1.getDataPath()), 1); + ShardPath result2 = ShardPath.selectNewPathForShard(nodeEnv, shardId, Settings.EMPTY, 100, dataPathToShardCount); + + // #11122: this was the original failure: 
on a node with 2 disks that have nearly equal + // free space, we would always allocate all N incoming shards to the one path that + // had the most free space, never using the other drive unless new shards arrive + // after the first shards started using storage: + assertNotEquals(result1.getDataPath(), result2.getDataPath()); + } +}
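The final assertion encodes the #11122 fix: path selection must account for shards already routed to a path, not just raw free space. One plausible shape for such a heuristic, shown only as a sketch (this is not necessarily what ShardPath.selectNewPathForShard does internally; pickDataPath and avgShardSizeInBytes are illustrative names):

    // Prefer the path with the most usable space left after charging each
    // already-assigned shard an estimated size. Hypothetical sketch only.
    static Path pickDataPath(NodePath[] nodePaths, Map<Path, Integer> dataPathToShardCount,
                             long avgShardSizeInBytes) throws IOException {
        Path best = null;
        long bestScore = Long.MIN_VALUE;
        for (NodePath nodePath : nodePaths) {
            long usable = nodePath.fileStore.getUsableSpace();
            Integer assigned = dataPathToShardCount.get(nodePath.path);
            long score = usable - (assigned == null ? 0 : assigned) * avgShardSizeInBytes;
            if (score > bestScore) {
                bestScore = score;
                best = nodePath.path;
            }
        }
        return best;
    }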
diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java index e03698f62ee..32d20190890 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.index.shard; +import com.carrotsearch.randomizedtesting.annotations.Repeat; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; @@ -26,6 +27,7 @@ import org.junit.Test; import java.io.IOException; import java.nio.file.Path; +import java.util.Set; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -78,4 +80,71 @@ } } + @Test(expected = IllegalArgumentException.class) + public void testIllegalCustomDataPath() { + final Path path = createTempDir().resolve("foo").resolve("0"); + new ShardPath(true, path, path, "foo", new ShardId("foo", 0)); + } + + public void testValidCtor() { + final Path path = createTempDir().resolve("foo").resolve("0"); + ShardPath shardPath = new ShardPath(false, path, path, "foo", new ShardId("foo", 0)); + assertFalse(shardPath.isCustomDataPath()); + assertEquals(shardPath.getDataPath(), path); + assertEquals(shardPath.getShardStatePath(), path); + } + + public void testGetRootPaths() throws IOException { + boolean useCustomDataPath = randomBoolean(); + final Settings indexSettings; + final Settings nodeSettings; + Settings.Builder indexSettingsBuilder = settingsBuilder().put(IndexMetaData.SETTING_INDEX_UUID, "0xDEADBEEF"); + final Path customPath; + if (useCustomDataPath) { + final Path path = createTempDir(); + final boolean includeNodeId = randomBoolean(); + indexSettings = indexSettingsBuilder.put(IndexMetaData.SETTING_DATA_PATH, "custom").build(); + nodeSettings = settingsBuilder().put("path.shared_data", path.toAbsolutePath()) + .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH, includeNodeId).build(); + if (includeNodeId) { + customPath = path.resolve("custom").resolve("0"); + } else { + customPath = path.resolve("custom"); + } + } else { + customPath = null; + indexSettings = indexSettingsBuilder.build(); + nodeSettings = Settings.EMPTY; + } + try (final NodeEnvironment env = newNodeEnvironment(nodeSettings)) { + ShardId shardId = new ShardId("foo", 0); + Path[] paths = env.availableShardPaths(shardId); + Path path = randomFrom(paths); + ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF"), 2, path); + ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, indexSettings); + boolean found = false; + for (Path p : env.nodeDataPaths()) { + if (p.equals(shardPath.getRootStatePath())) { + found = true; + break; + } + } + assertTrue("root state path must be a node path but wasn't: " + shardPath.getRootStatePath(), found); + found = false; + if (useCustomDataPath) { + assertNotEquals(shardPath.getRootDataPath(), shardPath.getRootStatePath()); + assertEquals(customPath, shardPath.getRootDataPath()); + } else { + assertNull(customPath); + for (Path p : env.nodeDataPaths()) { + if (p.equals(shardPath.getRootDataPath())) { + found = true; + break; + } + } + assertTrue("root data path must be a node path but wasn't: " + shardPath.getRootDataPath(), found); + } + } + } + } diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 667174fe210..62d1b42d459 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -21,12 +21,14 @@ package org.elasticsearch.index.store; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.google.common.base.Charsets; import com.google.common.base.Predicate; -import com.google.common.collect.Lists; - import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.IndexFileNames; -import org.apache.lucene.store.*; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; @@ -40,7 +42,11 @@ import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.Nullable; @@ -53,7 +59,10 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.gateway.PrimaryShardAllocator; import org.elasticsearch.index.settings.IndexSettings; -import org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardState; +import org.elasticsearch.index.shard.MergePolicyConfig; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.TranslogService; import org.elasticsearch.indices.IndicesLifecycle; import org.elasticsearch.indices.IndicesService; @@ -67,7 +76,10 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.store.MockFSDirectoryService; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; import org.junit.Test; import java.io.IOException; @@ -79,7 +91,14 @@ import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; 
import java.nio.file.StandardOpenOption; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; @@ -87,6 +106,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.*; @@ -600,7 +620,7 @@ public class CorruptedFileIT extends ESIntegTestCase { private ShardRouting corruptRandomPrimaryFile(final boolean includePerCommitFiles) throws IOException { ClusterState state = client().admin().cluster().prepareState().get().getState(); GroupShardsIterator shardIterators = state.getRoutingNodes().getRoutingTable().activePrimaryShardsGrouped(new String[]{"test"}, false); - List<ShardIterator> iterators = Lists.newArrayList(shardIterators); + List<ShardIterator> iterators = iterableAsArrayList(shardIterators); ShardIterator shardIterator = RandomPicks.randomFrom(getRandom(), iterators); ShardRouting shardRouting = shardIterator.nextOrNull(); assertNotNull(shardRouting); diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java index 70fcfd4b180..c5158b782e3 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java @@ -19,9 +19,7 @@ package org.elasticsearch.index.store; -import com.google.common.collect.Lists; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ -39,19 +37,22 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.engine.MockEngineSupport; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportModule; import org.junit.Test; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; -import java.nio.file.*; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Set; import java.util.TreeSet; +import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.notNullValue; @@ -115,7 +116,7 @@ public class CorruptedTranslogIT extends ESIntegTestCase { private void corruptRandomTranslogFiles() throws IOException { ClusterState state = client().admin().cluster().prepareState().get().getState(); GroupShardsIterator shardIterators = state.getRoutingNodes().getRoutingTable().activePrimaryShardsGrouped(new 
String[]{"test"}, false); - List<ShardIterator> iterators = Lists.newArrayList(shardIterators); + List<ShardIterator> iterators = iterableAsArrayList(shardIterators); ShardIterator shardIterator = RandomPicks.randomFrom(getRandom(), iterators); ShardRouting shardRouting = shardIterator.nextOrNull(); assertNotNull(shardRouting); diff --git a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java index 75237921fdc..e921f95394c 100644 --- a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java @@ -35,11 +35,11 @@ import java.util.Locale; public class IndexStoreTests extends ESTestCase { public void testStoreDirectory() throws IOException { - final Path tempDir = createTempDir(); + final Path tempDir = createTempDir().resolve("foo").resolve("0"); final IndexStoreModule.Type[] values = IndexStoreModule.Type.values(); final IndexStoreModule.Type type = RandomPicks.randomFrom(random(), values); Settings settings = Settings.settingsBuilder().put(IndexStoreModule.STORE_TYPE, type.name().toLowerCase(Locale.ROOT)).build(); - FsDirectoryService service = new FsDirectoryService(settings, null, new ShardPath(tempDir, tempDir, "foo", new ShardId("foo", 0))); + FsDirectoryService service = new FsDirectoryService(settings, null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", 0))); try (final Directory directory = service.newFSDirectory(tempDir, NoLockFactory.INSTANCE)) { switch (type) { case NIOFS: @@ -70,9 +70,9 @@ public class IndexStoreTests extends ESTestCase { } public void testStoreDirectoryDefault() throws IOException { - final Path tempDir = createTempDir(); + final Path tempDir = createTempDir().resolve("foo").resolve("0"); Settings settings = Settings.EMPTY; - FsDirectoryService service = new FsDirectoryService(settings, null, new ShardPath(tempDir, tempDir, "foo", new ShardId("foo", 0))); + FsDirectoryService service = new FsDirectoryService(settings, null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", 0))); try (final Directory directory = service.newFSDirectory(tempDir, NoLockFactory.INSTANCE)) { if (Constants.WINDOWS) { assertTrue(directory.toString(), directory instanceof MMapDirectory || directory instanceof SimpleFSDirectory); diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 06fa074b05b..c7ebedf1cb7 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -53,9 +53,25 @@ import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.charset.Charset; -import java.nio.file.*; -import java.util.*; -import java.util.concurrent.*; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.InvalidPathException; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.CopyOnWriteArrayList; +import 
java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; @@ -63,7 +79,6 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Matcher; import java.util.regex.Pattern; -import static com.google.common.collect.Lists.newArrayList; import static org.hamcrest.Matchers.*; /** @@ -546,7 +561,7 @@ public class TranslogTests extends ESTestCase { @Test public void testTranslogChecksums() throws Exception { - List<Translog.Location> locations = newArrayList(); + List<Translog.Location> locations = new ArrayList<>(); int translogOperations = randomIntBetween(10, 100); for (int op = 0; op < translogOperations; op++) { @@ -570,7 +585,7 @@ public class TranslogTests extends ESTestCase { @Test public void testTruncatedTranslogs() throws Exception { - List<Translog.Location> locations = newArrayList(); + List<Translog.Location> locations = new ArrayList<>(); int translogOperations = randomIntBetween(10, 100); for (int op = 0; op < translogOperations; op++) { @@ -855,7 +870,7 @@ public class TranslogTests extends ESTestCase { } public void testLocationComparison() throws IOException { - List<Translog.Location> locations = newArrayList(); + List<Translog.Location> locations = new ArrayList<>(); int translogOperations = randomIntBetween(10, 100); int count = 0; for (int op = 0; op < translogOperations; op++) { @@ -884,7 +899,7 @@ public class TranslogTests extends ESTestCase { public void testBasicCheckpoint() throws IOException { - List<Translog.Location> locations = newArrayList(); + List<Translog.Location> locations = new ArrayList<>(); int translogOperations = randomIntBetween(10, 100); int lastSynced = -1; for (int op = 0; op < translogOperations; op++) { @@ -970,7 +985,7 @@ public class TranslogTests extends ESTestCase { } public void testBasicRecovery() throws IOException { - List<Translog.Location> locations = newArrayList(); + List<Translog.Location> locations = new ArrayList<>(); int translogOperations = randomIntBetween(10, 100); Translog.TranslogGeneration translogGeneration = null; int minUncommittedOp = -1; @@ -1012,7 +1027,7 @@ public class TranslogTests extends ESTestCase { } public void testRecoveryUncommitted() throws IOException { - List<Translog.Location> locations = newArrayList(); + List<Translog.Location> locations = new ArrayList<>(); int translogOperations = randomIntBetween(10, 100); final int prepareOp = randomIntBetween(0, translogOperations-1); Translog.TranslogGeneration translogGeneration = null; @@ -1066,7 +1081,7 @@ public class TranslogTests extends ESTestCase { public void testSnapshotFromStreamInput() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); - List<Translog.Operation> ops = newArrayList(); + List<Translog.Operation> ops = new ArrayList<>(); int translogOperations = randomIntBetween(10, 100); for (int op = 0; op < translogOperations; op++) { Translog.Create test = new Translog.Create("test", "" + op, Integer.toString(op).getBytes(Charset.forName("UTF-8"))); @@ -1079,8 +1094,8 @@ public class TranslogTests extends ESTestCase { } public void testLocationHashCodeEquals() throws IOException { - List<Translog.Location> locations = newArrayList(); - List<Translog.Location> locations2 = newArrayList(); + List<Translog.Location> locations = new ArrayList<>(); + List<Translog.Location> locations2 = new ArrayList<>(); int 
translogOperations = randomIntBetween(10, 100); try(Translog translog2 = create(createTempDir())) { for (int op = 0; op < translogOperations; op++) { @@ -1107,7 +1122,7 @@ public class TranslogTests extends ESTestCase { } public void testOpenForeignTranslog() throws IOException { - List<Translog.Location> locations = newArrayList(); + List<Translog.Location> locations = new ArrayList<>(); int translogOperations = randomIntBetween(1, 10); int firstUncommitted = 0; for (int op = 0; op < translogOperations; op++) { diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java index 8f27e679db8..bc2b71eb3f1 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.indices.analysis; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; @@ -32,6 +31,7 @@ import org.junit.Test; import java.lang.reflect.Field; import java.util.Collection; +import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; @@ -53,7 +53,7 @@ public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase { @Test public void testThatPreBuiltAnalyzersAreNotClosedOnIndexClose() throws Exception { Map<PreBuiltAnalyzers, List<Version>> loadedAnalyzers = Maps.newHashMap(); - List<String> indexNames = Lists.newArrayList(); + List<String> indexNames = new ArrayList<>(); final int numIndices = scaledRandomIntBetween(2, 4); for (int i = 0; i < numIndices; i++) { String indexName = randomAsciiOfLength(10).toLowerCase(Locale.ROOT); @@ -65,7 +65,7 @@ public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase { Version randomVersion = randomVersion(random()); if (!loadedAnalyzers.containsKey(preBuiltAnalyzer)) { - loadedAnalyzers.put(preBuiltAnalyzer, Lists.<Version>newArrayList()); + loadedAnalyzers.put(preBuiltAnalyzer, new ArrayList<Version>()); } loadedAnalyzers.get(preBuiltAnalyzer).add(randomVersion); diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index db46e8201d4..c632fa24668 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -253,7 +253,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { ensureGreen(); client().admin().indices().preparePutMapping("test") - .setType("document").setSource("simple", "type=string,analyzer=simple,position_offset_gap=100").get(); + .setType("document").setSource("simple", "type=string,analyzer=simple,position_increment_gap=100").get(); String[] texts = new String[]{"THIS IS A TEST", "THE SECOND TEXT"}; diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index 71ccf68e41a..a7ad9285aee 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -19,8 +19,6 @@ package org.elasticsearch.indices.mapping; -import com.google.common.collect.Lists; - 
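
The UpdateMappingIntegrationIT hunk that follows repeats the two mechanical substitutions used throughout this patch: Guava's Lists.newArrayList() becomes a plain new ArrayList<>(), and the Iterable-copying overload becomes CollectionUtils.iterableAsArrayList(...), as already seen in CorruptedFileIT and CorruptedTranslogIT above. A rough sketch of what such a copy helper presumably does (the actual org.elasticsearch.common.util.CollectionUtils implementation may differ in null handling and pre-sizing):

    import java.util.ArrayList;

    public final class CollectionUtilsSketch {
        private CollectionUtilsSketch() {}

        // Copies every element of an Iterable into a new ArrayList, covering
        // the Lists.newArrayList(Iterable) use case without the Guava dependency.
        public static <E> ArrayList<E> iterableAsArrayList(Iterable<? extends E> elements) {
            if (elements == null) {
                throw new NullPointerException("elements");
            }
            ArrayList<E> list = new ArrayList<>();
            for (E element : elements) {
                list.add(element);
            }
            return list;
        }
    }

Call sites then read iterableAsArrayList(shardIterators), exactly as in the corruption-test hunks above.
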
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; @@ -39,13 +37,15 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.hamcrest.Matchers; import org.junit.Test; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.cluster.metadata.IndexMetaData.*; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.*; @@ -64,7 +64,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { int recCount = randomIntBetween(200, 600); int numberOfTypes = randomIntBetween(1, 5); - List<IndexRequestBuilder> indexRequests = Lists.newArrayList(); + List<IndexRequestBuilder> indexRequests = new ArrayList<>(); for (int rec = 0; rec < recCount; rec++) { String type = "type" + (rec % numberOfTypes); String fieldName = "field_" + type + "_" + rec; diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java index 29e057d612c..4992b9f72dc 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java @@ -27,9 +27,9 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -59,7 +59,7 @@ public class CircuitBreakerNoopIT extends ESIntegTestCase { // index some different terms so we have some field data for loading int docCount = scaledRandomIntBetween(300, 1000); - List<IndexRequestBuilder> reqs = newArrayList(); + List<IndexRequestBuilder> reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); } @@ -77,7 +77,7 @@ public class CircuitBreakerNoopIT extends ESIntegTestCase { // index some different terms so we have some field data for loading int docCount = scaledRandomIntBetween(300, 1000); - List<IndexRequestBuilder> reqs = newArrayList(); + List<IndexRequestBuilder> reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); } diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index fb9e9cb935f..4ffe6361cb5 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -40,10 +40,10 @@ 
import org.junit.After; import org.junit.Before; import org.junit.Test; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; -import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -111,7 +111,7 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // index some different terms so we have some field data for loading int docCount = scaledRandomIntBetween(300, 1000); - List<IndexRequestBuilder> reqs = newArrayList(); + List<IndexRequestBuilder> reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", "value" + id)); } @@ -158,7 +158,7 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // index some different terms so we have some field data for loading int docCount = scaledRandomIntBetween(300, 1000); - List<IndexRequestBuilder> reqs = newArrayList(); + List<IndexRequestBuilder> reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { reqs.add(client.prepareIndex("ramtest", "type", Long.toString(id)).setSource("test", "value" + id)); } @@ -207,7 +207,7 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // index some different terms so we have some field data for loading int docCount = scaledRandomIntBetween(300, 1000); - List<IndexRequestBuilder> reqs = newArrayList(); + List<IndexRequestBuilder> reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", "value" + id)); } @@ -273,7 +273,7 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // index some different terms so we have some field data for loading int docCount = scaledRandomIntBetween(300, 1000); - List<IndexRequestBuilder> reqs = newArrayList(); + List<IndexRequestBuilder> reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); } diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 8f8b12d741c..396d20e5915 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRes import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; -import org.elasticsearch.action.admin.indices.recovery.ShardRecoveryResponse; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -54,7 +53,12 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.store.MockFSDirectoryService; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.*; +import 
org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; import org.junit.Test; import java.io.IOException; @@ -69,7 +73,13 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.ESIntegTestCase.Scope; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.not; /** * @@ -161,18 +171,17 @@ public class IndexRecoveryIT extends ESIntegTestCase { logger.info("--> request recoveries"); RecoveryResponse response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet(); - assertThat(response.shardResponses().size(), equalTo(SHARD_COUNT)); - assertThat(response.shardResponses().get(INDEX_NAME).size(), equalTo(1)); + assertThat(response.shardRecoveryStates().size(), equalTo(SHARD_COUNT)); + assertThat(response.shardRecoveryStates().get(INDEX_NAME).size(), equalTo(1)); - List<ShardRecoveryResponse> shardResponses = response.shardResponses().get(INDEX_NAME); - assertThat(shardResponses.size(), equalTo(1)); + List<RecoveryState> recoveryStates = response.shardRecoveryStates().get(INDEX_NAME); + assertThat(recoveryStates.size(), equalTo(1)); - ShardRecoveryResponse shardResponse = shardResponses.get(0); - RecoveryState state = shardResponse.recoveryState(); + RecoveryState recoveryState = recoveryStates.get(0); - assertRecoveryState(state, 0, Type.STORE, Stage.DONE, node, node, false); + assertRecoveryState(recoveryState, 0, Type.STORE, Stage.DONE, node, node, false); - validateIndexRecoveryState(state.getIndex()); + validateIndexRecoveryState(recoveryState.getIndex()); } @Test @@ -189,8 +198,8 @@ public class IndexRecoveryIT extends ESIntegTestCase { logger.info("--> request recoveries"); RecoveryResponse response = client().admin().indices().prepareRecoveries(INDEX_NAME).setActiveOnly(true).execute().actionGet(); - List<ShardRecoveryResponse> shardResponses = response.shardResponses().get(INDEX_NAME); - assertThat(shardResponses.size(), equalTo(0)); // Should not expect any responses back + List<RecoveryState> recoveryStates = response.shardRecoveryStates().get(INDEX_NAME); + assertThat(recoveryStates.size(), equalTo(0)); // Should not expect any responses back } @Test @@ -215,23 +224,23 @@ public class IndexRecoveryIT extends ESIntegTestCase { RecoveryResponse response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet(); // we should now have two total shards, one primary and one replica - List<ShardRecoveryResponse> shardResponses = response.shardResponses().get(INDEX_NAME); - assertThat(shardResponses.size(), equalTo(2)); + List<RecoveryState> recoveryStates = response.shardRecoveryStates().get(INDEX_NAME); + assertThat(recoveryStates.size(), equalTo(2)); - List<ShardRecoveryResponse> nodeAResponses = findRecoveriesForTargetNode(nodeA, 
shardResponses); + List<RecoveryState> nodeAResponses = findRecoveriesForTargetNode(nodeA, recoveryStates); assertThat(nodeAResponses.size(), equalTo(1)); - List<ShardRecoveryResponse> nodeBResponses = findRecoveriesForTargetNode(nodeB, shardResponses); + List<RecoveryState> nodeBResponses = findRecoveriesForTargetNode(nodeB, recoveryStates); assertThat(nodeBResponses.size(), equalTo(1)); // validate node A recovery - ShardRecoveryResponse nodeAShardResponse = nodeAResponses.get(0); - assertRecoveryState(nodeAShardResponse.recoveryState(), 0, Type.STORE, Stage.DONE, nodeA, nodeA, false); - validateIndexRecoveryState(nodeAShardResponse.recoveryState().getIndex()); + RecoveryState nodeARecoveryState = nodeAResponses.get(0); + assertRecoveryState(nodeARecoveryState, 0, Type.STORE, Stage.DONE, nodeA, nodeA, false); + validateIndexRecoveryState(nodeARecoveryState.getIndex()); // validate node B recovery - ShardRecoveryResponse nodeBShardResponse = nodeBResponses.get(0); - assertRecoveryState(nodeBShardResponse.recoveryState(), 0, Type.REPLICA, Stage.DONE, nodeA, nodeB, false); - validateIndexRecoveryState(nodeBShardResponse.recoveryState().getIndex()); + RecoveryState nodeBRecoveryState = nodeBResponses.get(0); + assertRecoveryState(nodeBRecoveryState, 0, Type.REPLICA, Stage.DONE, nodeA, nodeB, false); + validateIndexRecoveryState(nodeBRecoveryState.getIndex()); } @Test @@ -272,17 +281,17 @@ public class IndexRecoveryIT extends ESIntegTestCase { logger.info("--> request recoveries"); RecoveryResponse response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet(); - List<ShardRecoveryResponse> shardResponses = response.shardResponses().get(INDEX_NAME); - List<ShardRecoveryResponse> nodeAResponses = findRecoveriesForTargetNode(nodeA, shardResponses); - assertThat(nodeAResponses.size(), equalTo(1)); - List<ShardRecoveryResponse> nodeBResponses = findRecoveriesForTargetNode(nodeB, shardResponses); - assertThat(nodeBResponses.size(), equalTo(1)); + List<RecoveryState> recoveryStates = response.shardRecoveryStates().get(INDEX_NAME); + List<RecoveryState> nodeARecoveryStates = findRecoveriesForTargetNode(nodeA, recoveryStates); + assertThat(nodeARecoveryStates.size(), equalTo(1)); + List<RecoveryState> nodeBRecoveryStates = findRecoveriesForTargetNode(nodeB, recoveryStates); + assertThat(nodeBRecoveryStates.size(), equalTo(1)); - assertRecoveryState(nodeAResponses.get(0).recoveryState(), 0, Type.STORE, Stage.DONE, nodeA, nodeA, false); - validateIndexRecoveryState(nodeAResponses.get(0).recoveryState().getIndex()); + assertRecoveryState(nodeARecoveryStates.get(0), 0, Type.STORE, Stage.DONE, nodeA, nodeA, false); + validateIndexRecoveryState(nodeARecoveryStates.get(0).getIndex()); - assertOnGoingRecoveryState(nodeBResponses.get(0).recoveryState(), 0, Type.RELOCATION, nodeA, nodeB, false); - validateIndexRecoveryState(nodeBResponses.get(0).recoveryState().getIndex()); + assertOnGoingRecoveryState(nodeBRecoveryStates.get(0), 0, Type.RELOCATION, nodeA, nodeB, false); + validateIndexRecoveryState(nodeBRecoveryStates.get(0).getIndex()); logger.info("--> request node recovery stats"); NodesStatsResponse statsResponse = client().admin().cluster().prepareNodesStats().clear().setIndices(new CommonStatsFlags(CommonStatsFlags.Flag.Recovery)).get(); @@ -331,11 +340,11 @@ public class IndexRecoveryIT extends ESIntegTestCase { response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet(); - shardResponses = response.shardResponses().get(INDEX_NAME); - 
assertThat(shardResponses.size(), equalTo(1)); + recoveryStates = response.shardRecoveryStates().get(INDEX_NAME); + assertThat(recoveryStates.size(), equalTo(1)); - assertRecoveryState(shardResponses.get(0).recoveryState(), 0, Type.RELOCATION, Stage.DONE, nodeA, nodeB, false); - validateIndexRecoveryState(shardResponses.get(0).recoveryState().getIndex()); + assertRecoveryState(recoveryStates.get(0), 0, Type.RELOCATION, Stage.DONE, nodeA, nodeB, false); + validateIndexRecoveryState(recoveryStates.get(0).getIndex()); statsResponse = client().admin().cluster().prepareNodesStats().clear().setIndices(new CommonStatsFlags(CommonStatsFlags.Flag.Recovery)).get(); assertThat(statsResponse.getNodes(), arrayWithSize(2)); @@ -383,45 +392,45 @@ public class IndexRecoveryIT extends ESIntegTestCase { .execute().actionGet().getState(); response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet(); - shardResponses = response.shardResponses().get(INDEX_NAME); + recoveryStates = response.shardRecoveryStates().get(INDEX_NAME); - nodeAResponses = findRecoveriesForTargetNode(nodeA, shardResponses); - assertThat(nodeAResponses.size(), equalTo(1)); - nodeBResponses = findRecoveriesForTargetNode(nodeB, shardResponses); - assertThat(nodeBResponses.size(), equalTo(1)); - List<ShardRecoveryResponse> nodeCResponses = findRecoveriesForTargetNode(nodeC, shardResponses); - assertThat(nodeCResponses.size(), equalTo(1)); + nodeARecoveryStates = findRecoveriesForTargetNode(nodeA, recoveryStates); + assertThat(nodeARecoveryStates.size(), equalTo(1)); + nodeBRecoveryStates = findRecoveriesForTargetNode(nodeB, recoveryStates); + assertThat(nodeBRecoveryStates.size(), equalTo(1)); + List<RecoveryState> nodeCRecoveryStates = findRecoveriesForTargetNode(nodeC, recoveryStates); + assertThat(nodeCRecoveryStates.size(), equalTo(1)); - assertRecoveryState(nodeAResponses.get(0).recoveryState(), 0, Type.REPLICA, Stage.DONE, nodeB, nodeA, false); - validateIndexRecoveryState(nodeAResponses.get(0).recoveryState().getIndex()); + assertRecoveryState(nodeARecoveryStates.get(0), 0, Type.REPLICA, Stage.DONE, nodeB, nodeA, false); + validateIndexRecoveryState(nodeARecoveryStates.get(0).getIndex()); - assertRecoveryState(nodeBResponses.get(0).recoveryState(), 0, Type.RELOCATION, Stage.DONE, nodeA, nodeB, false); - validateIndexRecoveryState(nodeBResponses.get(0).recoveryState().getIndex()); + assertRecoveryState(nodeBRecoveryStates.get(0), 0, Type.RELOCATION, Stage.DONE, nodeA, nodeB, false); + validateIndexRecoveryState(nodeBRecoveryStates.get(0).getIndex()); // relocations of replicas are marked as REPLICA and the source node is the node holding the primary (B) - assertOnGoingRecoveryState(nodeCResponses.get(0).recoveryState(), 0, Type.REPLICA, nodeB, nodeC, false); - validateIndexRecoveryState(nodeCResponses.get(0).recoveryState().getIndex()); + assertOnGoingRecoveryState(nodeCRecoveryStates.get(0), 0, Type.REPLICA, nodeB, nodeC, false); + validateIndexRecoveryState(nodeCRecoveryStates.get(0).getIndex()); logger.info("--> speeding up recoveries"); restoreRecoverySpeed(); ensureGreen(); response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet(); - shardResponses = response.shardResponses().get(INDEX_NAME); + recoveryStates = response.shardRecoveryStates().get(INDEX_NAME); - nodeAResponses = findRecoveriesForTargetNode(nodeA, shardResponses); - assertThat(nodeAResponses.size(), equalTo(0)); - nodeBResponses = findRecoveriesForTargetNode(nodeB, shardResponses); - 
assertThat(nodeBResponses.size(), equalTo(1)); - nodeCResponses = findRecoveriesForTargetNode(nodeC, shardResponses); - assertThat(nodeCResponses.size(), equalTo(1)); + nodeARecoveryStates = findRecoveriesForTargetNode(nodeA, recoveryStates); + assertThat(nodeARecoveryStates.size(), equalTo(0)); + nodeBRecoveryStates = findRecoveriesForTargetNode(nodeB, recoveryStates); + assertThat(nodeBRecoveryStates.size(), equalTo(1)); + nodeCRecoveryStates = findRecoveriesForTargetNode(nodeC, recoveryStates); + assertThat(nodeCRecoveryStates.size(), equalTo(1)); - assertRecoveryState(nodeBResponses.get(0).recoveryState(), 0, Type.RELOCATION, Stage.DONE, nodeA, nodeB, false); - validateIndexRecoveryState(nodeBResponses.get(0).recoveryState().getIndex()); + assertRecoveryState(nodeBRecoveryStates.get(0), 0, Type.RELOCATION, Stage.DONE, nodeA, nodeB, false); + validateIndexRecoveryState(nodeBRecoveryStates.get(0).getIndex()); // relocations of replicas are marked as REPLICA and the source node is the node holding the primary (B) - assertRecoveryState(nodeCResponses.get(0).recoveryState(), 0, Type.REPLICA, Stage.DONE, nodeB, nodeC, false); - validateIndexRecoveryState(nodeCResponses.get(0).recoveryState().getIndex()); + assertRecoveryState(nodeCRecoveryStates.get(0), 0, Type.REPLICA, Stage.DONE, nodeB, nodeC, false); + validateIndexRecoveryState(nodeCRecoveryStates.get(0).getIndex()); } @Test @@ -463,24 +472,24 @@ public class IndexRecoveryIT extends ESIntegTestCase { logger.info("--> request recoveries"); RecoveryResponse response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet(); - for (Map.Entry<String, List<ShardRecoveryResponse>> shardRecoveryResponse : response.shardResponses().entrySet()) { + for (Map.Entry<String, List<RecoveryState>> indexRecoveryStates : response.shardRecoveryStates().entrySet()) { - assertThat(shardRecoveryResponse.getKey(), equalTo(INDEX_NAME)); - List<ShardRecoveryResponse> shardRecoveryResponses = shardRecoveryResponse.getValue(); - assertThat(shardRecoveryResponses.size(), equalTo(totalShards)); + assertThat(indexRecoveryStates.getKey(), equalTo(INDEX_NAME)); + List<RecoveryState> recoveryStates = indexRecoveryStates.getValue(); + assertThat(recoveryStates.size(), equalTo(totalShards)); - for (ShardRecoveryResponse shardResponse : shardRecoveryResponses) { - assertRecoveryState(shardResponse.recoveryState(), 0, Type.SNAPSHOT, Stage.DONE, null, nodeA, true); - validateIndexRecoveryState(shardResponse.recoveryState().getIndex()); + for (RecoveryState recoveryState : recoveryStates) { + assertRecoveryState(recoveryState, 0, Type.SNAPSHOT, Stage.DONE, null, nodeA, true); + validateIndexRecoveryState(recoveryState.getIndex()); } } } - private List<ShardRecoveryResponse> findRecoveriesForTargetNode(String nodeName, List<ShardRecoveryResponse> responses) { - List<ShardRecoveryResponse> nodeResponses = new ArrayList<>(); - for (ShardRecoveryResponse response : responses) { - if (response.recoveryState().getTargetNode().getName().equals(nodeName)) { - nodeResponses.add(response); + private List<RecoveryState> findRecoveriesForTargetNode(String nodeName, List<RecoveryState> recoveryStates) { + List<RecoveryState> nodeResponses = new ArrayList<>(); + for (RecoveryState recoveryState : recoveryStates) { + if (recoveryState.getTargetNode().getName().equals(nodeName)) { + nodeResponses.add(recoveryState); } } return nodeResponses; diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java 
b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index b8185250761..4ffc09b22d5 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.indices.template; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionRequestValidationException; @@ -41,6 +40,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Set; @@ -235,7 +235,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { GetIndexTemplatesResponse getTemplate1Response = client().admin().indices().prepareGetTemplates("template_*").execute().actionGet(); assertThat(getTemplate1Response.getIndexTemplates(), hasSize(2)); - List<String> templateNames = Lists.newArrayList(); + List<String> templateNames = new ArrayList<>(); templateNames.add(getTemplate1Response.getIndexTemplates().get(0).name()); templateNames.add(getTemplate1Response.getIndexTemplates().get(1).name()); assertThat(templateNames, containsInAnyOrder("template_1", "template_2")); @@ -244,7 +244,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { getTemplate1Response = client().admin().indices().prepareGetTemplates("template*").execute().actionGet(); assertThat(getTemplate1Response.getIndexTemplates(), hasSize(3)); - templateNames = Lists.newArrayList(); + templateNames = new ArrayList<>(); templateNames.add(getTemplate1Response.getIndexTemplates().get(0).name()); templateNames.add(getTemplate1Response.getIndexTemplates().get(1).name()); templateNames.add(getTemplate1Response.getIndexTemplates().get(2).name()); @@ -254,7 +254,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { getTemplate1Response = client().admin().indices().prepareGetTemplates("template_1", "template_2").execute().actionGet(); assertThat(getTemplate1Response.getIndexTemplates(), hasSize(2)); - templateNames = Lists.newArrayList(); + templateNames = new ArrayList<>(); templateNames.add(getTemplate1Response.getIndexTemplates().get(0).name()); templateNames.add(getTemplate1Response.getIndexTemplates().get(1).name()); assertThat(templateNames, containsInAnyOrder("template_1", "template_2")); diff --git a/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java b/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java index 6179af72d34..3a00be0aeb9 100644 --- a/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.indices.warmer; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.google.common.collect.ImmutableList; - import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.segments.IndexSegments; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; @@ -49,7 +48,10 @@ import org.junit.Test; import java.util.Locale; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static 
org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; public class SimpleIndicesWarmerIT extends ESIntegTestCase { @@ -173,6 +175,18 @@ public class SimpleIndicesWarmerIT extends ESIntegTestCase { } } + @Test // issue 8991 + public void deleteAllIndexWarmerDoesNotThrowWhenNoWarmers() { + createIndex("test"); + DeleteWarmerResponse deleteWarmerResponse = client().admin().indices().prepareDeleteWarmer() + .setIndices("test").setNames("_all").execute().actionGet(); + assertThat(deleteWarmerResponse.isAcknowledged(), equalTo(true)); + + deleteWarmerResponse = client().admin().indices().prepareDeleteWarmer() + .setIndices("test").setNames("foo", "_all", "bar").execute().actionGet(); + assertThat(deleteWarmerResponse.isAcknowledged(), equalTo(true)); + } + @Test public void deleteIndexWarmerTest() { createIndex("test"); @@ -260,7 +274,7 @@ public class SimpleIndicesWarmerIT extends ESIntegTestCase { for (IndexShardSegments indexShardSegments : indicesSegments) { for (ShardSegments shardSegments : indexShardSegments) { for (Segment segment : shardSegments) { - logger.debug("+=" + segment.memoryInBytes + " " + indexShardSegments.getShardId() + " " + shardSegments.getIndex()); + logger.debug("+=" + segment.memoryInBytes + " " + indexShardSegments.getShardId() + " " + shardSegments.getShardRouting().getIndex()); total += segment.memoryInBytes; } } diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java index 58180b277f8..232169474b2 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java @@ -20,8 +20,6 @@ package org.elasticsearch.plugins; import com.google.common.base.Function; -import com.google.common.collect.Lists; - import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo; import org.elasticsearch.test.ESTestCase; @@ -33,6 +31,7 @@ import java.nio.file.Path; import java.util.List; import java.util.Properties; +import static org.elasticsearch.common.util.CollectionUtils.eagerTransform; import static org.hamcrest.Matchers.contains; public class PluginInfoTests extends ESTestCase { @@ -282,7 +281,7 @@ public class PluginInfoTests extends ESTestCase { pluginsInfo.add(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true)); final List<PluginInfo> infos = pluginsInfo.getInfos(); - List<String> names = Lists.transform(infos, new Function<PluginInfo, String>() { + List<String> names = eagerTransform(infos, new Function<PluginInfo, String>() { @Override public String apply(PluginInfo input) { return input.getName(); diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 60a6acb7ed9..450212b75b5 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -284,7 +284,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats().get(); for (ShardStats shardStats : indicesStatsResponse.getShards()) { DocsStats docsStats = shardStats.getStats().docs; - logger.info("shard [{}] - count {}, primary {}", shardStats.getShardId(), docsStats.getCount(), 
shardStats.getShardRouting().primary()); + logger.info("shard [{}] - count {}, primary {}", shardStats.getShardRouting().id(), docsStats.getCount(), shardStats.getShardRouting().primary()); } //if there was an error we try to wait and see if at some point it'll get fixed diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 11016ee1b4c..124f055b334 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -64,7 +64,6 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; -import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; @@ -422,7 +421,7 @@ public class RelocationIT extends ESIntegTestCase { public boolean apply(Object input) { RecoveryResponse recoveryResponse = internalCluster().client(redNodeName).admin().indices().prepareRecoveries(indexName) .get(); - return !recoveryResponse.shardResponses().get(indexName).isEmpty(); + return !recoveryResponse.shardRecoveryStates().get(indexName).isEmpty(); } } ); diff --git a/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java b/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java index a8ad99d3b39..5760c284161 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest; -import com.google.common.collect.Lists; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -30,7 +29,12 @@ import org.elasticsearch.test.rest.FakeRestRequest; import org.junit.Test; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -57,7 +61,7 @@ public class RestFilterChainTests extends ESTestCase { restController.registerFilter(testFilter); } - ArrayList<RestFilter> restFiltersByOrder = Lists.newArrayList(filters); + ArrayList<RestFilter> restFiltersByOrder = new ArrayList<>(filters); Collections.sort(restFiltersByOrder, new Comparator<RestFilter>() { @Override public int compare(RestFilter o1, RestFilter o2) { @@ -65,7 +69,7 @@ public class RestFilterChainTests extends ESTestCase { } }); - List<RestFilter> expectedRestFilters = Lists.newArrayList(); + List<RestFilter> expectedRestFilters = new ArrayList<>(); for (RestFilter filter : restFiltersByOrder) { TestFilter testFilter = (TestFilter) filter; expectedRestFilters.add(testFilter); @@ -87,7 +91,7 @@ public class RestFilterChainTests extends ESTestCase { assertThat(fakeRestChannel.await(), equalTo(true)); - List<TestFilter> testFiltersByLastExecution = Lists.newArrayList(); + List<TestFilter> testFiltersByLastExecution = new ArrayList<>(); for (RestFilter restFilter : filters) { testFiltersByLastExecution.add((TestFilter)restFilter); } @@ 
-98,7 +102,7 @@ public class RestFilterChainTests extends ESTestCase { } }); - ArrayList<TestFilter> finalTestFilters = Lists.newArrayList(); + ArrayList<TestFilter> finalTestFilters = new ArrayList<>(); for (RestFilter filter : testFiltersByLastExecution) { TestFilter testFilter = (TestFilter) filter; finalTestFilters.add(testFilter); diff --git a/core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java b/core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java index 8df2c205b38..ed4c999ad22 100644 --- a/core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java +++ b/core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java @@ -28,18 +28,12 @@ import org.elasticsearch.script.groovy.GroovyScriptEngineService; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; +import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.equalTo; /** @@ -67,7 +61,7 @@ public class GroovyScriptIT extends ESIntegTestCase { @Test public void testGroovyExceptionSerialization() throws Exception { - List<IndexRequestBuilder> reqs = newArrayList(); + List<IndexRequestBuilder> reqs = new ArrayList<>(); for (int i = 0; i < randomIntBetween(50, 500); i++) { reqs.add(client().prepareIndex("test", "doc", "" + i).setSource("foo", "bar")); } diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java index 3c35a286e3b..e0a7d4ba3cd 100644 --- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.script; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; @@ -36,6 +35,7 @@ import org.elasticsearch.watcher.ResourceWatcherService; import org.junit.Test; import java.io.IOException; +import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.Set; @@ -83,7 +83,7 @@ public class NativeScriptTests extends ESTestCase { Map<String, NativeScriptFactory> nativeScriptFactoryMap = new HashMap<>(); nativeScriptFactoryMap.put("my", new MyNativeScriptFactory()); Set<ScriptEngineService> scriptEngineServices = ImmutableSet.<ScriptEngineService>of(new NativeScriptEngineService(settings, nativeScriptFactoryMap)); - ScriptContextRegistry scriptContextRegistry = new 
ScriptContextRegistry(Lists.<ScriptContext.Plugin>newArrayList()); + ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(new ArrayList<ScriptContext.Plugin>()); ScriptService scriptService = new ScriptService(settings, environment, scriptEngineServices, resourceWatcherService, scriptContextRegistry); for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) { diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextRegistryTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextRegistryTests.java index 8bb77b48fef..c7d3a52bf7e 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextRegistryTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextRegistryTests.java @@ -19,12 +19,13 @@ package org.elasticsearch.script; -import com.google.common.collect.Lists; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; import org.junit.Test; import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; public class ScriptContextRegistryTests extends ESTestCase { @@ -33,7 +34,7 @@ public class ScriptContextRegistryTests extends ESTestCase { for (final String rejectedContext : ScriptContextRegistry.RESERVED_SCRIPT_CONTEXTS) { try { //try to register a prohibited script context - new ScriptContextRegistry(Lists.newArrayList(new ScriptContext.Plugin("test", rejectedContext))); + new ScriptContextRegistry(Arrays.asList(new ScriptContext.Plugin("test", rejectedContext))); fail("ScriptContextRegistry initialization should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), Matchers.containsString("[" + rejectedContext + "] is a reserved name, it cannot be registered as a custom script context")); @@ -46,7 +47,7 @@ public class ScriptContextRegistryTests extends ESTestCase { for (final String rejectedContext : ScriptContextRegistry.RESERVED_SCRIPT_CONTEXTS) { try { //try to register a prohibited script context - new ScriptContextRegistry(Lists.newArrayList(new ScriptContext.Plugin(rejectedContext, "test"))); + new ScriptContextRegistry(Collections.singleton(new ScriptContext.Plugin(rejectedContext, "test"))); fail("ScriptContextRegistry initialization should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), Matchers.containsString("[" + rejectedContext + "] is a reserved name, it cannot be registered as a custom script context")); @@ -68,7 +69,7 @@ public class ScriptContextRegistryTests extends ESTestCase { public void testDuplicatedPluginScriptContexts() throws IOException { try { //try to register a prohibited script context - new ScriptContextRegistry(Lists.newArrayList(new ScriptContext.Plugin("testplugin", "test"), new ScriptContext.Plugin("testplugin", "test"))); + new ScriptContextRegistry(Arrays.asList(new ScriptContext.Plugin("testplugin", "test"), new ScriptContext.Plugin("testplugin", "test"))); fail("ScriptContextRegistry initialization should have failed"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), Matchers.containsString("script context [testplugin_test] cannot be registered twice")); @@ -77,6 +78,6 @@ public class ScriptContextRegistryTests extends ESTestCase { @Test public void testNonDuplicatedPluginScriptContexts() throws IOException { - new ScriptContextRegistry(Lists.newArrayList(new ScriptContext.Plugin("testplugin1", "test"), new ScriptContext.Plugin("testplugin2", "test"))); + new ScriptContextRegistry(Arrays.asList(new ScriptContext.Plugin("testplugin1", "test"), new 
ScriptContext.Plugin("testplugin2", "test"))); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java index 8944294fd42..e395b0ad528 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.aggregations; -import com.google.common.collect.Lists; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; @@ -28,6 +27,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.ESIntegTestCase; +import java.util.ArrayList; import java.util.List; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; @@ -45,7 +45,7 @@ public class AggregationsIntegrationIT extends ESIntegTestCase { assertAcked(prepareCreate("index").addMapping("type", "f", "type=string").get()); ensureYellow("index"); numDocs = randomIntBetween(1, 20); - List<IndexRequestBuilder> docs = Lists.newArrayList(); + List<IndexRequestBuilder> docs = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { docs.add(client().prepareIndex("index", "type").setSource("f", Integer.toString(i / 3))); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java index abe88af10aa..0d881feac6d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations; import com.carrotsearch.hppc.IntHashSet; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -49,19 +48,8 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats; -import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; -import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; -import static org.elasticsearch.search.aggregations.AggregationBuilders.max; -import static org.elasticsearch.search.aggregations.AggregationBuilders.min; -import static org.elasticsearch.search.aggregations.AggregationBuilders.percentiles; -import static org.elasticsearch.search.aggregations.AggregationBuilders.range; -import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; -import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; -import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.search.aggregations.AggregationBuilders.*; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.core.IsNull.notNullValue; @@ -202,7 +190,7 @@ public class EquivalenceIT extends ESIntegTestCase { .endObject() .endObject()).execute().actionGet(); - List<IndexRequestBuilder> indexingRequests = Lists.newArrayList(); + List<IndexRequestBuilder> indexingRequests = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { final int[] values = new int[randomInt(4)]; for (int j = 0; j < values.length; ++j) { @@ -329,7 +317,7 @@ public class EquivalenceIT extends ESIntegTestCase { final int numDocs = scaledRandomIntBetween(2500, 5000); logger.info("Indexing [" + numDocs +"] docs"); - List<IndexRequestBuilder> indexingRequests = Lists.newArrayList(); + List<IndexRequestBuilder> indexingRequests = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { indexingRequests.add(client().prepareIndex("idx", "type", Integer.toString(i)).setSource("double_value", randomDouble())); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java index 169901aad59..c729c2f2d04 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java @@ -255,9 +255,15 @@ public class ChildrenIT extends ESIntegTestCase { assertThat(count.getValue(), equalTo(4.)); String idToUpdate = Integer.toString(randomInt(3)); + /* + * The whole point of this test is to test these things with deleted + * docs in the index so we turn off detect_noop to make sure that + * the updates cause that. + */ UpdateResponse updateResponse = client().prepareUpdate(indexName, "child", idToUpdate) .setParent("1") .setDoc("count", 1) + .setDetectNoop(false) .get(); assertThat(updateResponse.getVersion(), greaterThan(1l)); refresh(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java index 584da8a5b5f..13ce893724f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java @@ -33,6 +33,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -87,7 +88,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").field("value")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -102,7 +103,7 @@ public class AvgIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(avg("avg").field("value"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + 
assertHitCount(searchResponse, 10); Global global = searchResponse.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -128,7 +129,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").field("value")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -144,7 +145,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").field("value").script(new Script("_value + 1"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -162,7 +163,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").field("value").script(new Script("_value + inc", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -174,7 +175,7 @@ public class AvgIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(avg("avg").format("#").field("value")).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -191,7 +192,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").field("values")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -207,7 +208,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").field("values").script(new Script("_value + 1"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -225,7 +226,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").field("values").script(new Script("_value + inc", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -241,7 +242,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").script(new Script("doc['value'].value"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -259,7 +260,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ 
-277,7 +278,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -293,7 +294,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -309,7 +310,7 @@ public class AvgIT extends AbstractNumericTestCase { .addAggregation(avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); @@ -328,7 +329,7 @@ public class AvgIT extends AbstractNumericTestCase { avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Avg avg = searchResponse.getAggregations().get("avg"); assertThat(avg, notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java index d4ce88eea66..d54dc7a3d08 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -34,6 +34,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -121,7 +122,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation(extendedStats("stats").field("value").sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -147,7 +148,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation(extendedStats("stats").field("value")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -168,7 +169,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) 
.addAggregation(extendedStats("stats").format("0000.0").field("value").sigma(sigma)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -199,7 +200,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(extendedStats("stats").field("value"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Global global = searchResponse.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -249,7 +250,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation(extendedStats("stats").field("value").sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -274,7 +275,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation(extendedStats("stats").field("value").script(new Script("_value + 1")).sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -303,7 +304,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -328,7 +329,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation(extendedStats("stats").field("values").sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -353,7 +354,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation(extendedStats("stats").field("values").script(new Script("_value - 1")).sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -382,7 +383,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -407,7 +408,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation(extendedStats("stats").script(new Script("doc['value'].value")).sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -435,7 +436,7 @@ public class ExtendedStatsIT extends 
AbstractNumericTestCase { extendedStats("stats").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)).sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -463,7 +464,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { extendedStats("stats").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)).sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -488,7 +489,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .addAggregation(extendedStats("stats").script(new Script("doc['values'].values")).sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -514,7 +515,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .execute().actionGet(); assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -545,7 +546,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { .sigma(sigma)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 6b8c39e9214..24abca0b99e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.search.aggregations.metrics; -import com.google.common.collect.Lists; - import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.script.Script; @@ -37,15 +35,11 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.search.aggregations.AggregationBuilders.global; -import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; -import static org.elasticsearch.search.aggregations.AggregationBuilders.percentileRanks; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.sameInstance; +import static org.elasticsearch.search.aggregations.AggregationBuilders.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import 
static org.hamcrest.Matchers.*; /** * @@ -79,7 +73,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { } private void assertConsistent(double[] pcts, PercentileRanks percentiles, long minValue, long maxValue, int numberSigDigits) { - final List<Percentile> percentileList = Lists.newArrayList(percentiles); + final List<Percentile> percentileList = iterableAsArrayList(percentiles); assertEquals(pcts.length, percentileList.size()); for (int i = 0; i < pcts.length; ++i) { final Percentile percentile = percentileList.get(i); @@ -166,7 +160,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) .field("value").percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits); @@ -185,7 +179,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) .field("value").percentiles(pcts))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Global global = searchResponse.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -212,7 +206,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) .field("value").percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits); @@ -230,7 +224,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) .field("value").percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits); @@ -248,7 +242,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) .field("value").script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits); @@ -269,7 +263,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { .field("value").script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = 
searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits); @@ -287,7 +281,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) .field("values").percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits); @@ -305,7 +299,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) .field("values").script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits); @@ -322,7 +316,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) .field("values").script(new Script("20 - _value")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, 20 - maxValues, 20 - minValues, sigDigits); @@ -343,7 +337,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { .field("values").script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits); @@ -361,7 +355,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) .script(new Script("doc['value'].value")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits); @@ -382,7 +376,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { .script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits); @@ -403,7 +397,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { .script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)) .execute().actionGet(); - 
assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits); @@ -421,7 +415,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) .script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits); @@ -439,7 +433,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) .script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits); @@ -463,7 +457,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { "List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;", ScriptType.INLINE, null, params)).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits); @@ -483,7 +477,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { .numberOfSignificantValueDigits(sigDigits).percentiles(99)) .order(Order.aggregation("percentile_ranks", "99", asc))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Histogram histo = searchResponse.getAggregations().get("histo"); double previous = asc ? 
Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index ebbbf1eb18c..b5568f918cf 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -18,10 +18,9 @@ */ package org.elasticsearch.search.aggregations.metrics; -import com.google.common.collect.Lists; - import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.aggregations.bucket.global.Global; @@ -38,15 +37,9 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.search.aggregations.AggregationBuilders.global; -import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; -import static org.elasticsearch.search.aggregations.AggregationBuilders.percentiles; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.sameInstance; +import static org.elasticsearch.search.aggregations.AggregationBuilders.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.*; /** * @@ -79,7 +72,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { } private void assertConsistent(double[] pcts, Percentiles percentiles, long minValue, long maxValue, int numberSigDigits) { - final List<Percentile> percentileList = Lists.newArrayList(percentiles); + final List<Percentile> percentileList = CollectionUtils.iterableAsArrayList(percentiles); assertEquals(pcts.length, percentileList.size()); for (int i = 0; i < pcts.length; ++i) { final Percentile percentile = percentileList.get(i); @@ -167,7 +160,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .percentiles(pcts)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits); @@ -187,7 +180,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .field("value") .percentiles(pcts))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Global global = searchResponse.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -216,7 +209,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .percentiles(pcts)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits); @@ -234,7 +227,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { 
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("value") .script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits); @@ -255,7 +248,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)).execute() .actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits); @@ -274,7 +267,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .percentiles(pcts)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits); @@ -292,7 +285,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("values") .script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits); @@ -309,7 +302,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("values") .script(new Script("20 - _value")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, 20 - maxValues, 20 - minValues, sigDigits); @@ -330,7 +323,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)).execute() .actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits); @@ -348,7 +341,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR) .script(new Script("doc['value'].value")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits); @@ -369,7 +362,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, 
params)).percentiles(pcts)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits); @@ -390,7 +383,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits); @@ -408,7 +401,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR) .script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits); @@ -426,7 +419,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR) .script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits); @@ -450,7 +443,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { "List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;", ScriptType.INLINE, null, params)).percentiles(pcts)).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); final Percentiles percentiles = searchResponse.getAggregations().get("percentiles"); assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits); @@ -470,7 +463,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .percentiles(99)) .order(Order.aggregation("percentiles", "99", asc))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Histogram histo = searchResponse.getAggregations().get("histo"); double previous = asc ? 
Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java index 04a3a90dfd3..793565d33c4 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java @@ -33,6 +33,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.max; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -86,7 +87,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").field("value")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -99,7 +100,7 @@ public class MaxIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(max("max").format("0000.0").field("value")).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -115,7 +116,7 @@ public class MaxIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(max("max").field("value"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Global global = searchResponse.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -142,7 +143,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").field("value")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -158,7 +159,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").field("value").script(new Script("_value + 1"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -176,7 +177,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").field("value").script(new Script("_value + inc", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -192,7 +193,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").field("values")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = 
searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -208,7 +209,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").field("values").script(new Script("_value + 1"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -226,7 +227,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").field("values").script(new Script("_value + inc", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -242,7 +243,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").script(new Script("doc['value'].value"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -260,7 +261,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -278,7 +279,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -294,7 +295,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").script(new Script("doc['values'].values"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -310,7 +311,7 @@ public class MaxIT extends AbstractNumericTestCase { .addAggregation(max("max").script(new Script("doc['values'].values"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); @@ -328,7 +329,7 @@ public class MaxIT extends AbstractNumericTestCase { max("max").script(new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Max max = searchResponse.getAggregations().get("max"); assertThat(max, notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java index 38e4e1ef555..00cb2dda27d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java @@ -33,6 +33,7 @@ import static 
org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.min; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -86,7 +87,7 @@ public class MinIT extends AbstractNumericTestCase { .addAggregation(min("min").field("value")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -99,7 +100,7 @@ public class MinIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(min("min").format("0000.0").field("value")).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -115,7 +116,7 @@ public class MinIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(min("min").field("value"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Global global = searchResponse.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -142,7 +143,7 @@ public class MinIT extends AbstractNumericTestCase { .addAggregation(min("min").field("value")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -158,7 +159,7 @@ public class MinIT extends AbstractNumericTestCase { .addAggregation(min("min").field("value").script(new Script("_value - 1"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -176,7 +177,7 @@ public class MinIT extends AbstractNumericTestCase { .addAggregation(min("min").field("value").script(new Script("_value - dec", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -192,7 +193,7 @@ public class MinIT extends AbstractNumericTestCase { .addAggregation(min("min").field("values")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -207,7 +208,7 @@ public class MinIT extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation(min("min").field("values").script(new Script("_value - 1"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, 
notNullValue()); @@ -222,7 +223,7 @@ public class MinIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(min("min").field("values").script(new Script("_value * -1"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -239,7 +240,7 @@ public class MinIT extends AbstractNumericTestCase { .addAggregation(min("min").field("values").script(new Script("_value - dec", ScriptType.INLINE, null, params))).execute() .actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -253,7 +254,7 @@ public class MinIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(min("min").script(new Script("doc['value'].value"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -270,7 +271,7 @@ public class MinIT extends AbstractNumericTestCase { .addAggregation(min("min").script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))).execute() .actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -287,7 +288,7 @@ public class MinIT extends AbstractNumericTestCase { .addAggregation(min("min").script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))).execute() .actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -301,7 +302,7 @@ public class MinIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(min("min").script(new Script("doc['values'].values"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -315,7 +316,7 @@ public class MinIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(min("min").script(new Script("doc['values'].values"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); @@ -337,7 +338,7 @@ public class MinIT extends AbstractNumericTestCase { "List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;", ScriptType.INLINE, null, params))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Min min = searchResponse.getAggregations().get("min"); assertThat(min, notNullValue()); diff --git 
a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java index d4e6ea9b527..f838a89fe18 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java @@ -34,6 +34,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -103,7 +104,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -120,7 +121,7 @@ public class StatsIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(stats("stats").format("0000.0").field("value")).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -144,7 +145,7 @@ public class StatsIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(stats("stats").field("value"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Global global = searchResponse.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -186,7 +187,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -208,7 +209,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -232,7 +233,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -254,7 +255,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -276,7 +277,7 @@ public class StatsIT extends AbstractNumericTestCase { 
assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -300,7 +301,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -322,7 +323,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -346,7 +347,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -370,7 +371,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -392,7 +393,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -414,7 +415,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); @@ -440,7 +441,7 @@ public class StatsIT extends AbstractNumericTestCase { assertShardExecutionState(searchResponse, 0); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index 6351bb68d87..d87e6393f77 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -33,6 +33,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -86,7 +87,7 @@ public class SumIT extends AbstractNumericTestCase { .addAggregation(sum("sum").field("value")) .execute().actionGet(); - 
assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -99,7 +100,7 @@ public class SumIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(sum("sum").format("0000.0").field("value")).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -115,7 +116,7 @@ public class SumIT extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(sum("sum").field("value"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Global global = searchResponse.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -141,7 +142,7 @@ public class SumIT extends AbstractNumericTestCase { .addAggregation(sum("sum").field("value")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -157,7 +158,7 @@ public class SumIT extends AbstractNumericTestCase { .addAggregation(sum("sum").field("value").script(new Script("_value + 1"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -175,7 +176,7 @@ public class SumIT extends AbstractNumericTestCase { .addAggregation(sum("sum").field("value").script(new Script("_value + increment", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -191,7 +192,7 @@ public class SumIT extends AbstractNumericTestCase { .addAggregation(sum("sum").script(new Script("doc['value'].value"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -209,7 +210,7 @@ public class SumIT extends AbstractNumericTestCase { .addAggregation(sum("sum").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -227,7 +228,7 @@ public class SumIT extends AbstractNumericTestCase { .addAggregation(sum("sum").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -244,7 +245,7 @@ public class SumIT extends AbstractNumericTestCase { .addAggregation(sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]"))) 
.execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -260,7 +261,7 @@ public class SumIT extends AbstractNumericTestCase { .addAggregation(sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]"))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -279,7 +280,7 @@ public class SumIT extends AbstractNumericTestCase { sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -296,7 +297,7 @@ public class SumIT extends AbstractNumericTestCase { .addAggregation(sum("sum").field("values")) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -312,7 +313,7 @@ public class SumIT extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation(sum("sum").field("values").script(new Script("_value + 1"))).execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -329,7 +330,7 @@ public class SumIT extends AbstractNumericTestCase { .addAggregation(sum("sum").field("values").script(new Script("_value + increment", ScriptType.INLINE, null, params))) .execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); + assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index b1be56a973b..8499f754602 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -18,10 +18,9 @@ */ package org.elasticsearch.search.aggregations.metrics; -import com.google.common.collect.Lists; - import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.aggregations.bucket.global.Global; @@ -38,14 +37,9 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.search.aggregations.AggregationBuilders.global; -import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; -import static org.elasticsearch.search.aggregations.AggregationBuilders.percentileRanks; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import 
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java
index b1be56a973b..8499f754602 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java
@@ -18,10 +18,9 @@
  */
 package org.elasticsearch.search.aggregations.metrics;

-import com.google.common.collect.Lists;
-
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptService.ScriptType;
 import org.elasticsearch.search.aggregations.bucket.global.Global;
@@ -38,14 +37,9 @@ import java.util.List;
 import java.util.Map;

 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
-import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
-import static org.elasticsearch.search.aggregations.AggregationBuilders.percentileRanks;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.sameInstance;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.hamcrest.Matchers.*;

 /**
  *
@@ -82,7 +76,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
     }

     private void assertConsistent(double[] pcts, PercentileRanks percentiles, long minValue, long maxValue) {
-        final List<Percentile> percentileList = Lists.newArrayList(percentiles);
+        final List<Percentile> percentileList = CollectionUtils.iterableAsArrayList(percentiles);
         assertEquals(pcts.length, percentileList.size());
         for (int i = 0; i < pcts.length; ++i) {
             final Percentile percentile = percentileList.get(i);
@@ -159,7 +153,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValue, maxValue);
@@ -177,7 +171,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         randomCompression(percentileRanks("percentile_ranks")).field("value").percentiles(pcts))).execute()
                 .actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Global global = searchResponse.getAggregations().get("global");
         assertThat(global, notNullValue());
@@ -203,7 +197,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValue, maxValue);
@@ -220,7 +214,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValue, maxValue);
@@ -237,7 +231,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
@@ -257,7 +251,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
@@ -274,7 +268,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValues, maxValues);
@@ -291,7 +285,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);
@@ -307,7 +301,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, -maxValues, -minValues);
@@ -327,7 +321,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);
@@ -344,7 +338,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValue, maxValue);
@@ -364,7 +358,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
@@ -384,7 +378,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
@@ -401,7 +395,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValues, maxValues);
@@ -418,7 +412,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValues, maxValues);
@@ -439,7 +433,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
         assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);
@@ -456,7 +450,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                         .order(Order.aggregation("percentile_ranks", "99", asc)))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Histogram histo = searchResponse.getAggregations().get("histo");
         double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java
index 60e385ecc1f..65b0ea596c8 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java
@@ -18,10 +18,9 @@
  */
 package org.elasticsearch.search.aggregations.metrics;

-import com.google.common.collect.Lists;
-
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptService.ScriptType;
 import org.elasticsearch.search.aggregations.bucket.global.Global;
@@ -38,14 +37,9 @@ import java.util.List;
 import java.util.Map;

 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
-import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
-import static org.elasticsearch.search.aggregations.AggregationBuilders.percentiles;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.sameInstance;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.hamcrest.Matchers.*;

 /**
  *
@@ -81,7 +75,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
     }

     private void assertConsistent(double[] pcts, Percentiles percentiles, long minValue, long maxValue) {
-        final List<Percentile> percentileList = Lists.newArrayList(percentiles);
+        final List<Percentile> percentileList = CollectionUtils.iterableAsArrayList(percentiles);
         assertEquals(pcts.length, percentileList.size());
         for (int i = 0; i < pcts.length; ++i) {
             final Percentile percentile = percentileList.get(i);
@@ -159,7 +153,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValue, maxValue);
@@ -176,7 +170,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         global("global").subAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(pcts)))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Global global = searchResponse.getAggregations().get("global");
         assertThat(global, notNullValue());
@@ -203,7 +197,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValue, maxValue);
@@ -220,7 +214,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
@@ -240,7 +234,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
@@ -257,7 +251,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValues, maxValues);
@@ -274,7 +268,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);
@@ -290,7 +284,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, -maxValues, -minValues);
@@ -310,7 +304,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);
@@ -327,7 +321,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValue, maxValue);
@@ -347,7 +341,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
@@ -367,7 +361,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
@@ -384,7 +378,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValues, maxValues);
@@ -401,7 +395,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValues, maxValues);
@@ -422,7 +416,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .percentiles(pcts))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
         assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);
@@ -439,7 +433,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                         .order(Order.aggregation("percentiles", "99", asc)))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Histogram histo = searchResponse.getAggregations().get("histo");
         double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
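Both TDigest test classes above copy the Percentile iterable into a list via CollectionUtils.iterableAsArrayList before indexing into it. A sketch of such a helper, inferred from the call sites in these hunks (illustrative, not the exact CollectionUtils source):

    // Copies any Iterable into a growable ArrayList; a drop-in replacement for
    // Guava's Lists.newArrayList(Iterable) at the call sites above.
    public static <E> ArrayList<E> iterableAsArrayList(Iterable<? extends E> elements) {
        ArrayList<E> list = new ArrayList<>();
        for (E element : elements) {
            list.add(element);
        }
        return list;
    }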
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java
index 6fd4e833b9c..a2f81066cb1 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java
@@ -33,6 +33,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.count;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
@@ -82,7 +83,7 @@ public class ValueCountIT extends ESIntegTestCase {
                 .addAggregation(count("count").field("value"))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         ValueCount valueCount = searchResponse.getAggregations().get("count");
         assertThat(valueCount, notNullValue());
@@ -96,7 +97,7 @@ public class ValueCountIT extends ESIntegTestCase {
         SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                 .addAggregation(global("global").subAggregation(count("count").field("value"))).execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Global global = searchResponse.getAggregations().get("global");
         assertThat(global, notNullValue());
@@ -121,7 +122,7 @@ public class ValueCountIT extends ESIntegTestCase {
                 .addAggregation(count("count").field("value"))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         ValueCount valueCount = searchResponse.getAggregations().get("count");
         assertThat(valueCount, notNullValue());
@@ -137,7 +138,7 @@ public class ValueCountIT extends ESIntegTestCase {
                 .addAggregation(count("count").field("values"))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         ValueCount valueCount = searchResponse.getAggregations().get("count");
         assertThat(valueCount, notNullValue());
@@ -150,7 +151,7 @@ public class ValueCountIT extends ESIntegTestCase {
         SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                 .addAggregation(count("count").script(new Script("doc['value'].value"))).execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         ValueCount valueCount = searchResponse.getAggregations().get("count");
         assertThat(valueCount, notNullValue());
@@ -163,7 +164,7 @@ public class ValueCountIT extends ESIntegTestCase {
         SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                 .addAggregation(count("count").script(new Script("doc['values'].values"))).execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         ValueCount valueCount = searchResponse.getAggregations().get("count");
         assertThat(valueCount, notNullValue());
@@ -178,7 +179,7 @@ public class ValueCountIT extends ESIntegTestCase {
         SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                 .addAggregation(count("count").script(new Script("doc[s].value", ScriptType.INLINE, null, params))).execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         ValueCount valueCount = searchResponse.getAggregations().get("count");
         assertThat(valueCount, notNullValue());
@@ -193,7 +194,7 @@ public class ValueCountIT extends ESIntegTestCase {
         SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                 .addAggregation(count("count").script(new Script("doc[s].values", ScriptType.INLINE, null, params))).execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         ValueCount valueCount = searchResponse.getAggregations().get("count");
         assertThat(valueCount, notNullValue());
diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
index f00e95d6e44..cdbe3be602f 100644
--- a/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
+++ b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
@@ -26,30 +26,18 @@ import org.elasticsearch.script.ScriptService.ScriptType;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.hamcrest.CoreMatchers;
-import org.junit.Ignore;

 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Map;

-import static com.google.common.collect.Lists.newArrayList;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
-import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
-import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction;
-import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction;
-import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction;
+import static org.elasticsearch.index.query.QueryBuilders.*;
+import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.*;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
-import static org.hamcrest.Matchers.allOf;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.*;

 public class RandomScoreFunctionIT extends ESIntegTestCase {

@@ -121,7 +109,7 @@ public class RandomScoreFunctionIT extends ESIntegTestCase {
         int docCount = randomIntBetween(100, 200);
         for (int i = 0; i < docCount; i++) {
-            client().prepareIndex("test", "type", "" + i).setSource("body", randomFrom(newArrayList("foo", "bar", "baz")), "index", i)
+            client().prepareIndex("test", "type", "" + i).setSource("body", randomFrom(Arrays.asList("foo", "bar", "baz")), "index", i + 1) // we add 1 to the index field to make sure that the scripts below never compute log(0)
                     .get();
         }
         refresh();
diff --git a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java
index e193d2ad69b..5a8d7c0150a 100644
--- a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java
+++ b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java
@@ -18,11 +18,11 @@
  */
 package org.elasticsearch.search.highlight;

-import com.google.common.collect.Lists;
 import org.elasticsearch.common.text.StringText;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.index.mapper.FieldMapper;

+import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
@@ -51,7 +51,7 @@ public class CustomHighlighter implements Highlighter {
             }
         }

-        List<Text> responses = Lists.newArrayList();
+        List<Text> responses = new ArrayList<>();
         responses.add(new StringText(String.format(Locale.ENGLISH, "standard response for %s at position %s", field.field(),
                 cacheEntry.position)));
diff --git a/core/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java b/core/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java
index b2cdbfab588..7aef0d2cdb1 100644
--- a/core/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java
+++ b/core/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java
@@ -545,7 +545,7 @@ public class SimpleSortIT extends ESIntegTestCase {
                 .setSize(size)
                 .addSort("str_value", SortOrder.ASC)
                 .execute().actionGet();
-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));
@@ -558,7 +558,7 @@ public class SimpleSortIT extends ESIntegTestCase {
                 .addSort("str_value", SortOrder.DESC)
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));
@@ -575,7 +575,7 @@ public class SimpleSortIT extends ESIntegTestCase {
                 .setQuery(matchAllQuery())
                 .setSize(size)
                 .addSort(new ScriptSortBuilder(new Script("doc['str_value'].value"), "string")).execute().actionGet();
-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));
@@ -586,7 +586,7 @@ public class SimpleSortIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.DESC).execute()
                 .actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));
@@ -601,7 +601,7 @@ public class SimpleSortIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.ASC).execute()
                 .actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));
@@ -611,7 +611,7 @@ public class SimpleSortIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.DESC).execute()
                 .actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));
@@ -625,7 +625,7 @@ public class SimpleSortIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.ASC).execute()
                 .actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));
@@ -635,7 +635,7 @@ public class SimpleSortIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.DESC).execute()
                 .actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));
@@ -649,7 +649,7 @@ public class SimpleSortIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.ASC).execute()
                 .actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));
@@ -661,7 +661,7 @@ public class SimpleSortIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.DESC)
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));
@@ -675,7 +675,7 @@ public class SimpleSortIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.ASC).execute()
                 .actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));
@@ -687,7 +687,7 @@ public class SimpleSortIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.DESC).execute()
                 .actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));
@@ -713,7 +713,7 @@ public class SimpleSortIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.DESC).execute()
                 .actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().hits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
             assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));
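The files that follow replace Lists.newArrayList(a, b, c) with Arrays.asList(a, b, c) wherever the list is only iterated. The two are not interchangeable in general: Arrays.asList returns a fixed-size view backed by the array, so adding or removing elements throws at runtime. A short illustration of the distinction:

    List<String> fixed = Arrays.asList("the", "The Prodigy", "The Verve", "The the");
    List<String> growable = new ArrayList<>(fixed); // copy when mutation is needed
    growable.add("The Who");                        // fine
    // fixed.add("The Who");                        // UnsupportedOperationException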
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
index 672dc5e081f..0b5e0d88a11 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
@@ -20,8 +20,6 @@ package org.elasticsearch.search.suggest;

 import com.carrotsearch.hppc.ObjectLongHashMap;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-import com.google.common.collect.Lists;
-
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
 import org.elasticsearch.action.admin.indices.optimize.OptimizeResponse;
@@ -51,6 +49,8 @@ import org.elasticsearch.test.ESIntegTestCase;
 import org.junit.Test;

 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
@@ -60,6 +60,7 @@ import java.util.concurrent.ExecutionException;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
+import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@@ -145,7 +146,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
     public void testThatWeightsAreWorking() throws Exception {
         createIndexAndMapping(completionMappingBuilder);

-        List<String> similarNames = Lists.newArrayList("the", "The Prodigy", "The Verve", "The the");
+        List<String> similarNames = Arrays.asList("the", "The Prodigy", "The Verve", "The the");
         // the weight is 1000 divided by string length, so the results are easy to check
         for (String similarName : similarNames) {
             client().prepareIndex(INDEX, TYPE, similarName).setSource(jsonBuilder()
@@ -853,8 +854,8 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
     private void assertSuggestions(SuggestResponse suggestResponse, boolean suggestionOrderStrict, String name, String... suggestions) {
         assertAllSuccessful(suggestResponse);

-        List<String> suggestionNames = Lists.newArrayList();
-        for (Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> suggestion : Lists.newArrayList(suggestResponse.getSuggest().iterator())) {
+        List<String> suggestionNames = new ArrayList<>();
+        for (Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> suggestion : iterableAsArrayList(suggestResponse.getSuggest())) {
             suggestionNames.add(suggestion.getName());
         }
         String expectFieldInResponseMsg = String.format(Locale.ROOT, "Expected suggestion named %s in response, got %s", name, suggestionNames);
@@ -881,7 +882,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
     }

     private List<String> getNames(Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option> suggestEntry) {
-        List<String> names = Lists.newArrayList();
+        List<String> names = new ArrayList<>();
         for (Suggest.Suggestion.Entry.Option entry : suggestEntry.getOptions()) {
             names.add(entry.getText().string());
         }
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java
index 3447fbdeacb..9e51d259b91 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java
@@ -18,10 +18,10 @@
  */
 package org.elasticsearch.search.suggest;

-import com.google.common.collect.Lists;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -33,7 +33,7 @@ import java.util.List;
 import java.util.Locale;

 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.test.ESIntegTestCase.*;
+import static org.elasticsearch.test.ESIntegTestCase.Scope;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;

@@ -78,7 +78,9 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase {
         SearchResponse searchResponse = searchRequestBuilder.execute().actionGet();

         // TODO: infer type once JI-9019884 is fixed
-        List<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> suggestions = Lists.<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>newArrayList(searchResponse.getSuggest().getSuggestion("someName").iterator());
+        // TODO: see also JDK-8039214
+        List<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> suggestions
+                = CollectionUtils.<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>iterableAsArrayList(searchResponse.getSuggest().getSuggestion("someName"));
         assertThat(suggestions, hasSize(2));
         assertThat(suggestions.get(0).getText().string(), is(String.format(Locale.ROOT, "%s-%s-%s-12", randomText, randomField, randomSuffix)));
         assertThat(suggestions.get(1).getText().string(), is(String.format(Locale.ROOT, "%s-%s-%s-123", randomText, randomField, randomSuffix)));
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java
index 1b29173973b..0bbd1cef8bf 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java
@@ -19,8 +19,6 @@

 package org.elasticsearch.search.suggest.completion;

-import com.google.common.collect.Lists;
-
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.FieldsConsumer;
@@ -89,7 +87,7 @@ public class CompletionPostingsFormatTest extends ESTestCase {
     public void testCompletionPostingsFormat() throws IOException {
         AnalyzingCompletionLookupProviderV1 providerV1 = new AnalyzingCompletionLookupProviderV1(true, false, true, true);
         AnalyzingCompletionLookupProvider currentProvider = new AnalyzingCompletionLookupProvider(true, false, true, true);
-        List<Completion090PostingsFormat.CompletionLookupProvider> providers = Lists.newArrayList(providerV1, currentProvider);
+        List<Completion090PostingsFormat.CompletionLookupProvider> providers = Arrays.asList(providerV1, currentProvider);

         Completion090PostingsFormat.CompletionLookupProvider randomProvider = providers.get(getRandom().nextInt(providers.size()));
         RAMDirectory dir = new RAMDirectory();
diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
index 715eda0176f..3454cd2561b 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
@@ -32,7 +32,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse
 import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
 import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus;
 import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
-import org.elasticsearch.action.admin.indices.recovery.ShardRecoveryResponse;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.AbstractDiffable;
@@ -55,6 +54,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.discovery.zen.elect.ElectMasterService;
 import org.elasticsearch.index.store.IndexStore;
+import org.elasticsearch.indices.recovery.RecoveryState;
 import org.elasticsearch.indices.ttl.IndicesTTLService;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.repositories.RepositoryMissingException;
@@ -67,7 +67,6 @@ import org.elasticsearch.snapshots.mockstore.MockRepository;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.InternalTestCluster;
 import org.elasticsearch.test.rest.FakeRestRequest;
-import org.elasticsearch.transport.TransportModule;
 import org.junit.Test;

 import java.io.IOException;
@@ -81,7 +80,6 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;

-import static com.google.common.collect.Lists.newArrayList;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import static org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -348,7 +346,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
     @Test
     public void snapshotWithStuckNodeTest() throws Exception {
         logger.info("--> start 2 nodes");
-        ArrayList<String> nodes = newArrayList();
+        ArrayList<String> nodes = new ArrayList<>();
         nodes.add(internalCluster().startNode());
         nodes.add(internalCluster().startNode());
         Client client = client();
@@ -611,9 +609,9 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
         assertThat(client().prepareCount("test-idx").get().getCount(), equalTo(100L));

         IntSet reusedShards = new IntHashSet();
-        for (ShardRecoveryResponse response : client().admin().indices().prepareRecoveries("test-idx").get().shardResponses().get("test-idx")) {
-            if (response.recoveryState().getIndex().reusedBytes() > 0) {
-                reusedShards.add(response.getShardId());
+        for (RecoveryState recoveryState : client().admin().indices().prepareRecoveries("test-idx").get().shardRecoveryStates().get("test-idx")) {
+            if (recoveryState.getIndex().reusedBytes() > 0) {
+                reusedShards.add(recoveryState.getShardId().getId());
             }
         }
         logger.info("--> check that at least half of the shards had some reuse: [{}]", reusedShards);
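The last hunk above moves off the removed ShardRecoveryResponse wrapper and iterates RecoveryState directly; shardRecoveryStates() hands back the per-index recovery states keyed by index name. Restated from the hunk, the resulting idiom is:

    // New recovery-inspection idiom: iterate the RecoveryState list for an index.
    for (RecoveryState recoveryState : client().admin().indices()
            .prepareRecoveries("test-idx").get().shardRecoveryStates().get("test-idx")) {
        if (recoveryState.getIndex().reusedBytes() > 0) {
            reusedShards.add(recoveryState.getShardId().getId());
        }
    }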
diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
index 7e282305c89..c3d3f0ff896 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -64,6 +64,7 @@ import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.store.IndexStore;
 import org.elasticsearch.indices.InvalidIndexNameException;
 import org.elasticsearch.repositories.RepositoriesService;
+import org.elasticsearch.repositories.RepositoryException;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.junit.Test;

@@ -78,30 +79,13 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;

-import static com.google.common.collect.Lists.newArrayList;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
 import static org.elasticsearch.index.shard.IndexShard.INDEX_REFRESH_INTERVAL;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAliasesExist;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAliasesMissing;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertIndexTemplateExists;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertIndexTemplateMissing;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
-import static org.hamcrest.Matchers.allOf;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.lessThan;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.nullValue;
-import static org.hamcrest.Matchers.startsWith;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
+import static org.hamcrest.Matchers.*;

 public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCase {

@@ -132,7 +116,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas

         ListenableActionFuture<FlushResponse> flushResponseFuture = null;
         if (randomBoolean()) {
-            ArrayList<String> indicesToFlush = newArrayList();
+            ArrayList<String> indicesToFlush = new ArrayList<>();
             for (int i = 1; i < 4; i++) {
                 if (randomBoolean()) {
                     indicesToFlush.add("test-idx-" + i);
@@ -1317,6 +1301,63 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(0));
     }

+    @Test
+    public void readonlyRepositoryTest() throws Exception {
+        Client client = client();
+
+        logger.info("--> creating repository");
+        Path repositoryLocation = randomRepoPath();
+        assertAcked(client.admin().cluster().preparePutRepository("test-repo")
+                .setType("fs").setSettings(Settings.settingsBuilder()
+                        .put("location", repositoryLocation)
+                        .put("compress", randomBoolean())
+                        .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES)));
+
+        createIndex("test-idx");
+        ensureGreen();
+
+        logger.info("--> indexing some data");
+        for (int i = 0; i < 100; i++) {
+            index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
+        }
+        refresh();
+
+        logger.info("--> snapshot");
+        CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get();
+        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
+        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
+
+        assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
+
+        logger.info("--> delete index");
+        cluster().wipeIndices("test-idx");
+
+        logger.info("--> create read-only URL repository");
+        assertAcked(client.admin().cluster().preparePutRepository("readonly-repo")
+                .setType("fs").setSettings(Settings.settingsBuilder()
+                        .put("location", repositoryLocation)
+                        .put("compress", randomBoolean())
+                        .put("readonly", true)
+                        .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES)));
+        logger.info("--> restore index after deletion");
+        RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("readonly-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").execute().actionGet();
+        assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
+
+        assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));
+
+        logger.info("--> list available snapshots");
+        GetSnapshotsResponse getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots("readonly-repo").get();
+        assertThat(getSnapshotsResponse.getSnapshots(), notNullValue());
+        assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(1));
+
+        logger.info("--> try deleting snapshot");
+        assertThrows(client.admin().cluster().prepareDeleteSnapshot("readonly-repo", "test-snap"), RepositoryException.class, "cannot delete snapshot from a readonly repository");
+
+        logger.info("--> try making another snapshot");
+        assertThrows(client.admin().cluster().prepareCreateSnapshot("readonly-repo", "test-snap-2").setWaitForCompletion(true).setIndices("test-idx"), RepositoryException.class, "cannot create snapshot in a readonly repository");
+    }
+
     @Test
     public void throttlingTest() throws Exception {
         Client client = client();
diff --git a/core/src/test/java/org/elasticsearch/stresstest/manyindices/ManyNodesManyIndicesRecoveryStressTest.java b/core/src/test/java/org/elasticsearch/stresstest/manyindices/ManyNodesManyIndicesRecoveryStressTest.java
index ea4d20f9149..ccd25a1607f 100644
--- a/core/src/test/java/org/elasticsearch/stresstest/manyindices/ManyNodesManyIndicesRecoveryStressTest.java
+++ b/core/src/test/java/org/elasticsearch/stresstest/manyindices/ManyNodesManyIndicesRecoveryStressTest.java
@@ -19,7 +19,6 @@

 package org.elasticsearch.stresstest.manyindices;

-import com.google.common.collect.Lists;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.count.CountResponse;
 import org.elasticsearch.client.Client;
@@ -28,6 +27,7 @@ import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.node.NodeBuilder;

+import java.util.ArrayList;
 import java.util.List;

 public class ManyNodesManyIndicesRecoveryStressTest {
@@ -48,7 +48,7 @@ public class ManyNodesManyIndicesRecoveryStressTest {
                 .put("index.number_of_shards", 1)
                 .build();

-        List<Node> nodes = Lists.newArrayList();
+        List<Node> nodes = new ArrayList<>();
         for (int i = 0; i < NUM_NODES; i++) {
             nodes.add(NodeBuilder.nodeBuilder().settings(Settings.settingsBuilder().put(nodeSettings).put("name", "node" + i)).node());
         }
@@ -93,7 +93,7 @@ public class ManyNodesManyIndicesRecoveryStressTest {
         Thread.sleep(5000);

         System.out.println("--> All nodes are closed, starting back...");
-        nodes = Lists.newArrayList();
+        nodes = new ArrayList<>();
         for (int i = 0; i < NUM_NODES; i++) {
             nodes.add(NodeBuilder.nodeBuilder().settings(Settings.settingsBuilder().put(nodeSettings).put("name", "node" + i)).node());
         }
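The two test-infrastructure files that follow swap the Guava newArrayList(T...) static import for CollectionUtils.arrayAsArrayList. A plausible shape for that varargs helper, assuming only the semantics implied by the call sites (a growable list, unlike the fixed-size Arrays.asList view):

    // Varargs helper in the spirit of CollectionUtils.arrayAsArrayList.
    @SafeVarargs
    public static <E> ArrayList<E> arrayAsArrayList(E... elements) {
        return new ArrayList<>(Arrays.asList(elements));
    }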
diff --git a/core/src/test/java/org/elasticsearch/test/ESAllocationTestCase.java b/core/src/test/java/org/elasticsearch/test/ESAllocationTestCase.java
index 991aea74345..d12f4d68752 100644
--- a/core/src/test/java/org/elasticsearch/test/ESAllocationTestCase.java
+++ b/core/src/test/java/org/elasticsearch/test/ESAllocationTestCase.java
@@ -46,8 +46,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;

-import static com.google.common.collect.Lists.newArrayList;
 import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
+import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;

@@ -132,7 +132,7 @@ public abstract class ESAllocationTestCase extends ESTestCase {
         if (initializingShards.isEmpty()) {
             return clusterState;
         }
-        RoutingTable routingTable = strategy.applyStartedShards(clusterState, newArrayList(initializingShards.get(randomInt(initializingShards.size() - 1)))).routingTable();
+        RoutingTable routingTable = strategy.applyStartedShards(clusterState, arrayAsArrayList(initializingShards.get(randomInt(initializingShards.size() - 1)))).routingTable();
         return ClusterState.builder(clusterState).routingTable(routingTable).build();
     }
diff --git a/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java b/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java
index 2333380a4e9..5327f4e6325 100644
--- a/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java
+++ b/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java
@@ -26,8 +26,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomInts;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.google.common.base.Joiner;
 import com.google.common.base.Predicate;
-import com.google.common.collect.Lists;
-
 import org.apache.http.impl.client.HttpClients;
 import org.elasticsearch.action.search.SearchResponse;
@@ -48,6 +46,7 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
+import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
 import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
@@ -70,17 +69,21 @@ import org.elasticsearch.client.Client;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.routing.IndexRoutingTable;
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
+import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.InetSocketTransportAddress;
@@ -105,6 +108,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MappedFieldType.Loading;
 import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
 import org.elasticsearch.index.shard.MergePolicyConfig;
+import org.elasticsearch.index.shard.MergeSchedulerConfig;
 import org.elasticsearch.index.translog.Translog;
 import org.elasticsearch.index.translog.TranslogConfig;
 import org.elasticsearch.index.translog.TranslogService;
@@ -124,25 +128,47 @@ import org.elasticsearch.test.disruption.ServiceDisruptionScheme;
 import org.elasticsearch.test.rest.client.http.HttpRequestBuilder;
 import org.hamcrest.Matchers;
 import org.joda.time.DateTimeZone;
-import org.junit.*;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;

 import java.io.IOException;
 import java.io.InputStream;
-import java.lang.annotation.*;
+import java.lang.annotation.Annotation;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.*;
-import java.util.concurrent.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;

 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
+import static org.elasticsearch.common.util.CollectionUtils.eagerPartition;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.test.XContentTestUtils.convertToMap;
 import static org.elasticsearch.test.XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder;
@@ -1379,7 +1405,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
                 }
             }
         } else {
-            List<List<IndexRequestBuilder>> partition = Lists.partition(builders, Math.min(MAX_BULK_INDEX_REQUEST_SIZE,
+            List<List<IndexRequestBuilder>> partition = eagerPartition(builders, Math.min(MAX_BULK_INDEX_REQUEST_SIZE,
                     Math.max(1, (int) (builders.size() * randomDouble()))));
             logger.info("Index [{}] docs async: [{}] bulk: [{}] partitions [{}]", builders.size(), false, true, partition.size());
             for (List<IndexRequestBuilder> segmented : partition) {
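Guava's Lists.partition returns lazy views over the backing list, while the CollectionUtils.eagerPartition call used above materializes every chunk up front, which is safer when the partitions outlive later mutations of the source list. A rough sketch of an eager partitioner with the same call shape (a reconstruction, not the exact CollectionUtils code):

    // Eagerly splits `list` into consecutive chunks of at most `size` elements,
    // copying each chunk instead of returning a lazy view.
    public static <T> List<List<T>> eagerPartition(List<T> list, int size) {
        List<List<T>> result = new ArrayList<>();
        for (int i = 0; i < list.size(); i += size) {
            result.add(new ArrayList<>(list.subList(i, Math.min(i + size, list.size()))));
        }
        return result;
    }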
com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; import com.google.common.base.Predicate; - import org.apache.lucene.uninverting.UninvertingReader; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; @@ -39,6 +38,7 @@ import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TimeUnits; import org.elasticsearch.Version; import org.elasticsearch.bootstrap.BootstrapForTesting; +import org.elasticsearch.cache.recycler.MockPageCacheRecycler; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.DjbHashFunction; @@ -46,19 +46,22 @@ import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.cache.recycler.MockPageCacheRecycler; +import org.elasticsearch.search.MockSearchService; import org.elasticsearch.test.junit.listeners.AssertionErrorThreadDumpPrinter; import org.elasticsearch.test.junit.listeners.LoggingListener; import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter; -import org.elasticsearch.search.MockSearchService; import org.elasticsearch.threadpool.ThreadPool; -import org.junit.*; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; import org.junit.rules.RuleChain; import java.io.IOException; @@ -67,13 +70,18 @@ import java.nio.file.DirectoryStream; import java.nio.file.FileSystem; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Random; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import static com.google.common.collect.Lists.newArrayList; +import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; import static org.hamcrest.Matchers.equalTo; /** @@ -564,7 +572,7 @@ public abstract class ESTestCase extends LuceneTestCase { if (size > values.length) { throw new IllegalArgumentException("Can\'t pick " + size + " random objects from a list of " + values.length + " objects"); } - List<T> list = newArrayList(values); + List<T> list = arrayAsArrayList(values); Collections.shuffle(list); return list.subList(0, size); } diff --git a/core/src/test/java/org/elasticsearch/test/ExternalTestCluster.java b/core/src/test/java/org/elasticsearch/test/ExternalTestCluster.java index 42b780c670c..9a919a391cb 100644 --- a/core/src/test/java/org/elasticsearch/test/ExternalTestCluster.java +++ b/core/src/test/java/org/elasticsearch/test/ExternalTestCluster.java @@ -19,7 +19,6 @@ package org.elasticsearch.test; -import com.google.common.collect.Lists; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import 
org.elasticsearch.action.admin.cluster.node.stats.NodeStats; @@ -38,6 +37,7 @@ import org.elasticsearch.node.internal.InternalSettingsPreparer; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.file.Path; +import java.util.Collections; import java.util.Iterator; import java.util.concurrent.atomic.AtomicInteger; @@ -153,7 +153,7 @@ public final class ExternalTestCluster extends TestCluster { @Override public Iterator<Client> iterator() { - return Lists.newArrayList(client).iterator(); + return Collections.singleton(client).iterator(); } @Override diff --git a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java index 557206f9505..c6e81f5f80d 100644 --- a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java +++ b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java @@ -29,7 +29,6 @@ import com.google.common.base.Predicates; import com.google.common.collect.Collections2; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Futures; @@ -95,9 +94,9 @@ import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.MockSearchService; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; -import org.elasticsearch.search.MockSearchService; import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.AssertingLocalTransport; import org.elasticsearch.test.transport.MockTransportService; @@ -130,15 +129,11 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import static junit.framework.Assert.fail; -import static org.apache.lucene.util.LuceneTestCase.TEST_NIGHTLY; -import static org.apache.lucene.util.LuceneTestCase.rarely; -import static org.apache.lucene.util.LuceneTestCase.usually; +import static org.apache.lucene.util.LuceneTestCase.*; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.ESTestCase.assertBusy; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.*; import static org.junit.Assert.assertThat; /** @@ -177,8 +172,25 @@ public final class InternalTestCluster extends TestCluster { */ public static final int PORTS_PER_JVM = 100; + /** + * The number of ports in the range used for this cluster + */ + public static final int PORTS_PER_CLUSTER = 20; + + private static final int GLOBAL_TRANSPORT_BASE_PORT = 9300; + private static final int GLOBAL_HTTP_BASE_PORT = 19200; + private static final int JVM_ORDINAL = Integer.parseInt(System.getProperty(SysGlobals.CHILDVM_SYSPROP_JVM_ID, "0")); - public static final int BASE_PORT = 9300 + PORTS_PER_JVM * (JVM_ORDINAL + 1); + + /** a per-JVM unique offset to be used for calculating unique port ranges. 
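+ * For example (assuming the default child JVM ordinal of 0): with PORTS_PER_JVM = 100 and PORTS_PER_CLUSTER = 20, JVM_BASE_PORT_OFFSET is 100, so the first cluster in that JVM uses transport ports 9400-9420 and http ports 19300-19320, and the second cluster uses 9420-9440 and 19320-19340.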
*/ + public static final int JVM_BASE_PORT_OFFSET = PORTS_PER_JVM * (JVM_ORDINAL + 1); + + private static final AtomicInteger clusterOrdinal = new AtomicInteger(); + private final int CLUSTER_BASE_PORT_OFFSET = JVM_BASE_PORT_OFFSET + (clusterOrdinal.getAndIncrement() * PORTS_PER_CLUSTER) % PORTS_PER_JVM; + + public final int TRANSPORT_BASE_PORT = GLOBAL_TRANSPORT_BASE_PORT + CLUSTER_BASE_PORT_OFFSET; + public final int HTTP_BASE_PORT = GLOBAL_HTTP_BASE_PORT + CLUSTER_BASE_PORT_OFFSET; + private static final boolean ENABLE_MOCK_MODULES = RandomizedTest.systemPropertyAsBoolean(TESTS_ENABLE_MOCK_MODULES, true); @@ -294,8 +306,8 @@ public final class InternalTestCluster extends TestCluster { builder.put("path.shared_data", baseDir.resolve("custom")); builder.put("path.home", baseDir); builder.put("path.repo", baseDir.resolve("repos")); - builder.put("transport.tcp.port", BASE_PORT + "-" + (BASE_PORT + 100)); - builder.put("http.port", BASE_PORT + 101 + "-" + (BASE_PORT + 200)); + builder.put("transport.tcp.port", TRANSPORT_BASE_PORT + "-" + (TRANSPORT_BASE_PORT + PORTS_PER_CLUSTER)); + builder.put("http.port", HTTP_BASE_PORT + "-" + (HTTP_BASE_PORT + PORTS_PER_CLUSTER)); builder.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true); builder.put("node.mode", nodeMode); builder.put("http.pipelining", enableHttpPipelining); @@ -556,7 +568,7 @@ public final class InternalTestCluster extends TestCluster { * stop any of the running nodes. */ public void ensureAtLeastNumDataNodes(int n) { - List<ListenableFuture<String>> futures = Lists.newArrayList(); + List<ListenableFuture<String>> futures = new ArrayList<>(); synchronized (this) { int size = numDataNodes(); for (int i = size; i < n; i++) { @@ -1198,7 +1210,7 @@ public final class InternalTestCluster extends TestCluster { @Override public InetSocketAddress[] httpAddresses() { - List<InetSocketAddress> addresses = Lists.newArrayList(); + List<InetSocketAddress> addresses = new ArrayList<>(); for (HttpServerTransport httpServerTransport : getInstances(HttpServerTransport.class)) { addresses.add(((InetSocketTransportAddress) httpServerTransport.boundAddress().publishAddress()).address()); } @@ -1602,7 +1614,7 @@ public final class InternalTestCluster extends TestCluster { * Starts multiple nodes in an async manner with the given settings and version and returns future with its name. */ public synchronized ListenableFuture<List<String>> startNodesAsync(final int numNodes, final Settings settings, final Version version) { - List<ListenableFuture<String>> futures = Lists.newArrayList(); + List<ListenableFuture<String>> futures = new ArrayList<>(); for (int i = 0; i < numNodes; i++) { futures.add(startNodeAsync(settings, version)); } @@ -1614,7 +1626,7 @@ * The order of the node names returned matches the order of the settings provided. */ public synchronized ListenableFuture<List<String>> startNodesAsync(final Settings...
settings) { - List<ListenableFuture<String>> futures = Lists.newArrayList(); + List<ListenableFuture<String>> futures = new ArrayList<>(); for (Settings setting : settings) { futures.add(startNodeAsync(setting, Version.CURRENT)); } diff --git a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java index 2e0c293c1de..c0866a81081 100644 --- a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java +++ b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java @@ -23,9 +23,9 @@ import org.apache.log4j.Level; import org.apache.log4j.spi.LoggingEvent; import org.elasticsearch.common.regex.Regex; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; @@ -39,7 +39,7 @@ public class MockLogAppender extends AppenderSkeleton { private List<LoggingExpectation> expectations; public MockLogAppender() { - expectations = newArrayList(); + expectations = new ArrayList<>(); } public void addExpectation(LoggingExpectation expectation) { diff --git a/core/src/test/java/org/elasticsearch/test/TestSearchContext.java b/core/src/test/java/org/elasticsearch/test/TestSearchContext.java index fa8aeed014b..e6a37dcf02a 100644 --- a/core/src/test/java/org/elasticsearch/test/TestSearchContext.java +++ b/core/src/test/java/org/elasticsearch/test/TestSearchContext.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; @@ -284,8 +285,8 @@ public class TestSearchContext extends SearchContext { return searcher; } - public void setSearcher(ContextIndexSearcher searcher) { - this.searcher = searcher; + public void setSearcher(Engine.Searcher searcher) { + this.searcher = new ContextIndexSearcher(this, searcher); } @Override diff --git a/core/src/test/java/org/elasticsearch/test/XContentTestUtils.java b/core/src/test/java/org/elasticsearch/test/XContentTestUtils.java index 647930398b6..866a19e0a72 100644 --- a/core/src/test/java/org/elasticsearch/test/XContentTestUtils.java +++ b/core/src/test/java/org/elasticsearch/test/XContentTestUtils.java @@ -19,13 +19,13 @@ package org.elasticsearch.test; -import com.google.common.collect.Lists; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -78,7 +78,7 @@ public final class XContentTestUtils { } } else if (first instanceof List) { if (second instanceof List) { - List<Object> secondList = Lists.newArrayList((List<Object>) second); + List<Object> secondList = new ArrayList<>((List<Object>) second); List<Object> firstList = (List<Object>) first; if (firstList.size() == secondList.size()) { String reason = path + ": no matches found"; diff --git a/core/src/test/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java 
b/core/src/test/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java index 49b567bda64..bcd9384a88b 100644 --- a/core/src/test/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java +++ b/core/src/test/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java @@ -100,7 +100,7 @@ public class ClusterDiscoveryConfiguration extends NodeConfigurationSource { } private static int calcBasePort() { - return 30000 + InternalTestCluster.BASE_PORT; + return 30000 + InternalTestCluster.JVM_BASE_PORT_OFFSET; } @Override diff --git a/core/src/test/java/org/elasticsearch/test/rest/ESRestTestCase.java b/core/src/test/java/org/elasticsearch/test/rest/ESRestTestCase.java index 9315fff0a20..f3e18d75124 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/core/src/test/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -22,7 +22,6 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; -import com.google.common.collect.Lists; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.LuceneTestCase.SuppressFsync; @@ -68,6 +67,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.PathMatcher; import java.nio.file.StandardCopyOption; +import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; @@ -169,11 +169,11 @@ public abstract class ESRestTestCase extends ESIntegTestCase { enabled = false; } if (!enabled) { - return Lists.newArrayList(); + return new ArrayList<>(); } //parse tests only if rest test group is enabled, otherwise rest tests might not even be available on file system List<RestTestCandidate> restTestCandidates = collectTestCandidates(id, count); - List<Object[]> objects = Lists.newArrayList(); + List<Object[]> objects = new ArrayList<>(); for (RestTestCandidate restTestCandidate : restTestCandidates) { objects.add(new Object[]{restTestCandidate}); } @@ -181,7 +181,7 @@ public abstract class ESRestTestCase extends ESIntegTestCase { } private static List<RestTestCandidate> collectTestCandidates(int id, int count) throws RestTestParseException, IOException { - List<RestTestCandidate> testCandidates = Lists.newArrayList(); + List<RestTestCandidate> testCandidates = new ArrayList<>(); FileSystem fileSystem = getFileSystem(); // don't make a try-with, getFileSystem returns null // ...
and you can't close() the default filesystem @@ -191,7 +191,7 @@ public abstract class ESRestTestCase extends ESIntegTestCase { RestTestSuiteParser restTestSuiteParser = new RestTestSuiteParser(); //yaml suites are grouped by directory (effectively by api) for (String api : yamlSuites.keySet()) { - List<Path> yamlFiles = Lists.newArrayList(yamlSuites.get(api)); + List<Path> yamlFiles = new ArrayList<>(yamlSuites.get(api)); for (Path yamlFile : yamlFiles) { String key = api + yamlFile.getFileName().toString(); if (mustExecute(key, id, count)) { diff --git a/core/src/test/java/org/elasticsearch/test/rest/client/RestClient.java b/core/src/test/java/org/elasticsearch/test/rest/client/RestClient.java index f18f9201d1a..4eed5510797 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/core/src/test/java/org/elasticsearch/test/rest/client/RestClient.java @@ -19,10 +19,8 @@ package org.elasticsearch.test.rest.client; import com.carrotsearch.randomizedtesting.RandomizedTest; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; - import org.apache.http.config.Registry; import org.apache.http.config.RegistryBuilder; import org.apache.http.conn.socket.ConnectionSocketFactory; @@ -46,6 +44,7 @@ import org.elasticsearch.test.rest.client.http.HttpResponse; import org.elasticsearch.test.rest.spec.RestApi; import org.elasticsearch.test.rest.spec.RestSpec; +import javax.net.ssl.SSLContext; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; @@ -57,13 +56,12 @@ import java.security.KeyStore; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.cert.CertificateException; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; -import javax.net.ssl.SSLContext; - /** * REST client used to test the elasticsearch REST layer * Holds the {@link RestSpec} used to translate api calls into REST calls @@ -136,7 +134,7 @@ public class RestClient implements Closeable { */ public RestResponse callApi(String apiName, Map<String, String> params, String body) throws IOException, RestException { - List<Integer> ignores = Lists.newArrayList(); + List<Integer> ignores = new ArrayList<>(); Map<String, String> requestParams = null; if (params != null) { //makes a copy of the parameters before modifying them for this specific request diff --git a/core/src/test/java/org/elasticsearch/test/rest/json/JsonPath.java b/core/src/test/java/org/elasticsearch/test/rest/json/JsonPath.java index 567e849a391..c3dbd583430 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/json/JsonPath.java +++ b/core/src/test/java/org/elasticsearch/test/rest/json/JsonPath.java @@ -18,13 +18,12 @@ */ package org.elasticsearch.test.rest.json; -import com.google.common.collect.Lists; - import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.rest.Stash; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -93,7 +92,7 @@ public class JsonPath { } private String[] parsePath(String path) { - List<String> list = Lists.newArrayList(); + List<String> list = new ArrayList<>(); StringBuilder current = new StringBuilder(); boolean escape = false; for (int i = 0; i < path.length(); i++) { diff --git 
a/core/src/test/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java b/core/src/test/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java index 0a81583cf32..33733821019 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java +++ b/core/src/test/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java @@ -18,12 +18,12 @@ */ package org.elasticsearch.test.rest.parser; -import com.google.common.collect.Lists; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.rest.section.SkipSection; import java.io.IOException; +import java.util.ArrayList; import java.util.List; /** @@ -40,7 +40,7 @@ public class SkipSectionParser implements RestTestFragmentParser<SkipSection> { XContentParser.Token token; String version = null; String reason = null; - List<String> features = Lists.newArrayList(); + List<String> features = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { diff --git a/core/src/test/java/org/elasticsearch/test/rest/section/ApiCallSection.java b/core/src/test/java/org/elasticsearch/test/rest/section/ApiCallSection.java index 2a49cd4c594..a40d2268d43 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/section/ApiCallSection.java +++ b/core/src/test/java/org/elasticsearch/test/rest/section/ApiCallSection.java @@ -21,9 +21,9 @@ package org.elasticsearch.test.rest.section; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -34,7 +34,7 @@ public class ApiCallSection { private final String api; private final Map<String, String> params = Maps.newHashMap(); - private final List<Map<String, Object>> bodies = Lists.newArrayList(); + private final List<Map<String, Object>> bodies = new ArrayList<>(); public ApiCallSection(String api) { this.api = api; diff --git a/core/src/test/java/org/elasticsearch/test/rest/section/RestTestSuite.java b/core/src/test/java/org/elasticsearch/test/rest/section/RestTestSuite.java index a9047c18db1..923ba93521c 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/section/RestTestSuite.java +++ b/core/src/test/java/org/elasticsearch/test/rest/section/RestTestSuite.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.test.rest.section; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import java.util.ArrayList; import java.util.List; import java.util.Set; @@ -71,6 +71,6 @@ public class RestTestSuite { } public List<TestSection> getTestSections() { - return Lists.newArrayList(testSections); + return new ArrayList<>(testSections); } } diff --git a/core/src/test/java/org/elasticsearch/test/rest/section/SetupSection.java b/core/src/test/java/org/elasticsearch/test/rest/section/SetupSection.java index 72f653e6c8f..45c66fbad4f 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/section/SetupSection.java +++ b/core/src/test/java/org/elasticsearch/test/rest/section/SetupSection.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.test.rest.section; -import com.google.common.collect.Lists; - +import java.util.ArrayList; import java.util.List; /** @@ -36,7 +35,7 @@ public class SetupSection { private SkipSection skipSection; - private List<DoSection> doSections = 
Lists.newArrayList(); + private List<DoSection> doSections = new ArrayList<>(); public SkipSection getSkipSection() { return skipSection; diff --git a/core/src/test/java/org/elasticsearch/test/rest/section/SkipSection.java b/core/src/test/java/org/elasticsearch/test/rest/section/SkipSection.java index e7ab4555776..179d0a1e868 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/section/SkipSection.java +++ b/core/src/test/java/org/elasticsearch/test/rest/section/SkipSection.java @@ -18,12 +18,11 @@ */ package org.elasticsearch.test.rest.section; -import com.google.common.collect.Lists; import org.elasticsearch.Version; -import org.elasticsearch.common.Strings; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.rest.support.Features; +import java.util.ArrayList; import java.util.List; /** @@ -44,7 +43,7 @@ public class SkipSection { private SkipSection() { this.lowerVersion = null; this.upperVersion = null; - this.features = Lists.newArrayList(); + this.features = new ArrayList<>(); this.reason = null; } diff --git a/core/src/test/java/org/elasticsearch/test/rest/section/TestSection.java b/core/src/test/java/org/elasticsearch/test/rest/section/TestSection.java index def613b1c56..3f44e5ce767 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/section/TestSection.java +++ b/core/src/test/java/org/elasticsearch/test/rest/section/TestSection.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.test.rest.section; -import com.google.common.collect.Lists; - +import java.util.ArrayList; import java.util.List; /** @@ -32,7 +31,7 @@ public class TestSection implements Comparable<TestSection> { public TestSection(String name) { this.name = name; - this.executableSections = Lists.newArrayList(); + this.executableSections = new ArrayList<>(); } public String getName() { diff --git a/core/src/test/java/org/elasticsearch/test/rest/spec/RestApi.java b/core/src/test/java/org/elasticsearch/test/rest/spec/RestApi.java index 0996df45b48..7931be1649f 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/spec/RestApi.java +++ b/core/src/test/java/org/elasticsearch/test/rest/spec/RestApi.java @@ -18,11 +18,11 @@ */ package org.elasticsearch.test.rest.spec; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; @@ -35,10 +35,10 @@ import java.util.regex.Pattern; public class RestApi { private final String name; - private List<String> methods = Lists.newArrayList(); - private List<String> paths = Lists.newArrayList(); - private List<String> pathParts = Lists.newArrayList(); - private List<String> params = Lists.newArrayList(); + private List<String> methods = new ArrayList<>(); + private List<String> paths = new ArrayList<>(); + private List<String> pathParts = new ArrayList<>(); + private List<String> params = new ArrayList<>(); private BODY body = BODY.NOT_SUPPORTED; public static enum BODY { @@ -63,7 +63,7 @@ public class RestApi { public List<String> getSupportedMethods(Set<String> restParams) { //we try to avoid hardcoded mappings but the index api is the exception if ("index".equals(name) || "create".equals(name)) { - List<String> indexMethods = Lists.newArrayList(); + List<String> indexMethods = new ArrayList<>(); for (String method : methods) { if (restParams.contains("id")) { //PUT when the id is provided @@ -163,7 +163,7 @@ public class RestApi { */ 
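+ // Illustrative example (based on the standard rest spec paths for the index api): restParams {index, type, id} would select /{index}/{type}/{id}, while {index, type} would select /{index}/{type}.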
private List<RestPath> findMatchingRestPaths(Set<String> restParams) { - List<RestPath> matchingRestPaths = Lists.newArrayList(); + List<RestPath> matchingRestPaths = new ArrayList<>(); RestPath[] restPaths = buildRestPaths(); for (RestPath restPath : restPaths) { diff --git a/core/src/test/java/org/elasticsearch/test/rest/support/Features.java b/core/src/test/java/org/elasticsearch/test/rest/support/Features.java index 3b867262685..018d2413737 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/support/Features.java +++ b/core/src/test/java/org/elasticsearch/test/rest/support/Features.java @@ -19,9 +19,9 @@ package org.elasticsearch.test.rest.support; -import com.google.common.collect.Lists; import org.elasticsearch.test.ESIntegTestCase; +import java.util.Arrays; import java.util.List; /** @@ -34,7 +34,7 @@ import java.util.List; */ public final class Features { - private static final List<String> SUPPORTED = Lists.newArrayList("stash_in_path", "groovy_scripting"); + private static final List<String> SUPPORTED = Arrays.asList("stash_in_path", "groovy_scripting"); private Features() { diff --git a/core/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/core/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index cb724bfd9f0..2770e3581d2 100644 --- a/core/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/core/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -30,9 +30,7 @@ import org.elasticsearch.test.NodeConfigurationSource; import java.io.IOException; import java.nio.file.Path; -import java.util.Iterator; -import java.util.Map; -import java.util.Random; +import java.util.*; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; @@ -57,26 +55,40 @@ public class InternalTestClusterTests extends ESTestCase { Path baseDir = createTempDir(); InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix); InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix); - assertClusters(cluster0, cluster1, true); + // TODO: this is not ideal - we should have a way to make sure ports are initialized in the same way assertClusters(cluster0, cluster1, false); } - public static void assertClusters(InternalTestCluster cluster0, InternalTestCluster cluster1, boolean assertClusterName) { + /** + * a set of settings that are expected to have different values between clusters, even if they have been initialized with the same + * base settings.
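+ * (here: the cluster name and the per-cluster transport.tcp.port / http.port ranges registered in the static block below)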
+ */ + final static Set<String> clusterUniqueSettings = new HashSet<>(); + + static { + clusterUniqueSettings.add(ClusterName.SETTING); + clusterUniqueSettings.add("transport.tcp.port"); + clusterUniqueSettings.add("http.port"); + } + + public static void assertClusters(InternalTestCluster cluster0, InternalTestCluster cluster1, boolean checkClusterUniqueSettings) { Settings defaultSettings0 = cluster0.getDefaultSettings(); Settings defaultSettings1 = cluster1.getDefaultSettings(); - assertSettings(defaultSettings0, defaultSettings1, assertClusterName); + assertSettings(defaultSettings0, defaultSettings1, checkClusterUniqueSettings); assertThat(cluster0.numDataNodes(), equalTo(cluster1.numDataNodes())); - if (assertClusterName) { + if (checkClusterUniqueSettings) { assertThat(cluster0.getClusterName(), equalTo(cluster1.getClusterName())); } } - public static void assertSettings(Settings left, Settings right, boolean compareClusterName) { + public static void assertSettings(Settings left, Settings right, boolean checkClusterUniqueSettings) { ImmutableSet<Map.Entry<String, String>> entries0 = left.getAsMap().entrySet(); Map<String, String> entries1 = right.getAsMap(); assertThat(entries0.size(), equalTo(entries1.size())); for (Map.Entry<String, String> entry : entries0) { - if(entry.getKey().equals(ClusterName.SETTING) && compareClusterName == false) { + if (clusterUniqueSettings.contains(entry.getKey()) && checkClusterUniqueSettings == false) { continue; } assertThat(entries1, hasEntry(entry.getKey(), entry.getValue())); diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java index 4847d2b528e..a8478aef051 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java @@ -41,7 +41,7 @@ import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; -import org.elasticsearch.test.SettingsSource; +import org.elasticsearch.test.NodeConfigurationSource; import org.elasticsearch.test.TestCluster; import org.junit.After; import org.junit.AfterClass; @@ -50,7 +50,6 @@ import org.junit.Test; import java.io.IOException; import java.util.ArrayList; -import java.util.List; import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -76,20 +75,8 @@ public class TribeIT extends ESIntegTestCase { @BeforeClass public static void setupSecondCluster() throws Exception { ESIntegTestCase.beforeClass(); - SettingsSource source = new SettingsSource() { - @Override - public Settings node(int nodeOrdinal) { - final int base = InternalTestCluster.BASE_PORT + 1000; - return Settings.builder().put("transport.tcp.port", base + "-" + (base + 100)).build(); - } - - @Override - public Settings transportClient() { - return node(0); - } - }; - // create another cluster - cluster2 = new InternalTestCluster(InternalTestCluster.configuredNodeMode(), randomLong(), createTempDir(), 2, 2, Strings.randomBase64UUID(getRandom()), source, 0, false, SECOND_CLUSTER_NODE_PREFIX); + cluster2 = new InternalTestCluster(InternalTestCluster.configuredNodeMode(), randomLong(), createTempDir(), 2, 2, + Strings.randomBase64UUID(getRandom()), NodeConfigurationSource.EMPTY, 0, false, SECOND_CLUSTER_NODE_PREFIX); cluster2.beforeTest(getRandom(), 0.1); cluster2.ensureAtLeastNumDataNodes(2); diff --git
a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java index 6c4bd969955..b24a752c55d 100644 --- a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java +++ b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java @@ -20,17 +20,21 @@ package org.elasticsearch.ttl; import com.google.common.base.Predicate; + import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.update.UpdateRequestBuilder; +import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.get.GetField; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.junit.Test; +import org.elasticsearch.test.ESIntegTestCase.Scope; import java.io.IOException; import java.util.Locale; @@ -39,9 +43,16 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.ESIntegTestCase.Scope; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; @ClusterScope(scope= Scope.SUITE, numDataNodes = 1) public class SimpleTTLIT extends ESIntegTestCase { @@ -63,7 +74,6 @@ public class SimpleTTLIT extends ESIntegTestCase { .build(); } - @Test public void testSimpleTTL() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", XContentFactory.jsonBuilder() @@ -200,7 +210,7 @@ public class SimpleTTLIT extends ESIntegTestCase { assertThat(getResponse.isExists(), equalTo(false)); } - @Test // issue 5053 + // issue 5053 public void testThatUpdatingMappingShouldNotRemoveTTLConfiguration() throws Exception { String index = "foo"; String type = "mytype"; @@ -220,6 +230,63 @@ public class SimpleTTLIT extends ESIntegTestCase { assertTTLMappingEnabled(index, type); } + /** + * Test that updates with detect_noop set to true (the default) that don't + * change the source don't change the ttl. This is unexpected behavior and + * documented in ttl-field.asciidoc. If this behavior changes it is safe to + * rewrite this test to reflect the new behavior and to change the + * documentation. 
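+ * Put differently: an update that detect_noop classifies as a no-op is never applied, so the ttl supplied on that update request is ignored and the previously set ttl keeps counting down.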
+ */ + public void testNoopUpdate() throws IOException { + assertAcked(prepareCreate("test") + .addMapping("type1", XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("_timestamp").field("enabled", true).endObject() + .startObject("_ttl").field("enabled", true).endObject() + .endObject() + .endObject())); + ensureYellow("test"); + + long aLongTime = 10000000; + long firstTtl = aLongTime * 3; + long secondTtl = aLongTime * 2; + long thirdTtl = aLongTime * 1; + IndexResponse indexResponse = client().prepareIndex("test", "type1", "1").setSource("field1", "value1") + .setTTL(firstTtl).setRefresh(true).get(); + assertTrue(indexResponse.isCreated()); + assertThat(getTtl("type1", 1), both(lessThanOrEqualTo(firstTtl)).and(greaterThan(secondTtl))); + + // Updating with the default detect_noop without a change to the document doesn't change the ttl. + UpdateRequestBuilder update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value1").setTtl(secondTtl); + assertThat(updateAndGetTtl(update), both(lessThanOrEqualTo(firstTtl)).and(greaterThan(secondTtl))); + + // Updating with the default detect_noop with a change to the document does change the ttl. + update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value2").setTtl(secondTtl); + assertThat(updateAndGetTtl(update), both(lessThanOrEqualTo(secondTtl)).and(greaterThan(thirdTtl))); + + // Updating with detect_noop=true without a change to the document doesn't change the ttl. + update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value2").setTtl(secondTtl).setDetectNoop(true); + assertThat(updateAndGetTtl(update), both(lessThanOrEqualTo(secondTtl)).and(greaterThan(thirdTtl))); + + // Updating with detect_noop=false without a change to the document does change the ttl. + update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value2").setTtl(thirdTtl).setDetectNoop(false); + assertThat(updateAndGetTtl(update), lessThanOrEqualTo(thirdTtl)); + } + + private long updateAndGetTtl(UpdateRequestBuilder update) { + UpdateResponse updateResponse = update.setFields("_ttl").get(); + assertThat(updateResponse.getShardInfo().getFailed(), equalTo(0)); + // You can't actually fetch _ttl from an update so we use a get. 
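+ // (getTtl below does a realtime GET with fields=_ttl, so the freshly written value is visible without a refresh)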
+ return getTtl(updateResponse.getType(), updateResponse.getId()); + } + + private long getTtl(String type, Object id) { + GetResponse getResponse = client().prepareGet("test", type, id.toString()).setFields("_ttl").setRealtime(true).execute() + .actionGet(); + return ((Number) getResponse.getField("_ttl").getValue()).longValue(); + } + private void assertTTLMappingEnabled(String index, String type) throws IOException { String errMsg = String.format(Locale.ROOT, "Expected ttl field mapping to be enabled for %s/%s", index, type); diff --git a/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java b/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java index 3f0b1bcc1ec..499fb52a98b 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.update; +import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -44,8 +45,10 @@ public class UpdateNoopIT extends ESIntegTestCase { updateAndCheckSource(4, fields("bar", null)); updateAndCheckSource(4, fields("bar", null)); updateAndCheckSource(5, fields("bar", "foo")); + // detect_noop defaults to true + updateAndCheckSource(5, null, fields("bar", "foo")); - assertEquals(3, totalNoopUpdates()); + assertEquals(4, totalNoopUpdates()); } @Test @@ -210,7 +213,8 @@ } /** - * Totally empty requests are noop if and only if detect noops is true. + * Totally empty requests are noop if and only if detect noops is true and + * it's true by default. */ @Test public void totallyEmpty() throws Exception { @@ -223,6 +227,7 @@ .endObject()); update(true, 1, XContentFactory.jsonBuilder().startObject().endObject()); update(false, 2, XContentFactory.jsonBuilder().startObject().endObject()); + update(null, 2, XContentFactory.jsonBuilder().startObject().endObject()); } private XContentBuilder fields(Object...
fields) throws IOException { @@ -237,17 +242,23 @@ public class UpdateNoopIT extends ESIntegTestCase { } private void updateAndCheckSource(long expectedVersion, XContentBuilder xContentBuilder) { - UpdateResponse updateResponse = update(true, expectedVersion, xContentBuilder); + updateAndCheckSource(expectedVersion, true, xContentBuilder); + } + + private void updateAndCheckSource(long expectedVersion, Boolean detectNoop, XContentBuilder xContentBuilder) { + UpdateResponse updateResponse = update(detectNoop, expectedVersion, xContentBuilder); assertEquals(updateResponse.getGetResult().sourceRef().toUtf8(), xContentBuilder.bytes().toUtf8()); } - private UpdateResponse update(boolean detectNoop, long expectedVersion, XContentBuilder xContentBuilder) { - UpdateResponse updateResponse = client().prepareUpdate("test", "type1", "1") + private UpdateResponse update(Boolean detectNoop, long expectedVersion, XContentBuilder xContentBuilder) { + UpdateRequestBuilder updateRequest = client().prepareUpdate("test", "type1", "1") .setDoc(xContentBuilder.bytes().toUtf8()) .setDocAsUpsert(true) - .setDetectNoop(detectNoop) - .setFields("_source") - .execute().actionGet(); + .setFields("_source"); + if (detectNoop != null) { + updateRequest.setDetectNoop(detectNoop); + } + UpdateResponse updateResponse = updateRequest.get(); assertThat(updateResponse.getGetResult(), notNullValue()); assertEquals(expectedVersion, updateResponse.getVersion()); return updateResponse; diff --git a/core/src/test/java/org/elasticsearch/watcher/FileWatcherTest.java b/core/src/test/java/org/elasticsearch/watcher/FileWatcherTest.java index 457f7578f9b..537fd873a09 100644 --- a/core/src/test/java/org/elasticsearch/watcher/FileWatcherTest.java +++ b/core/src/test/java/org/elasticsearch/watcher/FileWatcherTest.java @@ -29,12 +29,10 @@ import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import java.util.ArrayList; import java.util.List; -import static com.google.common.collect.Lists.newArrayList; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.*; @LuceneTestCase.SuppressFileSystems("ExtrasFS") public class FileWatcherTest extends ESTestCase { @@ -51,7 +49,7 @@ public class FileWatcherTest extends ESTestCase { return rootDir.toUri().relativize(file.toUri()).getPath(); } - private List<String> notifications = newArrayList(); + private List<String> notifications = new ArrayList<>(); @Override public void onFileInit(Path file) { diff --git a/dev-tools/ElasticSearch.launch b/dev-tools/Elasticsearch.launch similarity index 67% rename from dev-tools/ElasticSearch.launch rename to dev-tools/Elasticsearch.launch index c501e4b8818..2016518cd23 100644 --- a/dev-tools/ElasticSearch.launch +++ b/dev-tools/Elasticsearch.launch @@ -6,12 +6,13 @@ <listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES"> <listEntry value="1"/> </listAttribute> -<mapAttribute key="org.eclipse.debug.core.environmentVariables"> -<mapEntry key="ES_HOME" value="${target_home}"/> -</mapAttribute> -<stringAttribute key="org.eclipse.jdt.launching.CLASSPATH_PROVIDER" value="org.eclipse.m2e.launchconfig.classpathProvider"/> +<listAttribute key="org.eclipse.debug.ui.favoriteGroups"> +<listEntry value="org.eclipse.debug.ui.launchGroup.debug"/> +<listEntry value="org.eclipse.debug.ui.launchGroup.run"/> +</listAttribute> +<booleanAttribute 
key="org.eclipse.jdt.launching.ATTR_USE_START_ON_FIRST_THREAD" value="true"/> <stringAttribute key="org.eclipse.jdt.launching.MAIN_TYPE" value="org.elasticsearch.bootstrap.Elasticsearch"/> +<stringAttribute key="org.eclipse.jdt.launching.PROGRAM_ARGUMENTS" value="start"/> <stringAttribute key="org.eclipse.jdt.launching.PROJECT_ATTR" value="elasticsearch"/> -<stringAttribute key="org.eclipse.jdt.launching.SOURCE_PATH_PROVIDER" value="org.eclipse.m2e.launchconfig.sourcepathProvider"/> -<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Xms256m -Xmx1g -Djava.awt.headless=true -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=logs/heapdump.hprof -Delasticsearch -Des.foreground=yes -ea"/> +<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Xms256m -Xmx1g -Djava.awt.headless=true -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=logs/heapdump.hprof -Delasticsearch -Des.foreground=yes -ea -Des.path.home=target/eclipse_run -Des.security.manager.enabled=false"/> </launchConfiguration> diff --git a/dev-tools/create_bwc_index.py b/dev-tools/create_bwc_index.py index 1a2bfbdc489..22ba7f78ce5 100644 --- a/dev-tools/create_bwc_index.py +++ b/dev-tools/create_bwc_index.py @@ -237,15 +237,19 @@ def generate_index(client, version, index_name): } } + settings = { + 'number_of_shards': 1, + 'number_of_replicas': 0, + } + if version.startswith('0.') or version.startswith('1.'): + # Same as ES default (60 seconds), but missing the units to make sure they are inserted on upgrade: + settings['gc_deletes'] = '60000', + # Same as ES default (5 GB), but missing the units to make sure they are inserted on upgrade: + settings['merge.policy.max_merged_segment'] = '5368709120' + + client.indices.create(index=index_name, body={ - 'settings': { - 'number_of_shards': 1, - 'number_of_replicas': 0, - # Same as ES default (60 seconds), but missing the units to make sure they are inserted on upgrade: - "gc_deletes": '60000', - # Same as ES default (5 GB), but missing the units to make sure they are inserted on upgrade: - "merge.policy.max_merged_segment": '5368709120' - }, + 'settings': settings, 'mappings': mappings }) health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0) @@ -374,7 +378,8 @@ def create_bwc_index(cfg, version): # 10067: get a delete-by-query into the translog on upgrade. We must do # this after the snapshot, because it calls flush. Otherwise the index # will already have the deletions applied on upgrade. 
- delete_by_query(client, version, index_name, 'doc') + if version.startswith('0.') or version.startswith('1.'): + delete_by_query(client, version, index_name, 'doc') shutdown_node(node) node = None diff --git a/dev-tools/es_release_notes.pl b/dev-tools/es_release_notes.pl index 464678560f4..c96645c1d6e 100644 --- a/dev-tools/es_release_notes.pl +++ b/dev-tools/es_release_notes.pl @@ -29,7 +29,7 @@ my $Issue_URL = "http://github.com/${User_Repo}issues/"; my @Groups = qw( breaking deprecation feature - enhancement bug regression build doc test + enhancement bug regression upgrade build doc test ); my %Group_Labels = ( breaking => 'Breaking changes', @@ -39,8 +39,9 @@ my %Group_Labels = ( feature => 'New features', enhancement => 'Enhancements', bug => 'Bug fixes', - regression => 'Regression', + regression => 'Regressions', test => 'Tests', + upgrade => 'Upgrades', other => 'NOT CLASSIFIED', ); diff --git a/dev-tools/get-bwc-version.py b/dev-tools/get-bwc-version.py index a44c06af576..54c559d1dc8 100644 --- a/dev-tools/get-bwc-version.py +++ b/dev-tools/get-bwc-version.py @@ -64,8 +64,10 @@ def main(): if c.version == '1.2.0': # 1.2.0 was pulled from download.elasticsearch.org because of routing bug: url = 'http://central.maven.org/maven2/org/elasticsearch/elasticsearch/1.2.0/%s' % filename - else: + elif c.version.startswith('0.') or c.version.startswith('1.'): url = 'https://download.elasticsearch.org/elasticsearch/elasticsearch/%s' % filename + else: + url = 'http://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/%s/%s' % (c.version, filename) print('Downloading %s' % url) urllib.request.urlretrieve(url, filename) diff --git a/dev-tools/prepare_release_candidate.py b/dev-tools/prepare_release_candidate.py index baaac881474..c2f77266a93 100644 --- a/dev-tools/prepare_release_candidate.py +++ b/dev-tools/prepare_release_candidate.py @@ -23,7 +23,7 @@ # # python3 ./dev-tools/prepare-release.py # -# Note: Ensure the script is run from the root directory +# Note: Ensure the script is run from the elasticsearch top level directory # import fnmatch @@ -76,6 +76,11 @@ gpgcheck=1 gpgkey=http://packages.elastic.co/GPG-KEY-elasticsearch enabled=1 +To smoke-test the release please run: + + python3 -B ./dev-tools/smoke_tests_rc.py --version %(version)s --hash %(hash)s --plugins license,shield,watcher + +NOTE: this script requires JAVA_HOME to point to a Java 7 Runtime [1] https://github.com/elastic/elasticsearch/commit/%(hash)s [2] http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/zip/elasticsearch/%(version)s/elasticsearch-%(version)s.zip @@ -83,12 +88,14 @@ enabled=1 [4] http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/rpm/elasticsearch/%(version)s/elasticsearch-%(version)s.rpm [5] http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/deb/elasticsearch/%(version)s/elasticsearch-%(version)s.deb """ - -def run(command, env_vars=None): +VERBOSE=True +def run(command, env_vars=None, verbose=VERBOSE): if env_vars: for key, value in env_vars.items(): os.putenv(key, value) - if os.system('%s' % (command)): + if not verbose: + command = '%s >> /dev/null 2>&1' % (command) + if os.system(command): raise RuntimeError(' FAILED: %s' % (command)) def ensure_checkout_is_clean(): @@ -181,16 +188,20 @@ if __name__ == "__main__": help='Only runs a maven install to skip the remove deployment step') 
parser.add_argument('--gpg-key', '-k', dest='gpg_key', default="D88E42B4", help='Allows you to specify a different gpg_key to be used instead of the default release key') + parser.add_argument('--verbose', '-b', dest='verbose', action='store_true', + help='Runs the script in verbose mode') parser.set_defaults(deploy=False) parser.set_defaults(skip_doc_check=False) parser.set_defaults(push=False) parser.set_defaults(install_only=False) + parser.set_defaults(verbose=False) args = parser.parse_args() install_and_deploy = args.deploy skip_doc_check = args.skip_doc_check push = args.push gpg_key = args.gpg_key install_only = args.install_only + VERBOSE = args.verbose ensure_checkout_is_clean() release_version = find_release_version() @@ -269,5 +280,14 @@ """) print('NOTE: Running s3cmd might require you to create a config file with your credentials, if the s3cmd does not support supplying them via the command line!') print('*** Once the release is deployed and published send out the following mail to dev@elastic.co:') - print(MAIL_TEMPLATE % ({'version' : release_version, 'hash': shortHash, 'major_minor_version' : major_minor_version})) + string_format_dict = {'version' : release_version, 'hash': shortHash, 'major_minor_version' : major_minor_version} + print(MAIL_TEMPLATE % string_format_dict) + + print('To publish the release and the repo on S3 execute the following commands:') + print(' s3cmd cp --recursive s3://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/repos/elasticsearch/%(major_minor_version)s/ s3://packages.elasticsearch.org/elasticsearch/%(major_minor_version)s' % string_format_dict) + print(' s3cmd cp --recursive s3://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/ s3://download.elasticsearch.org/elasticsearch/release/org' % string_format_dict) + print('Now go ahead and tag the release:') + print(' git tag -a v%(version)s %(hash)s' % string_format_dict) + print(' git push origin v%(version)s' % string_format_dict ) + diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py new file mode 100644 index 00000000000..2678024d177 --- /dev/null +++ b/dev-tools/smoke_test_rc.py @@ -0,0 +1,282 @@ +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on +# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +# Smoke-tests a release candidate +# +# 1. Downloads the tar.gz, deb, RPM and zip file from the staging URL +# 2. Verifies its sha1 hashes and GPG signatures against the release key +# 3. Installs all official plugins +# 4.
Starts one node for tar.gz and zip packages and checks: +# -- if it runs with Java 1.7 +# -- if the build hash given is the one that is returned by the status response +# -- if the build is a release version and not a snapshot version +# -- if all plugins are loaded +# -- if the status response returns the correct version +# +# USAGE: +# +# python3 -B ./dev-tools/smoke_test_rc.py --version 2.0.0-beta1 --hash bfa3e47 +# +# to also test other plugins try run +# +# python3 -B ./dev-tools/smoke_test_rc.py --version 2.0.0-beta1 --hash bfa3e47 --plugins license,shield,watcher +# +# Note: Ensure the script is run from the elasticsearch top level directory +# +# For testing a release from sonatype try this: +# +# python3 -B dev-tools/smoke_test_rc.py --version 2.0.0-beta1 --hash bfa3e47 --fetch_url https://oss.sonatype.org/content/repositories/releases/ +# + +import argparse +import tempfile +import os +import signal +import shutil +import urllib +import urllib.request +import hashlib +import time +import socket +import json +import base64 + +from prepare_release_candidate import run +from http.client import HTTPConnection + +DEFAULT_PLUGINS = ["analysis-icu", + "analysis-kuromoji", + "analysis-phonetic", + "analysis-smartcn", + "analysis-stempel", + "cloud-aws", + "cloud-azure", + "cloud-gce", + "delete-by-query", + "discovery-multicast", + "lang-javascript", + "lang-python", + "mapper-murmur3", + "mapper-size"] + +try: + JAVA_HOME = os.environ['JAVA_HOME'] +except KeyError: + raise RuntimeError(""" + Please set JAVA_HOME in the env before running release tool + On OSX use: export JAVA_HOME=`/usr/libexec/java_home -v '1.7*'`""") + +def java_exe(): + path = JAVA_HOME + return 'export JAVA_HOME="%s" PATH="%s/bin:$PATH" JAVACMD="%s/bin/java"' % (path, path, path) + +def verify_java_version(version): + s = os.popen('%s; java -version 2>&1' % java_exe()).read() + if ' version "%s.' 
% version not in s: + raise RuntimeError('got wrong version for java %s:\n%s' % (version, s)) + + +def sha1(file): + with open(file, 'rb') as f: + return hashlib.sha1(f.read()).hexdigest() + +def read_fully(file): + with open(file, encoding='utf-8') as f: + return f.read() + + +def wait_for_node_startup(host='127.0.0.1', port=9200, timeout=60, header={}): + print(' Waiting until node becomes available for at most %s seconds' % timeout) + for _ in range(timeout): + conn = HTTPConnection(host=host, port=port, timeout=timeout) + try: + time.sleep(1) + conn.request('GET', '', headers=header) + res = conn.getresponse() + if res.status == 200: + return True + except socket.error as e: + pass + #that is ok it might not be there yet + finally: + conn.close() + return False + +def download_and_verify(version, hash, files, base_url, plugins=DEFAULT_PLUGINS, verbose=False): + print('Downloading and verifying release %s from %s' % (version, base_url)) + tmp_dir = tempfile.mkdtemp() + try: + downloaded_files = [] + print(' ' + '*' * 80) + for file in files: + name = os.path.basename(file) + print(' Smoketest file: %s' % name) + url = '%s/%s' % (base_url, file) + print(' Downloading %s' % (url)) + artifact_path = os.path.join(tmp_dir, file) + downloaded_files.append(artifact_path) + current_artifact_dir = os.path.dirname(artifact_path) + os.makedirs(current_artifact_dir) + urllib.request.urlretrieve(url, os.path.join(tmp_dir, file)) + sha1_url = ''.join([url, '.sha1']) + checksum_file = artifact_path + ".sha1" + print(' Downloading %s' % (sha1_url)) + urllib.request.urlretrieve(sha1_url, checksum_file) + print(' Verifying checksum %s' % (checksum_file)) + expected = read_fully(checksum_file) + actual = sha1(artifact_path) + if expected != actual : + raise RuntimeError('sha1 hash for %s doesn\'t match %s != %s' % (name, expected, actual)) + gpg_url = ''.join([url, '.asc']) + gpg_file = artifact_path + ".asc" + print(' Downloading %s' % (gpg_url)) + urllib.request.urlretrieve(gpg_url, gpg_file) + print(' Verifying gpg signature %s' % (gpg_file)) + # here we create a temp gpg home where we download the release key as the only key into + # when we verify the signature it will fail if the signed key is not in the keystore and that + # way we keep the executing host unmodified since we don't have to import the key into the default keystore + gpg_home_dir = os.path.join(current_artifact_dir, "gpg_home_dir") + os.makedirs(gpg_home_dir, 0o700) + run('gpg --homedir %s --keyserver pgp.mit.edu --recv-key D88E42B4' % gpg_home_dir, verbose=verbose) + run('cd %s && gpg --homedir %s --verify %s' % (current_artifact_dir, gpg_home_dir, os.path.basename(gpg_file)), verbose=verbose) + print(' ' + '*' * 80) + print() + smoke_test_release(version, downloaded_files, hash, plugins, verbose=verbose) + print(' SUCCESS') + finally: + shutil.rmtree(tmp_dir) + +def smoke_test_release(release, files, expected_hash, plugins, verbose=False): + for release_file in files: + if not os.path.isfile(release_file): + raise RuntimeError('Smoketest failed missing file %s' % (release_file)) + tmp_dir = tempfile.mkdtemp() + if release_file.endswith('tar.gz'): + run('tar -xzf %s -C %s' % (release_file, tmp_dir), verbose=verbose) + elif release_file.endswith('zip'): + run('unzip %s -d %s' % (release_file, tmp_dir), verbose=verbose) + else: + print(' Skip SmokeTest for [%s]' % release_file) + continue # nothing to do here + es_run_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/elasticsearch') + print(' Smoke testing package [%s]' 
% release_file) + es_plugin_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/plugin') + plugin_names = {} + for plugin in plugins: + print(' Install plugin [%s]' % (plugin)) + run('%s; %s -Des.plugins.staging=true %s %s' % (java_exe(), es_plugin_path, 'install', plugin), verbose=verbose) + plugin_names[plugin] = True + if 'shield' in plugin_names: + headers = { 'Authorization' : 'Basic %s' % base64.b64encode(b"es_admin:foobar").decode("UTF-8") } + es_shield_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/shield/esusers') + print(" Install dummy shield user") + run('%s; %s useradd es_admin -r admin -p foobar' % (java_exe(), es_shield_path), verbose=verbose) + else: + headers = {} + print(' Starting elasticsearch daemon from [%s]' % os.path.join(tmp_dir, 'elasticsearch-%s' % release)) + try: + run('%s; %s -Des.node.name=smoke_tester -Des.cluster.name=prepare_release -Des.script.inline=on -Des.script.indexed=on -Des.repositories.url.allowed_urls=http://snapshot.test* %s -Des.pidfile=%s' + % (java_exe(), es_run_path, '-d', os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'es-smoke.pid')), verbose=verbose) + conn = HTTPConnection(host='127.0.0.1', port=9200, timeout=20) + if not wait_for_node_startup(header=headers): + print("elasticsearch logs:") + print('*' * 80) + logs = read_fully(os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'logs/prepare_release.log')) + print(logs) + print('*' * 80) + raise RuntimeError('server didn\'t start up') + try: # we now get / and /_nodes to fetch basic infos like hashes etc and the installed plugins + conn.request('GET', '', headers=headers) + res = conn.getresponse() + if res.status == 200: + version = json.loads(res.read().decode("utf-8"))['version'] + if release != version['number']: + raise RuntimeError('Expected version [%s] but was [%s]' % (release, version['number'])) + if version['build_snapshot']: + raise RuntimeError('Expected non snapshot version') + if not expected_hash.startswith(version['build_hash'].strip()): + raise RuntimeError('HEAD hash does not match expected [%s] but got [%s]' % (expected_hash, version['build_hash'])) + print(' Verify if plugins are listed in _nodes') + conn.request('GET', '/_nodes?plugin=true&pretty=true', headers=headers) + res = conn.getresponse() + if res.status == 200: + nodes = json.loads(res.read().decode("utf-8"))['nodes'] + for _, node in nodes.items(): + node_plugins = node['plugins'] + for node_plugin in node_plugins: + if not plugin_names.get(node_plugin['name'].strip(), False): + raise RuntimeError('Unexpected plugin %s' % node_plugin['name']) + del plugin_names[node_plugin['name']] + if plugin_names: + raise RuntimeError('Plugins not loaded %s' % list(plugin_names.keys())) + + else: + raise RuntimeError('Expected HTTP 200 but got %s' % res.status) + else: + raise RuntimeError('Expected HTTP 200 but got %s' % res.status) + finally: + conn.close() + finally: + pid_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'es-smoke.pid') + if os.path.exists(pid_path): # try reading the pid and kill the node + pid = int(read_fully(pid_path)) + os.kill(pid, signal.SIGKILL) + shutil.rmtree(tmp_dir) + print(' ' + '*' * 80) + print() + + +def parse_list(string): + return [x.strip() for x in string.split(',')] + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description='SmokeTests a Release Candidate from S3 staging repo') + parser.add_argument('--version', '-v', dest='version', default=None, + help='The Elasticsearch version to smoke-test', required=True) 
+ parser.add_argument('--hash', '-s', dest='hash', default=None, required=True, + help='The sha1 short hash of the git commit to smoketest') + parser.add_argument('--plugins', '-p', dest='plugins', default=[], required=False, type=parse_list, + help='A list of additional plugins to smoketest') + parser.add_argument('--verbose', '-b', dest='verbose', action='store_true', + help='Runs the script in verbose mode') + parser.add_argument('--fetch_url', '-u', dest='url', default=None, + help='Fetches the artifacts from this base URL instead of the default staging URL') + parser.set_defaults(hash=None) + parser.set_defaults(plugins=[]) + parser.set_defaults(version=None) + parser.set_defaults(verbose=False) + parser.set_defaults(url=None) + args = parser.parse_args() + plugins = args.plugins + version = args.version + hash = args.hash + verbose = args.verbose + url = args.url + files = [ x % {'version': version} for x in [ + 'org/elasticsearch/distribution/tar/elasticsearch/%(version)s/elasticsearch-%(version)s.tar.gz', + 'org/elasticsearch/distribution/zip/elasticsearch/%(version)s/elasticsearch-%(version)s.zip', + 'org/elasticsearch/distribution/deb/elasticsearch/%(version)s/elasticsearch-%(version)s.deb', + 'org/elasticsearch/distribution/rpm/elasticsearch/%(version)s/elasticsearch-%(version)s.rpm' + ]] + verify_java_version('1.7') + if url: + download_url = url + else: + download_url = '%s/%s-%s' % ('http://download.elasticsearch.org/elasticsearch/staging', version, hash) + download_and_verify(version, hash, files, download_url, plugins=DEFAULT_PLUGINS + plugins, verbose=verbose) + + + diff --git a/dev-tools/src/main/resources/ant/integration-tests.xml b/dev-tools/src/main/resources/ant/integration-tests.xml index b5439cb73db..7b97f3e089d 100644 --- a/dev-tools/src/main/resources/ant/integration-tests.xml +++ b/dev-tools/src/main/resources/ant/integration-tests.xml @@ -219,6 +219,30 @@ <extract-pid file="@{es.pidfile}" property="integ.pid"/> <echo>Shutting down external node PID ${integ.pid}</echo> + <!-- verify with jps that this actually is the correct pid. + See if we can find the line "pid org.elasticsearch.bootstrap.Elasticsearch" in the output of jps -l.--> + <local name="jps.pidline"/> + <local name="jps.executable"/> + <local name="environment"/> + <property environment="environment"/> + <property name="jps.executable" location="${environment.JAVA_HOME}/bin/jps"/> + <exec executable="${jps.executable}" failonerror="true"> + <arg value="-l"/> + <redirector outputproperty="jps.pidline"> + <outputfilterchain> + <linecontains> + <contains value="${integ.pid} org.elasticsearch.bootstrap.Elasticsearch"/> + </linecontains> + </outputfilterchain> + </redirector> + </exec> + <fail + message="pid file at @{es.pidfile} is ${integ.pid} but jps -l did not report any process with org.elasticsearch.bootstrap.Elasticsearch and this pid. + Did you run mvn clean? Maybe an old pid file is still lying around."> + <condition> + <equals arg1="${jps.pidline}" arg2=""/> + </condition> + </fail> <exec executable="taskkill" failonerror="true" osfamily="winnt"> <arg value="/F"/> @@ -271,8 +295,6 @@ <startup-elasticsearch/> </target> - <!-- TODO, for some more safety, add back some of the old jps logic and verify the pid is really an ES process! 
(fail otherwise) --> <target name="stop-external-cluster" if="integ.pidfile.exists"> <stop-node/> </target> diff --git a/dev-tools/src/main/resources/forbidden/core-signatures.txt b/dev-tools/src/main/resources/forbidden/core-signatures.txt index 796f3eec850..d95b793d1ef 100644 --- a/dev-tools/src/main/resources/forbidden/core-signatures.txt +++ b/dev-tools/src/main/resources/forbidden/core-signatures.txt @@ -84,3 +84,6 @@ java.util.concurrent.Future#cancel(boolean) @defaultMessage Don't try reading from paths that are not configured in Environment, resolve from Environment instead org.elasticsearch.common.io.PathUtils#get(java.lang.String, java.lang.String[]) org.elasticsearch.common.io.PathUtils#get(java.net.URI) + +@defaultMessage avoid adding additional dependencies on Guava +com.google.common.collect.Lists diff --git a/distribution/licenses/joda-time-2.8.2.jar.sha1 b/distribution/licenses/joda-time-2.8.2.jar.sha1 new file mode 100644 index 00000000000..cf1e6a935c7 --- /dev/null +++ b/distribution/licenses/joda-time-2.8.2.jar.sha1 @@ -0,0 +1 @@ +d27c24204c5e507b16fec01006b3d0f1ec42aed4 diff --git a/distribution/licenses/joda-time-2.8.jar.sha1 b/distribution/licenses/joda-time-2.8.jar.sha1 deleted file mode 100644 index 4e181bf932f..00000000000 --- a/distribution/licenses/joda-time-2.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9f2785d7184b97d005a44241ccaf980f43b9ccdb diff --git a/distribution/pom.xml b/distribution/pom.xml index 41a17293f45..4a22d12f458 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -90,6 +90,15 @@ </execution> </executions> </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-eclipse-plugin</artifactId> + <configuration> + <!-- Many of the modules in this build have the artifactId "elasticsearch" + which break importing into Eclipse without this. --> + <projectNameTemplate>[groupId].[artifactId]</projectNameTemplate> + </configuration> + </plugin> </plugins> <pluginManagement> diff --git a/distribution/src/main/resources/bin/elasticsearch b/distribution/src/main/resources/bin/elasticsearch index 2d831485a33..88639c502b4 100755 --- a/distribution/src/main/resources/bin/elasticsearch +++ b/distribution/src/main/resources/bin/elasticsearch @@ -121,7 +121,10 @@ case `uname` in ;; esac -export HOSTNAME=`hostname -s` +# full hostname passed through cut for portability on systems that do not support hostname -s +# export on separate line for shells that do not support combining definition and export +HOSTNAME=`hostname | cut -d. -f1` +export HOSTNAME # manual parsing to find out, if process should be detached daemonized=`echo $* | grep -E -- '(^-d |-d$| -d |--daemonize$|--daemonize )'` diff --git a/distribution/src/main/resources/bin/plugin b/distribution/src/main/resources/bin/plugin index 797d8e3a8ff..973d2f6300d 100755 --- a/distribution/src/main/resources/bin/plugin +++ b/distribution/src/main/resources/bin/plugin @@ -103,6 +103,9 @@ if [ -e "$CONF_FILE" ]; then esac fi -export HOSTNAME=`hostname -s` +# full hostname passed through cut for portability on systems that do not support hostname -s +# export on separate line for shells that do not support combining definition and export +HOSTNAME=`hostname | cut -d. 
-f1` +export HOSTNAME eval "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -Xmx64m -Xms16m -Delasticsearch -Des.path.home="\"$ES_HOME\"" $properties -cp "\"$ES_HOME/lib/*\"" org.elasticsearch.plugins.PluginManagerCliParser $args diff --git a/docs/plugins/cloud-aws.asciidoc b/docs/plugins/cloud-aws.asciidoc index 7b0fee374e3..1cab1473256 100644 --- a/docs/plugins/cloud-aws.asciidoc +++ b/docs/plugins/cloud-aws.asciidoc @@ -303,6 +303,9 @@ The following settings are supported: Number of retries in case of S3 errors. Defaults to `3`. +`read_only`:: + + Makes repository read-only. coming[2.1.0] Defaults to `false`. The S3 repositories use the same credentials as the rest of the AWS services provided by this plugin (`discovery`). See <<cloud-aws-usage>> for details. diff --git a/docs/plugins/cloud-azure.asciidoc b/docs/plugins/cloud-azure.asciidoc index 80fd189ad2e..3d0e7d0cc39 100644 --- a/docs/plugins/cloud-azure.asciidoc +++ b/docs/plugins/cloud-azure.asciidoc @@ -545,6 +545,10 @@ The Azure repository supports following settings: setting doesn't affect index files that are already compressed by default. Defaults to `false`. +`read_only`:: + + Makes repository read-only. coming[2.1.0] Defaults to `false`. + Some examples, using scripts: [source,json] diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index bdc03a0998b..46111e32a17 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -20,8 +20,10 @@ filters. |`char_filter` |An optional list of logical / registered name of char filters. -|`position_offset_gap` |An optional number of positions to increment -between each field value of a field using this analyzer. +|`position_increment_gap` |An optional number of positions to increment +between each field value of a field using this analyzer. Defaults to 100. +100 was chosen because it prevents phrase queries with reasonably large +slops (less than 100) from matching terms across field values. |======================================================================= Here is an example: [source,js] -------------------------------------------------- index : analysis : - analyzer : + analyzer : myAnalyzer2 : type : custom tokenizer : myTokenizer1 filter : [myTokenFilter1, myTokenFilter2] char_filter : [my_html] - position_offset_gap: 256 + position_increment_gap: 256 tokenizer : myTokenizer1 : type : standard diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc index 134bdeb88f2..cd239922de2 100644 --- a/docs/reference/docs/index_.asciidoc +++ b/docs/reference/docs/index_.asciidoc @@ -353,7 +353,23 @@ occurs, so that the document appears in search results immediately, the `refresh` parameter can be set to `true`. Setting this option to `true` should *ONLY* be done after careful thought and verification that it does not lead to poor performance, both from an indexing and a search standpoint. Note, getting -a document using the get API is completely realtime. +a document using the get API is completely realtime and doesn't require a +refresh. + +[float] +[[index-noop]] +=== Noop Updates + +When updating a document using the index api, a new version of the document is +always created even if the document hasn't changed. If this isn't acceptable, +use the `_update` api with `detect_noop` set to true. 
This option isn't +available on the index api because the index api doesn't fetch the old source +and isn't able to compare it against the new source. + +There isn't a hard and fast rule about when noop updates aren't acceptable. +It's a combination of lots of factors like how frequently your data source +sends updates that are actually noops and how many queries per second +elasticsearch runs on the shard receiving the updates. [float] [[timeout]] diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 06958b4e624..7d2dbfaeede 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -114,25 +114,23 @@ If both `doc` and `script` is specified, then `doc` is ignored. Best is to put your field pairs of the partial document in the script itself. [float] -=== Detecting noop - -By default if `doc` is specified then the document is always updated even -if the merging process doesn't cause any changes. Specifying `detect_noop` -as `true` will cause Elasticsearch to check if there are changes and, if -there aren't, turn the update request into a noop. For example: - +=== Detecting noop updates +If `doc` is specified, its value is merged with the existing `_source`. By +default the document is only reindexed if the new `_source` field differs from +the old. Setting `detect_noop` to `false` will cause Elasticsearch to always +update the document even if it hasn't changed. For example: [source,js] -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ "doc" : { "name" : "new_name" }, - "detect_noop": true + "detect_noop": false }' -------------------------------------------------- -If `name` was `new_name` before the request was sent then the entire update -request is ignored. +If `name` was `new_name` before the request was sent then the document is still +reindexed. [[upserts]] [float] diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index 4d9578624c1..e69cfb43782 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -860,7 +860,7 @@ curl -XPOST 'localhost:9200/bank/_search?pretty' -d ' In the previous section, we skipped over a little detail called the document score (`_score` field in the search results). The score is a numeric value that is a relative measure of how well the document matches the search query that we specified. The higher the score, the more relevant the document is, the lower the score, the less relevant the document is. -But queries do not always to produce scores, in particular when they are only used for "filtering" the document set. Elasticsearch detects these situations and automatically optimizes query execution in order not to compute useless scores. +But queries do not always need to produce scores, in particular when they are only used for "filtering" the document set. Elasticsearch detects these situations and automatically optimizes query execution in order not to compute useless scores. To understand filters, let's first introduce the <<query-dsl-filtered-query,`filtered` query>>, which allows you to combine a query (like `match_all`, `match`, `bool`, etc.) together with another query which is only used for filtering. As an example, let's introduce the <<query-dsl-range-query,`range` query>>, which allows us to filter documents by a range of values. This is generally used for numeric or date filtering. 
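The `detect_noop` behaviour documented above is easy to exercise against a local node. Below is a minimal sketch (not part of the commit), assuming a node on 127.0.0.1:9200 and an existing document at `test/type1/1`, both taken from the curl example above; it uses the same `http.client` module the smoke-test script relies on:

[source,python]
--------------------------------------------------
import json
from http.client import HTTPConnection

# Sketch only: mirrors the curl example above. With detect_noop left at its
# default of true (as of 2.1), re-sending an identical name is a noop; with
# detect_noop set to false the document is always reindexed.
conn = HTTPConnection('127.0.0.1', 9200, timeout=10)
body = json.dumps({'doc': {'name': 'new_name'}, 'detect_noop': False})
conn.request('POST', '/test/type1/1/_update', body=body,
             headers={'Content-Type': 'application/json'})
res = conn.getresponse()
print(res.status, res.read().decode('utf-8'))
conn.close()
--------------------------------------------------

Running it twice and comparing the returned `_version` values is a quick way to check whether an update was treated as a noop: a noop leaves `_version` unchanged.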
diff --git a/docs/reference/mapping/fields/ttl-field.asciidoc b/docs/reference/mapping/fields/ttl-field.asciidoc index 26bc3e74417..07ce8a86b9e 100644 --- a/docs/reference/mapping/fields/ttl-field.asciidoc +++ b/docs/reference/mapping/fields/ttl-field.asciidoc @@ -104,3 +104,7 @@ may still be retrieved before they are purged. How many deletions are handled by a single <<docs-bulk,`bulk`>> request. The default value is `10000`. +==== Note on `detect_noop` +If an update tries to update just the `_ttl` without changing the `_source` of +the document, its expiration time won't be updated if `detect_noop` is `true`. +In 2.1 `detect_noop` defaults to `true`. diff --git a/docs/reference/mapping/params.asciidoc b/docs/reference/mapping/params.asciidoc index 119ce820ee4..719fa00acf3 100644 --- a/docs/reference/mapping/params.asciidoc +++ b/docs/reference/mapping/params.asciidoc @@ -28,7 +28,7 @@ The following mapping parameters are common to some or all field datatypes: * <<multi-fields,`fields`>> * <<norms,`norms`>> * <<null-value,`null_value`>> -* <<position-offset-gap,`position_offset_gap`>> +* <<position-increment-gap,`position_increment_gap`>> * <<properties,`properties`>> * <<search-analyzer,`search_analyzer`>> * <<similarity,`similarity`>> @@ -78,7 +78,7 @@ include::params/norms.asciidoc[] include::params/null-value.asciidoc[] -include::params/position-offset-gap.asciidoc[] +include::params/position-increment-gap.asciidoc[] include::params/precision-step.asciidoc[] diff --git a/docs/reference/mapping/params/position-offset-gap.asciidoc b/docs/reference/mapping/params/position-increment-gap.asciidoc similarity index 86% rename from docs/reference/mapping/params/position-offset-gap.asciidoc rename to docs/reference/mapping/params/position-increment-gap.asciidoc index d22dbd8bb1e..918e3d493a5 100644 --- a/docs/reference/mapping/params/position-offset-gap.asciidoc +++ b/docs/reference/mapping/params/position-increment-gap.asciidoc @@ -1,5 +1,5 @@ -[[position-offset-gap]] -=== `position_offset_gap` +[[position-increment-gap]] +=== `position_increment_gap` <<mapping-index,Analyzed>> string fields take term <<index-options,positions>> into account, in order to be able to support @@ -30,7 +30,7 @@ GET /my_index/groups/_search // AUTOSENSE <1> This phrase query matches our document, even though `Abraham` and `Lincoln` are in separate strings. -The `position_offset_gap` can introduce a fake gap between each array element. For instance: +The `position_increment_gap` can introduce a fake gap between each array element. For instance: [source,js] -------------------------------------------------- @@ -41,7 +41,7 @@ PUT my_index "properties": { "names": { "type": "string", - "position_offset_gap": 50 <1> + "position_increment_gap": 50 <1> } } } @@ -67,7 +67,7 @@ GET /my_index/groups/_search last term in the previous array element. <2> The phrase query no longer matches our document. -TIP: The `position_offset_gap` setting is allowed to have different settings +TIP: The `position_increment_gap` setting is allowed to have different settings for fields of the same name in the same index. Its value can be updated on existing fields using the <<indices-put-mapping,PUT mapping API>>. diff --git a/docs/reference/mapping/types/string.asciidoc b/docs/reference/mapping/types/string.asciidoc index 61041adcade..d5d7b7a0fce 100644 --- a/docs/reference/mapping/types/string.asciidoc +++ b/docs/reference/mapping/types/string.asciidoc @@ -141,10 +141,15 @@ Defaults depend on the <<mapping-index,`index`>> setting: values. 
Defaults to `null`, which means the field is treated as missing. If the field is `analyzed`, the `null_value` will also be analyzed. -<<position-offset-gap,`position_offset_gap`>>:: +<<position-increment-gap,`position_increment_gap`>>:: -    The number of fake term positions which should be inserted between each -    element of an array of strings. Defaults to 0. +    The number of fake term positions which should be inserted between each +    element of an array of strings. Defaults to the position_increment_gap +    configured on the analyzer which defaults to 100. 100 was chosen because it +    prevents phrase queries with reasonably large slops (less than 100) from +    matching terms across field values. <<mapping-store,`store`>>:: @@ -166,5 +171,3 @@ Defaults depend on the <<mapping-index,`index`>> setting: Whether term vectors should be stored for an <<mapping-index,`analyzed`>> field. Defaults to `no`. - - diff --git a/docs/reference/migration/migrate_2_0.asciidoc b/docs/reference/migration/migrate_2_0.asciidoc index bc664c2920b..fe0319cdd0a 100644 --- a/docs/reference/migration/migrate_2_0.asciidoc +++ b/docs/reference/migration/migrate_2_0.asciidoc @@ -13,13 +13,6 @@ latest 1.x version of Elasticsearch first, in order to upgrade your indices or to delete the old indices. Elasticsearch will not start in the presence of old indices. -[float] -=== Network binds to localhost only - -Elasticsearch now binds to the loopback interface by default (usually -`127.0.0.1` or `::1`). The `network.host` setting can be specified to change -this behavior. - [float] === Elasticsearch migration plugin @@ -29,6 +22,8 @@ Elasticsearch 2.0. Please install and run the plugin *before* upgrading. include::migrate_2_0/removals.asciidoc[] +include::migrate_2_0/network.asciidoc[] + include::migrate_2_0/striping.asciidoc[] include::migrate_2_0/mapping.asciidoc[] @@ -55,4 +50,4 @@ include::migrate_2_0/settings.asciidoc[] include::migrate_2_0/stats.asciidoc[] -include::migrate_2_0/java.asciidoc[] \ No newline at end of file +include::migrate_2_0/java.asciidoc[] diff --git a/docs/reference/migration/migrate_2_0/mapping.asciidoc b/docs/reference/migration/migrate_2_0/mapping.asciidoc index a50fc9c6a62..0f3b94ea076 100644 --- a/docs/reference/migration/migrate_2_0/mapping.asciidoc +++ b/docs/reference/migration/migrate_2_0/mapping.asciidoc @@ -164,6 +164,12 @@ fields in differnt types, this warning has been relaxed: type names may now contain dots, but they may not *begin* with a dot. The only exception to this is the special `.percolator` type. +==== Type names may not be longer than 255 characters + +Mapping type names may not be longer than 255 characters. Long type names +will continue to function on indices created before upgrade, but it will not +be possible to create types with long names in new indices. + ==== Types may no longer be deleted In 1.x it was possible to delete a type mapping, along with all of the @@ -364,6 +370,16 @@ Along with this change, the following settings have ben removed: * `index.mapper.default_mapping_location` * `index.mapper.default_percolator_mapping_location` +==== Fielddata formats + +Now that doc values are the default for fielddata, specialized in-memory +formats have become an esoteric option. These fielddata formats have been removed: + +* `fst` on string fields +* `compressed` on geo points + +The default fielddata format will be used instead. 
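The `position_increment_gap` default discussed in string.asciidoc above can be verified with one query. A hedged sketch (not part of the commit), assuming a local 2.0 node and the `my_index`/`groups` example from position-increment-gap.asciidoc, whose document holds `names: ["John Abraham", "Lincoln Smith"]`:

[source,python]
--------------------------------------------------
import json
from http.client import HTTPConnection

# With the new default gap of 100, a phrase query with any slop below 100
# cannot match "Abraham Lincoln" across the two array elements, so this
# search is expected to return zero hits.
conn = HTTPConnection('127.0.0.1', 9200, timeout=10)
query = json.dumps({
    'query': {
        'match_phrase': {
            'names': {'query': 'Abraham Lincoln', 'slop': 50}
        }
    }
})
conn.request('POST', '/my_index/groups/_search', body=query,
             headers={'Content-Type': 'application/json'})
print(conn.getresponse().read().decode('utf-8'))
conn.close()
--------------------------------------------------

Raising the slop to 100 or more (or lowering the field's `position_increment_gap`) should let the same phrase match across values again.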
+ ==== Posting and doc-values codecs It is no longer possible to specify per-field postings and doc values formats @@ -384,7 +400,15 @@ The `compress` and `compress_threshold` options have been removed from the default. If you would like to increase compression levels, use the new <<index-codec,`index.codec: best_compression`>> setting instead. +==== position_offset_gap +The `position_offset_gap` option is renamed to 'position_increment_gap'. This was +done to clear up confusion: Elasticsearch's 'position_increment_gap' is now +mapped directly to Lucene's 'position_increment_gap'. - - +The default `position_increment_gap` is now 100. Indices created in Elasticsearch +2.0.0 will default to using 100 and indices created before that will continue +to use the old default of 0. This was done to prevent phrase queries from +matching across different values of the same term unexpectedly. Specifically, +100 was chosen to cause phrase queries with slops up to 99 to match only within +a single value of a field. diff --git a/docs/reference/migration/migrate_2_0/network.asciidoc b/docs/reference/migration/migrate_2_0/network.asciidoc new file mode 100644 index 00000000000..78a482c3d17 --- /dev/null +++ b/docs/reference/migration/migrate_2_0/network.asciidoc @@ -0,0 +1,42 @@ +=== Network changes + +==== Bind to localhost + +Elasticsearch 2.x will only bind to localhost by default. It will try to bind +to both 127.0.0.1 (IPv4) and [::1] (IPv6), but will work happily in +environments where only IPv4 or IPv6 is available. This change prevents +Elasticsearch from trying to connect to other nodes on your network unless you +specifically tell it to do so. When moving to production you should configure +the `network.host` parameter, either in the `elasticsearch.yml` config file or +on the command line: + +[source,sh] +-------------------- +bin/elasticsearch --network.host 192.168.1.5 +bin/elasticsearch --network.host _non_loopback_ +-------------------- + +The full list of options that network.host accepts can be found in <<modules-network>>. + +==== Multicast removed + +Multicast has been removed (although it is still +{plugins}/discovery-multicast.html[provided as a plugin] for now). Instead, +and only when bound to localhost, Elasticsearch will use unicast to contact +the first 5 ports in the `transport.tcp.port` range, which defaults to +`9300-9400`. + +This preserves the zero-config auto-clustering experience for the developer, +but it means that you will have to provide a list of <<unicast,unicast hosts>> +when moving to production, for instance: + +[source,yaml] +--------------------- +discovery.zen.ping.unicast.hosts: [ 192.168.1.2, 192.168.1.3 ] +--------------------- + +You don’t need to list all of the nodes in your cluster as unicast hosts, but +you should specify at least a quorum (majority) of master-eligible nodes. A +big cluster will typically have three dedicated master nodes, in which case we +recommend listing all three of them as unicast hosts. + diff --git a/docs/reference/migration/migrate_2_0/query_dsl.asciidoc b/docs/reference/migration/migrate_2_0/query_dsl.asciidoc index 31283e9ce33..cb9e1581a91 100644 --- a/docs/reference/migration/migrate_2_0/query_dsl.asciidoc +++ b/docs/reference/migration/migrate_2_0/query_dsl.asciidoc @@ -178,7 +178,7 @@ The parameter `percent_terms_to_match` has been removed in favor of The `limit` filter is deprecated and becomes a no-op. You can achieve similar behaviour using the <<search-request-body,terminate_after>> parameter. 
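Since the `limit` filter is now a no-op, the `terminate_after` body parameter mentioned above is the closest replacement. A small sketch (not part of the commit), assuming a local node and a hypothetical `test` index:

[source,python]
--------------------------------------------------
import json
from http.client import HTTPConnection

# terminate_after stops collecting hits on each shard once the given number
# of documents has been examined; the response flags this with
# "terminated_early": true.
conn = HTTPConnection('127.0.0.1', 9200, timeout=10)
body = json.dumps({'terminate_after': 10, 'query': {'match_all': {}}})
conn.request('POST', '/test/_search', body=body,
             headers={'Content-Type': 'application/json'})
print(conn.getresponse().read().decode('utf-8'))
conn.close()
--------------------------------------------------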
-==== Jave plugins registering custom queries +==== Java plugins registering custom queries Java plugins that register custom queries can do so by using the `IndicesQueriesModule#addQuery(Class<? extends QueryParser>)` method. Other diff --git a/docs/reference/migration/migrate_2_0/removals.asciidoc b/docs/reference/migration/migrate_2_0/removals.asciidoc index f02bf3a3f7f..379565cb90e 100644 --- a/docs/reference/migration/migrate_2_0/removals.asciidoc +++ b/docs/reference/migration/migrate_2_0/removals.asciidoc @@ -40,6 +40,24 @@ can install the plugin with: ./bin/plugin install delete-by-query ------------------ +See {plugins}/plugins-delete-by-query.html for more information. + +==== Multicast Discovery is now a plugin + +Support for multicast is very patchy. Linux doesn’t allow multicast listening on localhost, +while OS/X sends multicast broadcasts across all interfaces regardless of the configured +bind address. On top of that, some networks have multicast disabled by default. + +This feature has been moved to a plugin. The default discovery mechanism now uses +unicast, with a default setup which looks for the first 5 ports on localhost. If you +still need to use multicast discovery, you can install the plugin with: + +[source,sh] +------------------ +./bin/plugin install discovery-multicast +------------------ + +See {plugins}/discovery-multicast.html for more information. ==== `_shutdown` API diff --git a/docs/reference/migration/migrate_2_0/settings.asciidoc b/docs/reference/migration/migrate_2_0/settings.asciidoc index b11fb0c0a9f..f5898a4ea19 100644 --- a/docs/reference/migration/migrate_2_0/settings.asciidoc +++ b/docs/reference/migration/migrate_2_0/settings.asciidoc @@ -38,6 +38,28 @@ PUT _settings In 2.0, the above request will throw an exception. Instead the refresh interval should be set to `"1s"` for one second. +==== Merge and merge throttling settings + +The tiered merge policy is now the only supported merge policy. These settings +have been removed: + +* `index.merge.policy.type` +* `index.merge.policy.min_merge_size` +* `index.merge.policy.max_merge_size` +* `index.merge.policy.merge_factor` +* `index.merge.policy.max_merge_docs` +* `index.merge.policy.calibrate_size_by_deletes` +* `index.merge.policy.min_merge_docs` + +Merge throttling now uses a feedback loop to auto-throttle. These settings +have been removed: + +* `indices.store.throttle.type` +* `indices.store.throttle.max_bytes_per_sec` +* `index.store.throttle.type` +* `index.store.throttle.max_bytes_per_sec` + ==== Shadow replica settings The `node.enable_custom_paths` setting has been removed and replaced by the diff --git a/docs/reference/migration/migrate_2_0/stats.asciidoc b/docs/reference/migration/migrate_2_0/stats.asciidoc index 84635f5713e..b75246e4f11 100644 --- a/docs/reference/migration/migrate_2_0/stats.asciidoc +++ b/docs/reference/migration/migrate_2_0/stats.asciidoc @@ -45,16 +45,6 @@ used separately to control whether `routing_nodes` should be returned. The deprecated index status API has been removed. -==== `cat` APIs verbose by default - -The `cat` APIs now default to being verbose, which means they output column -headers by default. Verbosity can be turned off with the `v` parameter: - -[source,sh] ------------------ -GET _cat/shards?v=0 ------------------ - ==== Nodes Stats API Queue lengths are now reported as basic numeric so they can easily processed by code. 
Before we used a human diff --git a/docs/reference/migration/migrate_2_1.asciidoc b/docs/reference/migration/migrate_2_1.asciidoc index 7542fb3d1df..63092d9250e 100644 --- a/docs/reference/migration/migrate_2_1.asciidoc +++ b/docs/reference/migration/migrate_2_1.asciidoc @@ -25,3 +25,13 @@ GET /my_index/_search?scroll=2m Scroll requests sorted by `_doc` have been optimized to more efficiently resume from where the previous request stopped, so this will have the same performance characteristics as the former `scan` search type. + +=== Update changes + +==== Updates now `detect_noop` by default + +We've switched the default value of the `detect_noop` option from `false` to +`true`. This means that Elasticsearch will ignore updates that don't change +the source unless you explicitly set `"detect_noop": false`. `detect_noop` was +always computationally cheap compared to the expense of the update, which can be +thought of as a delete operation followed by an index operation. diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc index 32f412e204c..90f2a026cb9 100644 --- a/docs/reference/modules/snapshots.asciidoc +++ b/docs/reference/modules/snapshots.asciidoc @@ -121,6 +121,7 @@ The following settings are supported: using size value notation, i.e. 1g, 10m, 5k. Defaults to `null` (unlimited chunk size). `max_restore_bytes_per_sec`:: Throttles per node restore rate. Defaults to `40mb` per second. `max_snapshot_bytes_per_sec`:: Throttles per node snapshot rate. Defaults to `40mb` per second. +`readonly`:: Makes repository read-only. coming[2.1.0] Defaults to `false`. [float] ===== Read-only URL Repository diff --git a/docs/river/index.asciidoc b/docs/river/index.asciidoc deleted file mode 100644 index 6ccbbb9d449..00000000000 --- a/docs/river/index.asciidoc +++ /dev/null @@ -1,6 +0,0 @@ -[[river]] -= Rivers - -Rivers were deprecated in Elasticsearch 1.5 and removed in Elasticsearch 2.0. - -See https://www.elastic.co/blog/deprecating_rivers for more details. 
\ No newline at end of file diff --git a/plugins/cloud-aws/licenses/aws-java-sdk-core-1.10.0.jar.sha1 b/plugins/cloud-aws/licenses/aws-java-sdk-core-1.10.0.jar.sha1 deleted file mode 100644 index c9a6d32ab96..00000000000 --- a/plugins/cloud-aws/licenses/aws-java-sdk-core-1.10.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9732a4e80aad23101faa442700c2172a37257c43 diff --git a/plugins/cloud-aws/licenses/aws-java-sdk-core-1.10.12.jar.sha1 b/plugins/cloud-aws/licenses/aws-java-sdk-core-1.10.12.jar.sha1 new file mode 100644 index 00000000000..659b6cc62f5 --- /dev/null +++ b/plugins/cloud-aws/licenses/aws-java-sdk-core-1.10.12.jar.sha1 @@ -0,0 +1 @@ +7ff51040bbcc9085dcb9a24a2c2a3cc7ac995988 diff --git a/plugins/cloud-aws/licenses/aws-java-sdk-ec2-1.10.0.jar.sha1 b/plugins/cloud-aws/licenses/aws-java-sdk-ec2-1.10.0.jar.sha1 deleted file mode 100644 index 4c132113999..00000000000 --- a/plugins/cloud-aws/licenses/aws-java-sdk-ec2-1.10.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b5dc3760021fba0ae67b4f11d37ffa52a4eac4f4 diff --git a/plugins/cloud-aws/licenses/aws-java-sdk-ec2-1.10.12.jar.sha1 b/plugins/cloud-aws/licenses/aws-java-sdk-ec2-1.10.12.jar.sha1 new file mode 100644 index 00000000000..60bae7e37ee --- /dev/null +++ b/plugins/cloud-aws/licenses/aws-java-sdk-ec2-1.10.12.jar.sha1 @@ -0,0 +1 @@ +b0712cc659e72b9da0f5b03872d2476ab4a695f7 diff --git a/plugins/cloud-aws/licenses/aws-java-sdk-kms-1.10.0.jar.sha1 b/plugins/cloud-aws/licenses/aws-java-sdk-kms-1.10.0.jar.sha1 deleted file mode 100644 index 02adba33c64..00000000000 --- a/plugins/cloud-aws/licenses/aws-java-sdk-kms-1.10.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -48f0aab551fa9e2eb4c81e2debf40e9fff595405 diff --git a/plugins/cloud-aws/licenses/aws-java-sdk-kms-1.10.12.jar.sha1 b/plugins/cloud-aws/licenses/aws-java-sdk-kms-1.10.12.jar.sha1 new file mode 100644 index 00000000000..1948b0d2b01 --- /dev/null +++ b/plugins/cloud-aws/licenses/aws-java-sdk-kms-1.10.12.jar.sha1 @@ -0,0 +1 @@ +31afbe46b65e9933316c7e8dfb8b88dc4b37b6ba diff --git a/plugins/cloud-aws/licenses/aws-java-sdk-s3-1.10.0.jar.sha1 b/plugins/cloud-aws/licenses/aws-java-sdk-s3-1.10.0.jar.sha1 deleted file mode 100644 index a76faf74611..00000000000 --- a/plugins/cloud-aws/licenses/aws-java-sdk-s3-1.10.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -891e42d29e8f9474f83c050e4ee6a4512d4f4e71 diff --git a/plugins/cloud-aws/licenses/aws-java-sdk-s3-1.10.12.jar.sha1 b/plugins/cloud-aws/licenses/aws-java-sdk-s3-1.10.12.jar.sha1 new file mode 100644 index 00000000000..9814735f4d7 --- /dev/null +++ b/plugins/cloud-aws/licenses/aws-java-sdk-s3-1.10.12.jar.sha1 @@ -0,0 +1 @@ +c9e2593fdf398c5f8906a704db037d17b2de4b2a diff --git a/plugins/cloud-aws/pom.xml b/plugins/cloud-aws/pom.xml index 157a35077d8..7d0ea71940a 100644 --- a/plugins/cloud-aws/pom.xml +++ b/plugins/cloud-aws/pom.xml @@ -16,7 +16,7 @@ <properties> <elasticsearch.plugin.classname>org.elasticsearch.plugin.cloud.aws.CloudAwsPlugin</elasticsearch.plugin.classname> - <amazonaws.version>1.10.0</amazonaws.version> + <amazonaws.version>1.10.12</amazonaws.version> <tests.jvms>1</tests.jvms> <tests.rest.suite>cloud_aws</tests.rest.suite> <tests.rest.load_packaged>false</tests.rest.load_packaged> diff --git a/plugins/cloud-aws/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java b/plugins/cloud-aws/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java index 0f52a318685..24c7196c681 100644 --- a/plugins/cloud-aws/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java +++ 
b/plugins/cloud-aws/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java @@ -34,7 +34,6 @@ import java.io.InputStream; import java.net.URL; import java.util.Date; import java.util.List; - import org.elasticsearch.common.SuppressForbidden; /** @@ -579,4 +578,54 @@ public class AmazonS3Wrapper implements AmazonS3 { public boolean isRequesterPaysEnabled(String bucketName) throws AmazonServiceException, AmazonClientException { return delegate.isRequesterPaysEnabled(bucketName); } + + @Override + public ObjectListing listNextBatchOfObjects(ListNextBatchOfObjectsRequest listNextBatchOfObjectsRequest) throws AmazonClientException, AmazonServiceException { + return delegate.listNextBatchOfObjects(listNextBatchOfObjectsRequest); + } + + @Override + public VersionListing listNextBatchOfVersions(ListNextBatchOfVersionsRequest listNextBatchOfVersionsRequest) throws AmazonClientException, AmazonServiceException { + return delegate.listNextBatchOfVersions(listNextBatchOfVersionsRequest); + } + + @Override + public Owner getS3AccountOwner(GetS3AccountOwnerRequest getS3AccountOwnerRequest) throws AmazonClientException, AmazonServiceException { + return delegate.getS3AccountOwner(getS3AccountOwnerRequest); + } + + @Override + public BucketLoggingConfiguration getBucketLoggingConfiguration(GetBucketLoggingConfigurationRequest getBucketLoggingConfigurationRequest) throws AmazonClientException, AmazonServiceException { + return delegate.getBucketLoggingConfiguration(getBucketLoggingConfigurationRequest); + } + + @Override + public BucketVersioningConfiguration getBucketVersioningConfiguration(GetBucketVersioningConfigurationRequest getBucketVersioningConfigurationRequest) throws AmazonClientException, AmazonServiceException { + return delegate.getBucketVersioningConfiguration(getBucketVersioningConfigurationRequest); + } + + @Override + public BucketLifecycleConfiguration getBucketLifecycleConfiguration(GetBucketLifecycleConfigurationRequest getBucketLifecycleConfigurationRequest) { + return delegate.getBucketLifecycleConfiguration(getBucketLifecycleConfigurationRequest); + } + + @Override + public BucketCrossOriginConfiguration getBucketCrossOriginConfiguration(GetBucketCrossOriginConfigurationRequest getBucketCrossOriginConfigurationRequest) { + return delegate.getBucketCrossOriginConfiguration(getBucketCrossOriginConfigurationRequest); + } + + @Override + public BucketTaggingConfiguration getBucketTaggingConfiguration(GetBucketTaggingConfigurationRequest getBucketTaggingConfigurationRequest) { + return delegate.getBucketTaggingConfiguration(getBucketTaggingConfigurationRequest); + } + + @Override + public BucketNotificationConfiguration getBucketNotificationConfiguration(GetBucketNotificationConfigurationRequest getBucketNotificationConfigurationRequest) throws AmazonClientException, AmazonServiceException { + return delegate.getBucketNotificationConfiguration(getBucketNotificationConfigurationRequest); + } + + @Override + public BucketReplicationConfiguration getBucketReplicationConfiguration(GetBucketReplicationConfigurationRequest getBucketReplicationConfigurationRequest) throws AmazonServiceException, AmazonClientException { + return delegate.getBucketReplicationConfiguration(getBucketReplicationConfigurationRequest); + } } diff --git a/plugins/cloud-azure/src/test/java/org/elasticsearch/cloud/azure/AzureComputeServiceSimpleMock.java b/plugins/cloud-azure/src/test/java/org/elasticsearch/cloud/azure/AzureComputeServiceSimpleMock.java index 6323e91d1df..997a8aa17dc 100644 --- 
a/plugins/cloud-azure/src/test/java/org/elasticsearch/cloud/azure/AzureComputeServiceSimpleMock.java +++ b/plugins/cloud-azure/src/test/java/org/elasticsearch/cloud/azure/AzureComputeServiceSimpleMock.java @@ -78,10 +78,10 @@ public class AzureComputeServiceSimpleMock extends AzureComputeServiceAbstractMo endpoint.setName("elasticsearch"); endpoint.setVirtualIPAddress(InetAddress.getLoopbackAddress()); endpoint.setPort(9400); - instance.setInstanceEndpoints(CollectionUtils.newArrayList(endpoint)); + instance.setInstanceEndpoints(CollectionUtils.newSingletonArrayList(endpoint)); - deployment.setRoleInstances(CollectionUtils.newArrayList(instance)); - response.setDeployments(CollectionUtils.newArrayList(deployment)); + deployment.setRoleInstances(CollectionUtils.newSingletonArrayList(instance)); + response.setDeployments(CollectionUtils.newSingletonArrayList(deployment)); return response; } diff --git a/plugins/cloud-azure/src/test/java/org/elasticsearch/cloud/azure/AzureComputeServiceTwoNodesMock.java b/plugins/cloud-azure/src/test/java/org/elasticsearch/cloud/azure/AzureComputeServiceTwoNodesMock.java index a87a4ef3434..ee8bf350f63 100644 --- a/plugins/cloud-azure/src/test/java/org/elasticsearch/cloud/azure/AzureComputeServiceTwoNodesMock.java +++ b/plugins/cloud-azure/src/test/java/org/elasticsearch/cloud/azure/AzureComputeServiceTwoNodesMock.java @@ -19,15 +19,22 @@ package org.elasticsearch.cloud.azure; -import com.microsoft.windowsazure.management.compute.models.*; +import com.microsoft.windowsazure.management.compute.models.DeploymentSlot; +import com.microsoft.windowsazure.management.compute.models.DeploymentStatus; +import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; +import com.microsoft.windowsazure.management.compute.models.InstanceEndpoint; +import com.microsoft.windowsazure.management.compute.models.RoleInstance; import org.elasticsearch.cloud.azure.management.AzureComputeServiceAbstractMock; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.Plugin; import java.net.InetAddress; +import java.util.ArrayList; +import java.util.Arrays; + +import static org.elasticsearch.common.util.CollectionUtils.newSingletonArrayList; /** @@ -78,7 +85,7 @@ public class AzureComputeServiceTwoNodesMock extends AzureComputeServiceAbstract endpoint1.setName("elasticsearch"); endpoint1.setVirtualIPAddress(InetAddress.getLoopbackAddress()); endpoint1.setPort(9400); - instance1.setInstanceEndpoints(CollectionUtils.newArrayList(endpoint1)); + instance1.setInstanceEndpoints(newSingletonArrayList(endpoint1)); // Fake a first instance RoleInstance instance2 = new RoleInstance(); @@ -92,11 +99,11 @@ public class AzureComputeServiceTwoNodesMock extends AzureComputeServiceAbstract endpoint2.setName("elasticsearch"); endpoint2.setVirtualIPAddress(InetAddress.getLoopbackAddress()); endpoint2.setPort(9401); - instance2.setInstanceEndpoints(CollectionUtils.newArrayList(endpoint2)); + instance2.setInstanceEndpoints(newSingletonArrayList(endpoint2)); - deployment.setRoleInstances(CollectionUtils.newArrayList(instance1, instance2)); + deployment.setRoleInstances(new ArrayList<>(Arrays.asList(instance1, instance2))); - response.setDeployments(CollectionUtils.newArrayList(deployment)); + response.setDeployments(newSingletonArrayList(deployment)); return response; } diff --git 
a/plugins/cloud-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java b/plugins/cloud-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java index 7e6d3548674..7321d032d69 100644 --- a/plugins/cloud-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java +++ b/plugins/cloud-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java @@ -29,12 +29,12 @@ import com.google.api.services.compute.model.Instance; import com.google.api.services.compute.model.InstanceList; import com.google.common.base.Function; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.CollectionUtils; import java.io.IOException; import java.security.GeneralSecurityException; @@ -43,6 +43,8 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import static org.elasticsearch.common.util.CollectionUtils.eagerTransform; + /** * */ @@ -62,7 +64,7 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent<GceCompute logger.debug("get instances for project [{}], zones [{}]", project, zones); - List<List<Instance>> instanceListByZone = Lists.transform(zones, new Function<String, List<Instance>>() { + List<List<Instance>> instanceListByZone = eagerTransform(zones, new Function<String, List<Instance>>() { @Override public List<Instance> apply(String zoneId) { try { @@ -83,7 +85,7 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent<GceCompute }); // Collapse instances from all zones into one neat list - List<Instance> instanceList = Lists.newArrayList(Iterables.concat(instanceListByZone)); + List<Instance> instanceList = CollectionUtils.iterableAsArrayList(Iterables.concat(instanceListByZone)); if (instanceList.size() == 0) { logger.warn("disabling GCE discovery. 
Can not get list of nodes"); diff --git a/plugins/delete-by-query/rest-api-spec/api/delete_by_query.json b/plugins/delete-by-query/rest-api-spec/api/delete_by_query.json index 54259b086c8..5b2fa006fa3 100644 --- a/plugins/delete-by-query/rest-api-spec/api/delete_by_query.json +++ b/plugins/delete-by-query/rest-api-spec/api/delete_by_query.json @@ -1,6 +1,6 @@ { "delete_by_query": { - "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete-by-query.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-delete-by-query.html", "methods": ["DELETE"], "url": { "path": "/{index}/_query", diff --git a/pom.xml b/pom.xml index 8295e8ebe86..b4fa4af2da0 100644 --- a/pom.xml +++ b/pom.xml @@ -354,7 +354,7 @@ <artifactId>joda-time</artifactId> <!-- joda 2.0 moved to using volatile fields for datetime --> <!-- When updating to a new version, make sure to update our copy of BaseDateTime --> - <version>2.8</version> + <version>2.8.2</version> </dependency> <dependency> <groupId>org.joda</groupId> @@ -528,7 +528,7 @@ <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-enforcer-plugin</artifactId> - <version>1.4</version> + <version>1.4.1</version> <executions> <execution> <id>enforce-versions</id> @@ -982,7 +982,7 @@ <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-eclipse-plugin</artifactId> - <version>2.9</version> + <version>2.10</version> <configuration> <buildOutputDirectory>eclipse-build</buildOutputDirectory> <downloadSources>true</downloadSources> diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json index eabc596360c..07c72a2ac98 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json @@ -32,7 +32,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json index 11d0915fce0..9de5a729faa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.allocation.json @@ -37,7 +37,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.count.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.count.json index 79c347d2548..2661af347d0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.count.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.count.json @@ -32,7 +32,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.fielddata.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.fielddata.json index b070281b92f..a380b558e89 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.fielddata.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.fielddata.json @@ -37,7 +37,7 @@ "v": { "type": "boolean", "description": "Verbose mode. 
Display column headers", - "default": true + "default": false }, "fields": { "type": "list", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.health.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.health.json index 68bad5bc675..bd0cfcedd53 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.health.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.health.json @@ -33,7 +33,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json index 0a5fe158136..b5c487b9521 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json @@ -42,7 +42,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.master.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.master.json index ccce8d6dd2c..d3e35330e1d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.master.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.master.json @@ -28,7 +28,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json index 157a33f0408..cdbe75ac936 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodeattrs.json @@ -28,7 +28,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodes.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodes.json index 8a801c018dc..1d8bcda3c01 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodes.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.nodes.json @@ -28,7 +28,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json index c8512a9f16d..88afa9b0c40 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.pending_tasks.json @@ -28,7 +28,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json index eab71f0ac40..55ee66a2792 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.plugins.json @@ -26,7 +26,7 @@ "v": { "type": "boolean", "description": "Verbose mode. 
Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.recovery.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.recovery.json index 2fe2049ce40..c235406221a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.recovery.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.recovery.json @@ -33,7 +33,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json index f032f4d38e6..ebb989b4cf6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json @@ -24,7 +24,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json index 5aa8e7c8e34..6d941f17d10 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json @@ -32,7 +32,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json index c1431ae0d61..cb8e5e13632 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.thread_pool.json @@ -28,7 +28,7 @@ "v": { "type": "boolean", "description": "Verbose mode. Display column headers", - "default": true + "default": false }, "full_id": { "type": "boolean", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json index a738fa848cc..bdac6d9a9ab 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json @@ -20,6 +20,10 @@ "master_timeout": { "type" : "time", "description" : "Specify timeout for connection to master" + }, + "update_all_types": { + "type": "boolean", + "description": "Whether to update the mapping for all fields with the same name across all types or not" } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json index c6b547914ef..5fce0bcefc8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json @@ -38,6 +38,10 @@ "options" : ["open","closed","none","all"], "default" : "open", "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both." 
+ }, + "update_all_types": { + "type": "boolean", + "description": "Whether to update the mapping for all fields with the same name across all types or not" } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_warmer.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_warmer.json index d1dce8bd0e5..9039367d15f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_warmer.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_warmer.json @@ -41,7 +41,7 @@ }, "request_cache": { "type" : "boolean", - "description" : "Specify whether the request to be wamred shoyd use the request cache, defaults to index level setting" + "description" : "Specify whether the request to be warmed should use the request cache, defaults to index level setting" } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json index 0c179c6c7ac..f3d644d5e34 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json @@ -1,6 +1,6 @@ { "indices.shard_stores": { - "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-shard-stores.html", + "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-shards-stores.html", "methods": ["GET"], "url": { "path": "/_shard_stores", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml index dea14a45bd3..640d77e0183 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml @@ -17,8 +17,7 @@ "Empty cluster": - do: - cat.aliases: - v: false + cat.aliases: {} - match: $body: | @@ -38,8 +37,7 @@ name: test_alias - do: - cat.aliases: - v: false + cat.aliases: {} - match: $body: | @@ -75,8 +73,7 @@ term: foo: bar - do: - cat.aliases: - v: false + cat.aliases: {} - match: $body: | @@ -108,14 +105,12 @@ - do: cat.aliases: name: test_1 - v: false - match: $body: /^test_1 .+ \n$/ - do: cat.aliases: - v: false name: test_2 - match: @@ -123,7 +118,6 @@ - do: cat.aliases: - v: false name: test_* - match: @@ -178,8 +172,7 @@ - do: cat.aliases: - v: false - h: [index, alias] + h: [index, alias] - match: $body: /^ test \s+ test_1 \s+ $/ @@ -195,9 +188,3 @@ index \s+ alias \s+ \n test \s+ test_1 \s+ \n $/ - - - - - - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml index ea508e15919..04a534e18b6 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml @@ -3,7 +3,6 @@ - do: cat.allocation: help: true - v: false - match: $body: | @@ -21,8 +20,7 @@ "Empty cluster": - do: - cat.allocation: - v: false + cat.allocation: {} - match: $body: | @@ -51,8 +49,7 @@ wait_for_status: yellow - do: - cat.allocation: - v: false + cat.allocation: {} - match: $body: | @@ -82,7 +79,6 @@ - do: cat.allocation: node_id: _master - v: false - match: $body: | @@ -102,7 +98,6 @@ - do: cat.allocation: node_id: non_existent - v: false - match: $body: | @@ -116,7 +111,6 @@ - do: 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml
index ea508e15919..04a534e18b6 100755
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml
@@ -3,7 +3,6 @@
   - do:
       cat.allocation:
         help: true
-        v: false

   - match:
       $body: |
@@ -21,8 +20,7 @@
 "Empty cluster":

   - do:
-      cat.allocation:
-        v: false
+      cat.allocation: {}

   - match:
       $body: |
@@ -51,8 +49,7 @@
         wait_for_status: yellow

   - do:
-      cat.allocation:
-        v: false
+      cat.allocation: {}

   - match:
       $body: |
@@ -82,7 +79,6 @@
   - do:
       cat.allocation:
         node_id: _master
-        v: false

   - match:
       $body: |
@@ -102,7 +98,6 @@
   - do:
       cat.allocation:
         node_id: non_existent
-        v: false

   - match:
       $body: |
@@ -116,7 +111,6 @@
   - do:
       cat.allocation:
         node_id: "*"
-        v: false

   - match:
       $body: |
@@ -215,7 +209,6 @@
   - do:
       cat.allocation:
         bytes: g
-        v: false

   - match:
       $body: |
@@ -231,4 +224,3 @@
            \n
          )+
          $/
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml
index 76755327140..1a62ab063d9 100755
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml
@@ -14,8 +14,7 @@
 "Test cat count output":

   - do:
-      cat.count:
-        v: false
+      cat.count: {}

   - match:
       $body: |
@@ -31,8 +30,7 @@
         refresh: true

   - do:
-      cat.count:
-        v: false
+      cat.count: {}

   - match:
       $body: |
@@ -50,7 +48,6 @@
   - do:
       cat.count:
         h: count
-        v: false

   - match:
       $body: |
@@ -61,7 +58,6 @@
   - do:
       cat.count:
         index: index1
-        v: false

   - match:
       $body: |
@@ -77,4 +73,3 @@
       $body: |
               /^ epoch \s+ timestamp \s+ count \s+ \n
                  \d+ \s+ \d{2}:\d{2}:\d{2} \s+ \d+ \s+ \n $/
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml
index ada97021e6e..bc362fae58c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml
@@ -17,8 +17,7 @@
 "Test cat fielddata output":

   - do:
-      cat.fielddata:
-        v: false
+      cat.fielddata: {}

   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.health/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.health/10_basic.yaml
index ad4fd2f5521..9bfde46a371 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.health/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.health/10_basic.yaml
@@ -28,8 +28,7 @@
 "Empty cluster":

   - do:
-      cat.health:
-        v: false
+      cat.health: {}

   - match:
       $body: |
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml
index d8beff155c7..a5a67d1a557 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml
@@ -2,8 +2,7 @@
 "Test cat indices output":

   - do:
-      cat.indices:
-        v: false
+      cat.indices: {}

   - match:
       $body: |
@@ -20,8 +19,7 @@
       cluster.health:
         wait_for_status: yellow
   - do:
-      cat.indices:
-        v: false
+      cat.indices: {}

   - match:
       $body: |
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml
index 5af720effec..f076a3b1859 100755
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml
@@ -6,8 +6,7 @@
       reason: "Waiting for #12558"

   - do:
-      cat.nodeattrs:
-        v: false
+      cat.nodeattrs: {}

   - match:
       $body: |
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml
index 12545f0a64b..66145f47b0a 100755
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml
@@ -2,8 +2,7 @@
 "Test cat nodes output":

   - do:
-      cat.nodes:
-        v: false
+      cat.nodes: {}

   - match:
       $body: |
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml
index 8ac01eaf317..4bc3e996fca 100755
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml
@@ -2,8 +2,7 @@
 "Test cat recovery output":

   - do:
-      cat.recovery:
-        v: false
+      cat.recovery: {}

   - match:
       $body: |
@@ -20,8 +19,7 @@
       cluster.health:
         wait_for_status: yellow
   - do:
-      cat.recovery:
-        v: false
+      cat.recovery: {}

   - match:
       $body: |
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.segments/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.segments/10_basic.yaml
index b64d14ebd50..c31eb1b79c5 100755
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.segments/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.segments/10_basic.yaml
@@ -26,8 +26,7 @@
 "Test cat segments output":

   - do:
-      cat.segments:
-        v: false
+      cat.segments: {}

   - match:
       $body: |
@@ -50,8 +49,7 @@
       cluster.health:
         wait_for_status: green
   - do:
-      cat.segments:
-        v: false
+      cat.segments: {}
   - match:
       $body: |
             /^(index1 \s+ \d \s+ (p|r) \s+ \d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3} \s+ _\d (\s\d){3} \s+
@@ -81,8 +79,7 @@

   - do:
-      cat.segments:
-        v: false
+      cat.segments: {}

   - match:
       $body: |
             /^(index(1|2) .+ \n?){2}$/
@@ -90,7 +87,6 @@
   - do:
       cat.segments:
         index: index2
-        v: false
   - match:
       $body: |
             /^(index2 .+ \n?)$/
@@ -118,4 +114,3 @@
       catch: forbidden
       cat.segments:
         index: index1
-        v: false
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml
index 2838dcb1a55..40f8740db96 100755
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml
@@ -77,8 +77,7 @@
 "Test cat shards output":

   - do:
-      cat.shards:
-        v: false
+      cat.shards: {}

   - match:
       $body: |
@@ -95,8 +94,7 @@
       cluster.health:
         wait_for_status: yellow
   - do:
-      cat.shards:
-        v: false
+      cat.shards: {}

   - match:
       $body: |
@@ -115,8 +113,7 @@
         wait_for_relocating_shards: 0

   - do:
-      cat.shards:
-        v: false
+      cat.shards: {}
   - match:
       $body: |
             /^(index(1|2) \s+ \d \s+ (p|r) \s+ ((STARTED|INITIALIZING|RELOCATING) \s+ (\d \s+ (\d+|\d+[.]\d+)(kb|b) \s+)? \d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} \s+ .+|UNASSIGNED \s+) \n?){15}$/
@@ -124,7 +121,6 @@
   - do:
       cat.shards:
         index: index2
-        v: false
   - match:
       $body: |
             /^(index2 \s+ \d \s+ (p|r) \s+ ((STARTED|INITIALIZING|RELOCATING) \s+ (\d \s+ (\d+|\d+[.]\d+)(kb|b) \s+)? \d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} \s+ .+|UNASSIGNED \s+) \n?){5}$/
@@ -146,8 +142,6 @@
   - do:
       cat.shards:
         index: index3
-        v: false
   - match:
       $body: |
             /^(index3 \s+ \d \s+ (p|s) \s+ ((STARTED|INITIALIZING|RELOCATING) \s+ (\d \s+ (\d+|\d+[.]\d+)(kb|b) \s+)? \d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} \s+ .+|UNASSIGNED \s+) \n?){2}$/
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml
index edb87ce27b9..0c8ac5b4028 100755
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml
@@ -2,8 +2,7 @@
 "Test cat thread_pool output":

   - do:
-      cat.thread_pool:
-        v: false
+      cat.thread_pool: {}

   - match:
       $body: |
@@ -22,7 +21,6 @@
   - do:
       cat.thread_pool:
         h: pid,id,h,i,po
-        v: false

   - match:
       $body: |
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/75_ttl.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/75_ttl.yaml
index cdd5d7e1d0f..09179ebd5ac 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/75_ttl.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/75_ttl.yaml
@@ -56,7 +56,7 @@
   - lte: { _ttl: 100000}
   - gt:  { _ttl: 10000}

-# duration
+# seconds

   - do:
       update:
@@ -66,6 +66,7 @@
         body:
          doc:    { foo: baz }
          upsert: { foo: bar }
+         detect_noop: false
        ttl: 20s

   - do:
@@ -89,4 +90,3 @@
        body:  { foo: bar }
        ttl:   20s
        timestamp: 2013-06-23T18:14:40
-
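The `detect_noop: false` added to the TTL test is likely what keeps it meaningful: the update sends a `doc` that presumably matches what is already stored, so with noop detection left at its default the write would be skipped and the fresh `ttl` never applied. A minimal sketch of the same idea in the harness's style; the index, type, and id values are illustrative assumptions:

  - do:
      update:
        index: test_1
        type:  test
        id:    1
        body:
          doc: { foo: baz }
          detect_noop: false
        ttl: 20s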