diff --git a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java index e1a78ac3b8c..e1a8d81b7a0 100644 --- a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java +++ b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java @@ -58,28 +58,28 @@ public class CustomFieldQuery extends FieldQuery { } @Override - void flatten(Query sourceQuery, IndexReader reader, Collection flatQueries) throws IOException { + void flatten(Query sourceQuery, IndexReader reader, Collection flatQueries, float boost) throws IOException { if (sourceQuery instanceof SpanTermQuery) { - super.flatten(new TermQuery(((SpanTermQuery) sourceQuery).getTerm()), reader, flatQueries); + super.flatten(new TermQuery(((SpanTermQuery) sourceQuery).getTerm()), reader, flatQueries, boost); } else if (sourceQuery instanceof ConstantScoreQuery) { - flatten(((ConstantScoreQuery) sourceQuery).getQuery(), reader, flatQueries); + flatten(((ConstantScoreQuery) sourceQuery).getQuery(), reader, flatQueries, boost); } else if (sourceQuery instanceof FunctionScoreQuery) { - flatten(((FunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries); + flatten(((FunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries, boost); } else if (sourceQuery instanceof FilteredQuery) { - flatten(((FilteredQuery) sourceQuery).getQuery(), reader, flatQueries); + flatten(((FilteredQuery) sourceQuery).getQuery(), reader, flatQueries, boost); flatten(((FilteredQuery) sourceQuery).getFilter(), reader, flatQueries); } else if (sourceQuery instanceof MultiPhrasePrefixQuery) { - flatten(sourceQuery.rewrite(reader), reader, flatQueries); + flatten(sourceQuery.rewrite(reader), reader, flatQueries, boost); } else if (sourceQuery instanceof FiltersFunctionScoreQuery) { - flatten(((FiltersFunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries); + 
flatten(((FiltersFunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries, boost); } else if (sourceQuery instanceof MultiPhraseQuery) { MultiPhraseQuery q = ((MultiPhraseQuery) sourceQuery); convertMultiPhraseQuery(0, new int[q.getTermArrays().size()], q, q.getTermArrays(), q.getPositions(), reader, flatQueries); } else if (sourceQuery instanceof BlendedTermQuery) { final BlendedTermQuery blendedTermQuery = (BlendedTermQuery) sourceQuery; - flatten(blendedTermQuery.rewrite(reader), reader, flatQueries); + flatten(blendedTermQuery.rewrite(reader), reader, flatQueries, boost); } else { - super.flatten(sourceQuery, reader, flatQueries); + super.flatten(sourceQuery, reader, flatQueries, boost); } } @@ -93,7 +93,7 @@ public class CustomFieldQuery extends FieldQuery { if (numTerms > 16) { for (Term[] currentPosTerm : terms) { for (Term term : currentPosTerm) { - super.flatten(new TermQuery(term), reader, flatQueries); + super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost()); } } return; @@ -111,7 +111,7 @@ public class CustomFieldQuery extends FieldQuery { } PhraseQuery query = queryBuilder.build(); query.setBoost(orig.getBoost()); - this.flatten(query, reader, flatQueries); + this.flatten(query, reader, flatQueries, orig.getBoost()); } else { Term[] t = terms.get(currentPos); for (int i = 0; i < t.length; i++) { @@ -127,7 +127,7 @@ public class CustomFieldQuery extends FieldQuery { return; } if (sourceFilter instanceof QueryWrapperFilter) { - flatten(((QueryWrapperFilter) sourceFilter).getQuery(), reader, flatQueries); + flatten(((QueryWrapperFilter) sourceFilter).getQuery(), reader, flatQueries, 1.0F); } } } diff --git a/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java index aaf5c9e48fd..9ad449f1f45 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java @@ -19,12 +19,13 @@ package org.elasticsearch.action; -import com.google.common.base.Preconditions; import org.elasticsearch.action.support.PlainListenableActionFuture; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; +import java.util.Objects; + /** * */ @@ -36,7 +37,7 @@ public abstract class ActionRequestBuilder action, Request request) { - Preconditions.checkNotNull(action, "action must not be null"); + Objects.requireNonNull(action, "action must not be null"); this.action = action; this.request = request; this.client = client; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index b9a372ea074..58ad6718199 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -36,11 +36,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.HashSet; import java.util.Map; import java.util.Set; -import static com.google.common.collect.Sets.newHashSet; - /** */ public class TransportClusterSearchShardsAction extends TransportMasterNodeReadAction { @@ -72,7 +71,7 @@ public class TransportClusterSearchShardsAction extends TransportMasterNodeReadA ClusterState clusterState = clusterService.state(); String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, request); Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(state, request.routing(), request.indices()); - Set nodeIds = newHashSet(); + Set nodeIds = 
new HashSet<>(); GroupShardsIterator groupShardsIterator = clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, request.preference()); ShardRouting shard; ClusterSearchShardsGroup[] groupResponses = new ClusterSearchShardsGroup[groupShardsIterator.size()]; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java index eeba6af8f66..7e1a47c450c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java @@ -33,12 +33,11 @@ import org.elasticsearch.common.xcontent.XContentFactory; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import static com.google.common.collect.Sets.newHashSet; - /** * Status of a snapshot */ @@ -103,7 +102,7 @@ public class SnapshotStatus implements ToXContent, Streamable { ImmutableMap.Builder indicesStatus = ImmutableMap.builder(); - Set indices = newHashSet(); + Set indices = new HashSet<>(); for (SnapshotIndexShardStatus shard : shards) { indices.add(shard.getIndex()); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index 57518f4e83d..fca3f21b765 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -44,12 +44,11 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import 
java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import static com.google.common.collect.Sets.newHashSet; - /** */ public class TransportSnapshotsStatusAction extends TransportMasterNodeAction { @@ -94,7 +93,7 @@ public class TransportSnapshotsStatusAction extends TransportMasterNodeAction nodesIds = newHashSet(); + Set nodesIds = new HashSet<>(); for (SnapshotsInProgress.Entry entry : currentSnapshots) { for (SnapshotsInProgress.ShardSnapshotStatus status : entry.shards().values()) { if (status.nodeId() != null) { @@ -140,7 +139,7 @@ public class TransportSnapshotsStatusAction extends TransportMasterNodeAction builder = new ArrayList<>(); - Set currentSnapshotIds = newHashSet(); + Set currentSnapshotIds = new HashSet<>(); if (!currentSnapshots.isEmpty()) { Map nodeSnapshotStatusMap; if (nodeSnapshotStatuses != null) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index 5cabb7f5334..3916d837f4d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.indices.alias; -import com.google.common.collect.Sets; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.support.ActionFilters; @@ -38,7 +37,11 @@ import org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesNotFoundE import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; 
+import java.util.Set; /** * Add/remove aliases action @@ -68,7 +71,7 @@ public class TransportIndicesAliasesAction extends TransportMasterNodeAction indices = Sets.newHashSet(); + Set indices = new HashSet<>(); for (AliasActions aliasAction : request.aliasActions()) { for (String index : aliasAction.indices()) { indices.add(index); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java index dd50f688003..72fe0553f60 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.indices.create; -import com.google.common.collect.Sets; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.cluster.ack.ClusterStateUpdateRequest; import org.elasticsearch.cluster.block.ClusterBlock; @@ -28,6 +27,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.transport.TransportMessage; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -47,11 +47,11 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ private final Map mappings = new HashMap<>(); - private final Set aliases = Sets.newHashSet(); + private final Set aliases = new HashSet<>(); private final Map customs = new HashMap<>(); - private final Set blocks = Sets.newHashSet(); + private final Set blocks = new HashSet<>(); CreateIndexClusterStateUpdateRequest(TransportMessage originalMessage, String cause, String index, boolean updateAllTypes) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java 
b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index c5ade4fc5fd..48342f90295 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.admin.indices.create; import com.google.common.base.Charsets; -import com.google.common.collect.Sets; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionRequest; @@ -45,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -72,7 +72,7 @@ public class CreateIndexRequest extends AcknowledgedRequest private final Map mappings = new HashMap<>(); - private final Set aliases = Sets.newHashSet(); + private final Set aliases = new HashSet<>(); private final Map customs = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java index 06a9112614b..979e51f83c7 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.indices.segments; -import com.google.common.collect.Sets; import org.apache.lucene.util.Accountable; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastResponse; @@ -35,6 +34,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; +import java.util.HashSet; import 
java.util.List; import java.util.Map; import java.util.Set; @@ -60,7 +60,7 @@ public class IndicesSegmentResponse extends BroadcastResponse implements ToXCont } Map indicesSegments = new HashMap<>(); - Set indices = Sets.newHashSet(); + Set indices = new HashSet<>(); for (ShardSegments shard : shards) { indices.add(shard.getShardRouting().getIndex()); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index 6725850f369..5cb94b2dfa8 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.admin.indices.stats; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.routing.ShardRouting; @@ -34,6 +33,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -87,7 +87,7 @@ public class IndicesStatsResponse extends BroadcastResponse implements ToXConten } Map indicesStats = new HashMap<>(); - Set indices = Sets.newHashSet(); + Set indices = new HashSet<>(); for (ShardStats shard : shards) { indices.add(shard.getShardRouting().getIndex()); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 9b9dbbe9630..fe42f7ece52 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -42,10 +42,10 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import java.io.IOException; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; -import static com.google.common.collect.Sets.newHashSet; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; @@ -70,7 +70,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest mappings = new HashMap<>(); - private final Set aliases = newHashSet(); + private final Set aliases = new HashSet<>(); private Map customs = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java index 5cb60844d5b..5ace6641352 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.indices.upgrade.get; -import com.google.common.collect.Sets; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; @@ -31,6 +30,7 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -54,7 +54,7 @@ public 
class UpgradeStatusResponse extends BroadcastResponse implements ToXConte } Map indicesUpgradeStats = new HashMap<>(); - Set indices = Sets.newHashSet(); + Set indices = new HashSet<>(); for (ShardUpgradeStatus shard : shards) { indices.add(shard.getIndex()); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java index d1bee20f9ca..55c389d9e80 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java @@ -46,12 +46,11 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import static com.google.common.collect.Sets.newHashSet; - /** * Upgrade index/indices action. 
*/ @@ -158,7 +157,7 @@ public class TransportUpgradeAction extends TransportBroadcastByNodeAction indicesWithMissingPrimaries(ClusterState clusterState, String[] concreteIndices) { - Set indices = newHashSet(); + Set indices = new HashSet<>(); RoutingTable routingTable = clusterState.routingTable(); for (String index : concreteIndices) { IndexRoutingTable indexRoutingTable = routingTable.index(index); diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index a3033c5b4c9..fdc9a4174ec 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.termvectors; -import com.google.common.collect.Sets; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionRequestValidationException; @@ -33,6 +32,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.VersionType; diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java index 90621407b23..a5c71e3aa67 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import java.io.IOException; +import java.net.MalformedURLException; 
import java.net.URL; import java.net.URLClassLoader; import java.nio.file.FileVisitResult; @@ -70,21 +71,43 @@ public class JarHell { } /** - * Checks the current classloader for duplicate classes + * Checks the current classpath for duplicate classes * @throws IllegalStateException if jar hell was found */ public static void checkJarHell() throws Exception { ClassLoader loader = JarHell.class.getClassLoader(); - if (loader instanceof URLClassLoader == false) { - return; - } ESLogger logger = Loggers.getLogger(JarHell.class); if (logger.isDebugEnabled()) { logger.debug("java.class.path: {}", System.getProperty("java.class.path")); logger.debug("sun.boot.class.path: {}", System.getProperty("sun.boot.class.path")); - logger.debug("classloader urls: {}", Arrays.toString(((URLClassLoader)loader).getURLs())); + if (loader instanceof URLClassLoader ) { + logger.debug("classloader urls: {}", Arrays.toString(((URLClassLoader)loader).getURLs())); + } } - checkJarHell(((URLClassLoader) loader).getURLs()); + checkJarHell(parseClassPath()); + } + + /** + * Parses the classpath into a set of URLs + */ + @SuppressForbidden(reason = "resolves against CWD because that is how classpaths work") + public static URL[] parseClassPath() { + String elements[] = System.getProperty("java.class.path").split(System.getProperty("path.separator")); + URL urlElements[] = new URL[elements.length]; + for (int i = 0; i < elements.length; i++) { + String element = elements[i]; + // empty classpath element behaves like CWD. 
+ if (element.isEmpty()) { + element = System.getProperty("user.dir"); + } + try { + urlElements[i] = PathUtils.get(element).toUri().toURL(); + } catch (MalformedURLException e) { + // should not happen, as we use the filesystem API + throw new RuntimeException(e); + } + } + return urlElements; } /** diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java index 4b32baa8582..a6bcb2f3732 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -24,7 +24,6 @@ import org.elasticsearch.env.Environment; import java.io.*; import java.net.URL; -import java.net.URLClassLoader; import java.nio.file.AccessMode; import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; @@ -121,8 +120,8 @@ final class Security { private static final Map SPECIAL_JARS; static { Map m = new IdentityHashMap<>(); - m.put(Pattern.compile(".*lucene-core-.*\\.jar$"), "es.security.jar.lucene.core"); - m.put(Pattern.compile(".*securemock-.*\\.jar$"), "es.security.jar.elasticsearch.securemock"); + m.put(Pattern.compile(".*lucene-core-.*\\.jar$"), "es.security.jar.lucene.core"); + m.put(Pattern.compile(".*securemock-.*\\.jar$"), "es.security.jar.elasticsearch.securemock"); SPECIAL_JARS = Collections.unmodifiableMap(m); } @@ -133,27 +132,21 @@ final class Security { */ @SuppressForbidden(reason = "proper use of URL") static void setCodebaseProperties() { - ClassLoader loader = Security.class.getClassLoader(); - if (loader instanceof URLClassLoader) { - for (URL url : ((URLClassLoader)loader).getURLs()) { - for (Map.Entry e : SPECIAL_JARS.entrySet()) { - if (e.getKey().matcher(url.getPath()).matches()) { - String prop = e.getValue(); - if (System.getProperty(prop) != null) { - throw new IllegalStateException("property: " + prop + " is unexpectedly set: " + System.getProperty(prop)); - } - System.setProperty(prop, 
url.toString()); + for (URL url : JarHell.parseClassPath()) { + for (Map.Entry e : SPECIAL_JARS.entrySet()) { + if (e.getKey().matcher(url.getPath()).matches()) { + String prop = e.getValue(); + if (System.getProperty(prop) != null) { + throw new IllegalStateException("property: " + prop + " is unexpectedly set: " + System.getProperty(prop)); } + System.setProperty(prop, url.toString()); } } - for (String prop : SPECIAL_JARS.values()) { - if (System.getProperty(prop) == null) { - System.setProperty(prop, "file:/dev/null"); // no chance to be interpreted as "all" - } + } + for (String prop : SPECIAL_JARS.values()) { + if (System.getProperty(prop) == null) { + System.setProperty(prop, "file:/dev/null"); // no chance to be interpreted as "all" } - } else { - // we could try to parse the classpath or something, but screw it for now. - throw new UnsupportedOperationException("Unsupported system classloader type: " + loader.getClass()); } } diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index c60d6895d0f..64e2655e463 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -20,7 +20,6 @@ package org.elasticsearch.client.transport; import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.google.common.collect.Sets; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; @@ -396,7 +395,7 @@ public class TransportClientNodesService extends AbstractComponent { protected void doSample() { // the nodes we are going to ping include the core listed nodes that were added // and the last round of discovered nodes - Set nodesToPing = Sets.newHashSet(); + Set nodesToPing = new HashSet<>(); for (DiscoveryNode node : listedNodes) { 
nodesToPing.add(node); } diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 4720bb087dc..0cb46cbc248 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -90,6 +90,7 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.search.SearchService; +import org.elasticsearch.search.internal.DefaultSearchContext; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -275,6 +276,7 @@ public class ClusterModule extends AbstractModule { registerIndexDynamicSetting(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN); registerIndexDynamicSetting(IndicesRequestCache.DEPRECATED_INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN); registerIndexDynamicSetting(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, Validator.TIME); + registerIndexDynamicSetting(DefaultSearchContext.MAX_RESULT_WINDOW, Validator.POSITIVE_INTEGER); } public void registerIndexDynamicSetting(String setting, Validator validator) { diff --git a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java index 1724c4179aa..8d902f678c0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +++ b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java @@ -21,7 +21,6 @@ package org.elasticsearch.cluster.block; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.metadata.IndexMetaData; import 
org.elasticsearch.cluster.metadata.MetaDataIndexStateService; @@ -31,6 +30,7 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -270,7 +270,7 @@ public class ClusterBlocks extends AbstractDiffable { public static class Builder { - private Set global = Sets.newHashSet(); + private Set global = new HashSet<>(); private Map> indices = new HashMap<>(); @@ -281,7 +281,7 @@ public class ClusterBlocks extends AbstractDiffable { global.addAll(blocks.global()); for (Map.Entry> entry : blocks.indices().entrySet()) { if (!indices.containsKey(entry.getKey())) { - indices.put(entry.getKey(), Sets.newHashSet()); + indices.put(entry.getKey(), new HashSet<>()); } indices.get(entry.getKey()).addAll(entry.getValue()); } @@ -319,7 +319,7 @@ public class ClusterBlocks extends AbstractDiffable { public Builder addIndexBlock(String index, ClusterBlock block) { if (!indices.containsKey(index)) { - indices.put(index, Sets.newHashSet()); + indices.put(index, new HashSet<>()); } indices.get(index).add(block); return this; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index 4f0539e5b6a..5b70f6820b1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; -import com.google.common.collect.Sets; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; @@ -29,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index 975ebbb8066..f5b163cd040 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.google.common.collect.Sets; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterService; @@ -39,6 +38,7 @@ import org.elasticsearch.indices.InvalidIndexTemplateException; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -76,7 +76,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { @Override public ClusterState execute(ClusterState currentState) { - Set templateNames = Sets.newHashSet(); + Set templateNames = new HashSet<>(); for (ObjectCursor cursor : currentState.metaData().templates().keys()) { String templateName = cursor.value; if (Regex.simpleMatch(request.name, templateName)) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index a79136127f5..c8b78b0dddb 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -20,8 +20,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.google.common.collect.Sets; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; @@ -49,6 +47,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -182,7 +181,7 @@ public class MetaDataMappingService extends AbstractComponent { // we need to create the index here, and add the current mapping to it, so we can merge indexService = indicesService.createIndex(indexMetaData.index(), indexMetaData.settings(), currentState.nodes().localNode().id()); removeIndex = true; - Set typesToIntroduce = Sets.newHashSet(); + Set typesToIntroduce = new HashSet<>(); for (MappingTask task : tasks) { if (task instanceof UpdateTask) { typesToIntroduce.add(((UpdateTask) task).type); @@ -223,7 +222,7 @@ public class MetaDataMappingService extends AbstractComponent { boolean dirty = false; String index = indexService.index().name(); // keep track of what we already refreshed, no need to refresh it again... 
- Set processedRefreshes = Sets.newHashSet(); + Set processedRefreshes = new HashSet<>(); for (MappingTask task : tasks) { if (task instanceof RefreshTask) { RefreshTask refreshTask = (RefreshTask) task; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 46656338862..65d862c4540 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -19,13 +19,16 @@ package org.elasticsearch.cluster.metadata; -import com.google.common.collect.Sets; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsClusterStateUpdateRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.AckedClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.routing.RoutingTable; @@ -41,7 +44,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.settings.IndexDynamicSettings; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import static 
org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -185,8 +194,8 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements final Settings closeSettings = updatedSettingsBuilder.build(); - final Set removedSettings = Sets.newHashSet(); - final Set errors = Sets.newHashSet(); + final Set removedSettings = new HashSet<>(); + final Set errors = new HashSet<>(); for (Map.Entry setting : updatedSettingsBuilder.internalMap().entrySet()) { if (!dynamicSettings.hasDynamicSetting(setting.getKey())) { removedSettings.add(setting.getKey()); @@ -225,8 +234,8 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements // allow to change any settings to a close index, and only allow dynamic settings to be changed // on an open index - Set openIndices = Sets.newHashSet(); - Set closeIndices = Sets.newHashSet(); + Set openIndices = new HashSet<>(); + Set closeIndices = new HashSet<>(); for (String index : actualIndices) { if (currentState.metaData().index(index).state() == IndexMetaData.State.OPEN) { openIndices.add(index); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java index 2987cdf1855..a909b5550fe 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.routing; import com.carrotsearch.hppc.IntSet; import com.carrotsearch.hppc.cursors.IntCursor; import com.carrotsearch.hppc.cursors.IntObjectCursor; -import com.google.common.collect.Sets; import com.google.common.collect.UnmodifiableIterator; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.AbstractDiffable; @@ -37,6 +36,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; +import 
java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.ThreadLocalRandom; @@ -137,7 +137,7 @@ public class IndexRoutingTable extends AbstractDiffable imple // check the number of shards if (indexMetaData.numberOfShards() != shards().size()) { - Set expected = Sets.newHashSet(); + Set expected = new HashSet<>(); for (int i = 0; i < indexMetaData.numberOfShards(); i++) { expected.add(i); } @@ -176,7 +176,7 @@ public class IndexRoutingTable extends AbstractDiffable imple * @return number of distinct nodes this index has at least one shard allocated on */ public int numberOfNodesShardsAreAllocatedOn(String... excludedNodes) { - Set nodes = Sets.newHashSet(); + Set nodes = new HashSet<>(); for (IndexShardRoutingTable shardRoutingTable : this) { for (ShardRouting shardRouting : shardRoutingTable) { if (shardRouting.assignedToNode()) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index 1f9a3461297..55301b4efb4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -20,12 +20,12 @@ package org.elasticsearch.cluster.routing; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.shard.ShardId; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index 41459c95bc8..0a2a5c96606 
100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -35,14 +35,13 @@ import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Predicate; -import static com.google.common.collect.Sets.newHashSet; - /** * {@link RoutingNodes} represents a copy the routing information contained in * the {@link ClusterState cluster state}. @@ -704,7 +703,7 @@ public class RoutingNodes implements Iterable { int inactivePrimaryCount = 0; int inactiveShardCount = 0; int relocating = 0; - final Set seenShards = newHashSet(); + final Set seenShards = new HashSet<>(); Map indicesAndShards = new HashMap<>(); for (RoutingNode node : routingNodes) { for (ShardRouting shard : node) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index f87f07dfb31..c0004c9b288 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -188,7 +188,7 @@ public class AwarenessAllocationDecider extends AllocationDecider { if (assignedShard.relocating()) { RoutingNode relocationNode = allocation.routingNodes().node(assignedShard.relocatingNodeId()); shardPerAttribute.addTo(relocationNode.node().attributes().get(awarenessAttribute), 1); - } else if (assignedShard.started()) { + } else if (assignedShard.started() || assignedShard.initializing()) { RoutingNode routingNode = allocation.routingNodes().node(assignedShard.currentNodeId()); 
shardPerAttribute.addTo(routingNode.node().attributes().get(awarenessAttribute), 1); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index b7ee93e5e23..9a6353a46f8 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.routing.allocation.decider; -import com.google.common.collect.Sets; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterInfo; @@ -36,6 +35,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.RatioValue; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.node.settings.NodeSettingsService; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/BasePolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/BasePolygonBuilder.java index c37c8a66d6b..aabba08936d 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/BasePolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/BasePolygonBuilder.java @@ -19,11 +19,16 @@ package org.elasticsearch.common.geo.builders; -import com.google.common.collect.Sets; import com.spatial4j.core.exception.InvalidShapeException; import com.spatial4j.core.shape.Shape; -import com.vividsolutions.jts.geom.*; +import com.vividsolutions.jts.geom.Coordinate; +import com.vividsolutions.jts.geom.Geometry; +import com.vividsolutions.jts.geom.GeometryFactory; +import com.vividsolutions.jts.geom.LinearRing; +import 
com.vividsolutions.jts.geom.MultiPolygon; +import com.vividsolutions.jts.geom.Polygon; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/common/inject/AbstractModule.java b/core/src/main/java/org/elasticsearch/common/inject/AbstractModule.java index bfebed72a73..af908845509 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/AbstractModule.java +++ b/core/src/main/java/org/elasticsearch/common/inject/AbstractModule.java @@ -25,8 +25,8 @@ import org.elasticsearch.common.inject.spi.TypeConverter; import org.elasticsearch.common.inject.spi.TypeListener; import java.lang.annotation.Annotation; +import java.util.Objects; -import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; /** @@ -56,7 +56,7 @@ public abstract class AbstractModule implements Module { public final synchronized void configure(Binder builder) { checkState(this.binder == null, "Re-entry is not allowed."); - this.binder = checkNotNull(builder, "builder"); + this.binder = Objects.requireNonNull(builder, "builder"); try { configure(); } finally { diff --git a/core/src/main/java/org/elasticsearch/common/inject/InheritingState.java b/core/src/main/java/org/elasticsearch/common/inject/InheritingState.java index d51638d86b4..d70f47a760c 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/InheritingState.java +++ b/core/src/main/java/org/elasticsearch/common/inject/InheritingState.java @@ -33,8 +33,7 @@ import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; - -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; /** * @author jessewilson@google.com (Jesse Wilson) @@ -54,7 +53,7 @@ class InheritingState implements State { private final Object lock; 
InheritingState(State parent) { - this.parent = checkNotNull(parent, "parent"); + this.parent = Objects.requireNonNull(parent, "parent"); this.lock = (parent == State.NONE) ? this : parent.lock(); } diff --git a/core/src/main/java/org/elasticsearch/common/inject/Initializer.java b/core/src/main/java/org/elasticsearch/common/inject/Initializer.java index 1cb91f43c8b..f1288c57c07 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/Initializer.java +++ b/core/src/main/java/org/elasticsearch/common/inject/Initializer.java @@ -23,11 +23,10 @@ import org.elasticsearch.common.inject.spi.InjectionPoint; import java.util.ArrayList; import java.util.IdentityHashMap; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.CountDownLatch; -import static com.google.common.base.Preconditions.checkNotNull; - /** * Manages and injects instances at injector-creation time. This is made more complicated by * instances that request other instances while they're being injected. 
We overcome this by using @@ -60,7 +59,7 @@ class Initializer { */ public Initializable requestInjection(InjectorImpl injector, T instance, Object source, Set injectionPoints) { - checkNotNull(source); + Objects.requireNonNull(source); // short circuit if the object has no injections if (instance == null @@ -118,8 +117,8 @@ class Initializer { public InjectableReference(InjectorImpl injector, T instance, Object source) { this.injector = injector; - this.instance = checkNotNull(instance, "instance"); - this.source = checkNotNull(source, "source"); + this.instance = Objects.requireNonNull(instance, "instance"); + this.source = Objects.requireNonNull(source, "source"); } public void validate(Errors errors) throws ErrorsException { diff --git a/core/src/main/java/org/elasticsearch/common/inject/InjectorShell.java b/core/src/main/java/org/elasticsearch/common/inject/InjectorShell.java index 5ac7934fc74..510d9b59c93 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/InjectorShell.java +++ b/core/src/main/java/org/elasticsearch/common/inject/InjectorShell.java @@ -35,9 +35,9 @@ import org.elasticsearch.common.inject.spi.TypeListenerBinding; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import java.util.logging.Logger; -import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static org.elasticsearch.common.inject.Scopes.SINGLETON; @@ -257,7 +257,7 @@ class InjectorShell { final Stage stage; private RootModule(Stage stage) { - this.stage = checkNotNull(stage, "stage"); + this.stage = Objects.requireNonNull(stage, "stage"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/Injectors.java b/core/src/main/java/org/elasticsearch/common/inject/Injectors.java index a51487e8770..40a0ae1aba8 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/Injectors.java +++ 
b/core/src/main/java/org/elasticsearch/common/inject/Injectors.java @@ -19,12 +19,12 @@ package org.elasticsearch.common.inject; -import com.google.common.collect.Sets; import org.elasticsearch.common.inject.matcher.Matcher; import org.elasticsearch.common.inject.name.Names; import org.elasticsearch.common.inject.spi.Message; import java.lang.reflect.Type; +import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; import java.util.Set; @@ -69,7 +69,7 @@ public class Injectors { * @return a set of objects returned from this injector */ public static Set getInstancesOf(Injector injector, Class baseClass) { - Set answer = Sets.newHashSet(); + Set answer = new HashSet<>(); Set, Binding>> entries = injector.getBindings().entrySet(); for (Entry, Binding> entry : entries) { Key key = entry.getKey(); @@ -93,7 +93,7 @@ public class Injectors { * @return a set of objects returned from this injector */ public static Set getInstancesOf(Injector injector, Matcher matcher) { - Set answer = Sets.newHashSet(); + Set answer = new HashSet<>(); Set, Binding>> entries = injector.getBindings().entrySet(); for (Entry, Binding> entry : entries) { Key key = entry.getKey(); @@ -114,7 +114,7 @@ public class Injectors { * @return a set of objects returned from this injector */ public static Set> getProvidersOf(Injector injector, Matcher matcher) { - Set> answer = Sets.newHashSet(); + Set> answer = new HashSet<>(); Set, Binding>> entries = injector.getBindings().entrySet(); for (Entry, Binding> entry : entries) { Key key = entry.getKey(); @@ -135,7 +135,7 @@ public class Injectors { * @return a set of objects returned from this injector */ public static Set> getProvidersOf(Injector injector, Class baseClass) { - Set> answer = Sets.newHashSet(); + Set> answer = new HashSet<>(); Set, Binding>> entries = injector.getBindings().entrySet(); for (Entry, Binding> entry : entries) { Key key = entry.getKey(); @@ -186,7 +186,7 @@ public class Injectors { * @return a set of objects 
returned from this injector */ public static Set> getBindingsOf(Injector injector, Matcher matcher) { - Set> answer = Sets.newHashSet(); + Set> answer = new HashSet<>(); Set, Binding>> entries = injector.getBindings().entrySet(); for (Entry, Binding> entry : entries) { Key key = entry.getKey(); @@ -205,7 +205,7 @@ public class Injectors { * @return a set of objects returned from this injector */ public static Set> getBindingsOf(Injector injector, Class baseClass) { - Set> answer = Sets.newHashSet(); + Set> answer = new HashSet<>(); Set, Binding>> entries = injector.getBindings().entrySet(); for (Entry, Binding> entry : entries) { Key key = entry.getKey(); diff --git a/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java b/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java index 9cdc4e42fb9..d748cec6ad7 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java +++ b/core/src/main/java/org/elasticsearch/common/inject/InternalFactoryToProviderAdapter.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.inject; import org.elasticsearch.common.inject.internal.*; import org.elasticsearch.common.inject.spi.Dependency; -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; /** * @author crazybob@google.com (Bob Lee) @@ -35,8 +35,8 @@ class InternalFactoryToProviderAdapter implements InternalFactory { public InternalFactoryToProviderAdapter( Initializable> initializable, Object source) { - this.initializable = checkNotNull(initializable, "provider"); - this.source = checkNotNull(source, "source"); + this.initializable = Objects.requireNonNull(initializable, "provider"); + this.source = Objects.requireNonNull(source, "source"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/Key.java b/core/src/main/java/org/elasticsearch/common/inject/Key.java index 7de5735caa1..e2bbcf840b6 100644 --- 
a/core/src/main/java/org/elasticsearch/common/inject/Key.java +++ b/core/src/main/java/org/elasticsearch/common/inject/Key.java @@ -22,9 +22,9 @@ import org.elasticsearch.common.inject.internal.ToStringBuilder; import java.lang.annotation.Annotation; import java.lang.reflect.Type; +import java.util.Objects; import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; /** * Binding key consisting of an injection type and an optional annotation. @@ -343,7 +343,7 @@ public class Key { * Gets the strategy for an annotation. */ static AnnotationStrategy strategyFor(Annotation annotation) { - checkNotNull(annotation, "annotation"); + Objects.requireNonNull(annotation, "annotation"); Class annotationType = annotation.annotationType(); ensureRetainedAtRuntime(annotationType); ensureIsBindingAnnotation(annotationType); @@ -359,7 +359,7 @@ public class Key { * Gets the strategy for an annotation type. */ static AnnotationStrategy strategyFor(Class annotationType) { - checkNotNull(annotationType, "annotation type"); + Objects.requireNonNull(annotationType, "annotation type"); ensureRetainedAtRuntime(annotationType); ensureIsBindingAnnotation(annotationType); return new AnnotationTypeStrategy(annotationType, null); @@ -414,7 +414,7 @@ public class Key { final Annotation annotation; AnnotationInstanceStrategy(Annotation annotation) { - this.annotation = checkNotNull(annotation, "annotation"); + this.annotation = Objects.requireNonNull(annotation, "annotation"); } @Override @@ -467,7 +467,7 @@ public class Key { AnnotationTypeStrategy(Class annotationType, Annotation annotation) { - this.annotationType = checkNotNull(annotationType, "annotation type"); + this.annotationType = Objects.requireNonNull(annotationType, "annotation type"); this.annotation = annotation; } diff --git a/core/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java 
b/core/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java index 019d04ce0c7..187db3b9b81 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java +++ b/core/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java @@ -21,8 +21,7 @@ import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.spi.ScopeBinding; import java.lang.annotation.Annotation; - -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; /** * Handles {@link Binder#bindScope} commands. @@ -52,11 +51,11 @@ class ScopeBindingProcessor extends AbstractProcessor { // Go ahead and bind anyway so we don't get collateral errors. } - Scope existing = injector.state.getScope(checkNotNull(annotationType, "annotation type")); + Scope existing = injector.state.getScope(Objects.requireNonNull(annotationType, "annotation type")); if (existing != null) { errors.duplicateScopes(existing, annotationType, scope); } else { - injector.state.putAnnotation(annotationType, checkNotNull(scope, "scope")); + injector.state.putAnnotation(annotationType, Objects.requireNonNull(scope, "scope")); } return true; diff --git a/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java b/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java index b83df0914d6..de9101d747d 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java +++ b/core/src/main/java/org/elasticsearch/common/inject/TypeLiteral.java @@ -21,11 +21,10 @@ import org.elasticsearch.common.inject.util.Types; import java.lang.reflect.*; import java.util.Arrays; -import java.util.Collections; import java.util.List; +import java.util.Objects; import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; import static org.elasticsearch.common.inject.internal.MoreTypes.canonicalize; /** @@ -85,7 +84,7 @@ public class 
TypeLiteral { */ @SuppressWarnings("unchecked") TypeLiteral(Type type) { - this.type = canonicalize(checkNotNull(type, "type")); + this.type = canonicalize(Objects.requireNonNull(type, "type")); this.rawType = (Class) MoreTypes.getRawType(this.type); this.hashCode = MoreTypes.hashCode(this.type); } diff --git a/core/src/main/java/org/elasticsearch/common/inject/WeakKeySet.java b/core/src/main/java/org/elasticsearch/common/inject/WeakKeySet.java index bff939dd139..f13ff347275 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/WeakKeySet.java +++ b/core/src/main/java/org/elasticsearch/common/inject/WeakKeySet.java @@ -16,8 +16,7 @@ package org.elasticsearch.common.inject; -import com.google.common.collect.Sets; - +import java.util.HashSet; import java.util.Set; /** @@ -34,7 +33,7 @@ final class WeakKeySet { * keys whose class names are equal but class loaders are different. This shouldn't be an issue * in practice. */ - private Set backingSet = Sets.newHashSet(); + private Set backingSet = new HashSet<>(); public boolean add(Key key) { return backingSet.add(key.toString()); diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java b/core/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java index 65b7a17d5ca..e6c3b1c9523 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java @@ -24,8 +24,7 @@ import org.elasticsearch.common.inject.spi.InstanceBinding; import java.lang.annotation.Annotation; import java.util.List; - -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; /** * Bind a value or constant. 
@@ -73,7 +72,7 @@ public abstract class AbstractBindingBuilder { * Sets the binding to a copy with the specified annotation on the bound key */ protected BindingImpl annotatedWithInternal(Class annotationType) { - checkNotNull(annotationType, "annotationType"); + Objects.requireNonNull(annotationType, "annotationType"); checkNotAnnotated(); return setBinding(binding.withKey( Key.get(this.binding.getKey().getTypeLiteral(), annotationType))); @@ -83,20 +82,20 @@ public abstract class AbstractBindingBuilder { * Sets the binding to a copy with the specified annotation on the bound key */ protected BindingImpl annotatedWithInternal(Annotation annotation) { - checkNotNull(annotation, "annotation"); + Objects.requireNonNull(annotation, "annotation"); checkNotAnnotated(); return setBinding(binding.withKey( Key.get(this.binding.getKey().getTypeLiteral(), annotation))); } public void in(final Class scopeAnnotation) { - checkNotNull(scopeAnnotation, "scopeAnnotation"); + Objects.requireNonNull(scopeAnnotation, "scopeAnnotation"); checkNotScoped(); setBinding(getBinding().withScoping(Scoping.forAnnotation(scopeAnnotation))); } public void in(final Scope scope) { - checkNotNull(scope, "scope"); + Objects.requireNonNull(scope, "scope"); checkNotScoped(); setBinding(getBinding().withScoping(Scoping.forInstance(scope))); } diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/BindingBuilder.java b/core/src/main/java/org/elasticsearch/common/inject/internal/BindingBuilder.java index bbec0113042..45a125966d5 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/BindingBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/BindingBuilder.java @@ -25,10 +25,9 @@ import org.elasticsearch.common.inject.spi.Message; import java.lang.annotation.Annotation; import java.util.List; +import java.util.Objects; import java.util.Set; -import static com.google.common.base.Preconditions.checkNotNull; - /** * Bind a non-constant key. 
* @@ -65,7 +64,7 @@ public class BindingBuilder extends AbstractBindingBuilder @Override public BindingBuilder to(Key linkedKey) { - checkNotNull(linkedKey, "linkedKey"); + Objects.requireNonNull(linkedKey, "linkedKey"); checkNotTargetted(); BindingImpl base = getBinding(); setBinding(new LinkedBindingImpl<>( @@ -100,7 +99,7 @@ public class BindingBuilder extends AbstractBindingBuilder @Override public BindingBuilder toProvider(Provider provider) { - checkNotNull(provider, "provider"); + Objects.requireNonNull(provider, "provider"); checkNotTargetted(); // lookup the injection points, adding any errors to the binder's errors list @@ -127,7 +126,7 @@ public class BindingBuilder extends AbstractBindingBuilder @Override public BindingBuilder toProvider(Key> providerKey) { - checkNotNull(providerKey, "providerKey"); + Objects.requireNonNull(providerKey, "providerKey"); checkNotTargetted(); BindingImpl base = getBinding(); diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/ExposureBuilder.java b/core/src/main/java/org/elasticsearch/common/inject/internal/ExposureBuilder.java index 354b27d66e4..6b5f5f95a97 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/ExposureBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/ExposureBuilder.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.inject.Key; import org.elasticsearch.common.inject.binder.AnnotatedElementBuilder; import java.lang.annotation.Annotation; +import java.util.Objects; /** * For private binder's expose() method. 
@@ -44,14 +45,14 @@ public class ExposureBuilder implements AnnotatedElementBuilder { @Override public void annotatedWith(Class annotationType) { - com.google.common.base.Preconditions.checkNotNull(annotationType, "annotationType"); + Objects.requireNonNull(annotationType, "annotationType"); checkNotAnnotated(); key = Key.get(key.getTypeLiteral(), annotationType); } @Override public void annotatedWith(Annotation annotation) { - com.google.common.base.Preconditions.checkNotNull(annotation, "annotation"); + Objects.requireNonNull(annotation, "annotation"); checkNotAnnotated(); key = Key.get(key.getTypeLiteral(), annotation); } diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/Join.java b/core/src/main/java/org/elasticsearch/common/inject/internal/Join.java index 65005992438..db0afe95d3a 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/Join.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/Join.java @@ -22,8 +22,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Iterator; import java.util.Map; - -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; /** * Utility for joining pieces of text separated by a delimiter. It can handle @@ -97,7 +96,7 @@ public final class Join { */ public static String join( String delimiter, @Nullable Object firstToken, Object... otherTokens) { - checkNotNull(otherTokens); + Objects.requireNonNull(otherTokens); return join(delimiter, CollectionUtils.asArrayList(firstToken, otherTokens)); } @@ -207,7 +206,7 @@ public final class Join { */ public static T join(T appendable, String delimiter, @Nullable Object firstToken, Object... 
otherTokens) { - checkNotNull(otherTokens); + Objects.requireNonNull(otherTokens); return join(appendable, delimiter, CollectionUtils.asArrayList(firstToken, otherTokens)); } @@ -232,8 +231,8 @@ public final class Join { /* This method is the workhorse of the class */ - checkNotNull(appendable); - checkNotNull(delimiter); + Objects.requireNonNull(appendable); + Objects.requireNonNull(delimiter); if (tokens.hasNext()) { try { appendOneToken(appendable, tokens.next()); @@ -268,9 +267,9 @@ public final class Join { */ public static T join(T appendable, String keyValueSeparator, String entryDelimiter, Map map) { - checkNotNull(appendable); - checkNotNull(keyValueSeparator); - checkNotNull(entryDelimiter); + Objects.requireNonNull(appendable); + Objects.requireNonNull(keyValueSeparator); + Objects.requireNonNull(entryDelimiter); Iterator> entries = map.entrySet().iterator(); if (entries.hasNext()) { try { diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/MatcherAndConverter.java b/core/src/main/java/org/elasticsearch/common/inject/internal/MatcherAndConverter.java index 7ee9313041f..fbafd4c2d28 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/MatcherAndConverter.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/MatcherAndConverter.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.matcher.Matcher; import org.elasticsearch.common.inject.spi.TypeConverter; -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; /** * @author crazybob@google.com (Bob Lee) @@ -33,8 +33,8 @@ public final class MatcherAndConverter { public MatcherAndConverter(Matcher> typeMatcher, TypeConverter typeConverter, Object source) { - this.typeMatcher = checkNotNull(typeMatcher, "type matcher"); - this.typeConverter = checkNotNull(typeConverter, "converter"); + this.typeMatcher = Objects.requireNonNull(typeMatcher, "type matcher"); + 
this.typeConverter = Objects.requireNonNull(typeConverter, "converter"); this.source = source; } diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java b/core/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java index 8587a540989..6eb6f404fae 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java @@ -31,7 +31,6 @@ import java.util.NoSuchElementException; import java.util.Objects; import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; /** * Static methods for working with types that we aren't publishing in the @@ -316,7 +315,7 @@ public class MoreTypes { * Returns {@code Field.class}, {@code Method.class} or {@code Constructor.class}. */ public static Class memberType(Member member) { - checkNotNull(member, "member"); + Objects.requireNonNull(member, "member"); if (member instanceof MemberImpl) { return ((MemberImpl) member).memberType; @@ -355,7 +354,7 @@ public class MoreTypes { } public static String memberKey(Member member) { - checkNotNull(member, "member"); + Objects.requireNonNull(member, "member"); return ""; } @@ -456,7 +455,7 @@ public class MoreTypes { this.rawType = canonicalize(rawType); this.typeArguments = typeArguments.clone(); for (int t = 0; t < this.typeArguments.length; t++) { - checkNotNull(this.typeArguments[t], "type parameter"); + Objects.requireNonNull(this.typeArguments[t], "type parameter"); checkNotPrimitive(this.typeArguments[t], "type parameters"); this.typeArguments[t] = canonicalize(this.typeArguments[t]); } @@ -566,14 +565,14 @@ public class MoreTypes { checkArgument(upperBounds.length == 1, "Must have exactly one upper bound."); if (lowerBounds.length == 1) { - checkNotNull(lowerBounds[0], "lowerBound"); + Objects.requireNonNull(lowerBounds[0], "lowerBound"); checkNotPrimitive(lowerBounds[0], 
"wildcard bounds"); checkArgument(upperBounds[0] == Object.class, "bounded both ways"); this.lowerBound = canonicalize(lowerBounds[0]); this.upperBound = Object.class; } else { - checkNotNull(upperBounds[0], "upperBound"); + Objects.requireNonNull(upperBounds[0], "upperBound"); checkNotPrimitive(upperBounds[0], "wildcard bounds"); this.lowerBound = null; this.upperBound = canonicalize(upperBounds[0]); diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/PrivateElementsImpl.java b/core/src/main/java/org/elasticsearch/common/inject/internal/PrivateElementsImpl.java index 47b1f53d364..5d2bbad2b2c 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/PrivateElementsImpl.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/PrivateElementsImpl.java @@ -30,10 +30,10 @@ import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; /** @@ -64,7 +64,7 @@ public final class PrivateElementsImpl implements PrivateElements { private Injector injector; public PrivateElementsImpl(Object source) { - this.source = checkNotNull(source, "source"); + this.source = Objects.requireNonNull(source, "source"); } @Override @@ -89,7 +89,7 @@ public final class PrivateElementsImpl implements PrivateElements { public void initInjector(Injector injector) { checkState(this.injector == null, "injector already initialized"); - this.injector = checkNotNull(injector, "injector"); + this.injector = Objects.requireNonNull(injector, "injector"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethodsModule.java b/core/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethodsModule.java index 
9884b88f43a..aa556edb372 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethodsModule.java +++ b/core/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethodsModule.java @@ -32,8 +32,7 @@ import java.lang.reflect.Member; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; - -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; /** * Creates bindings to methods annotated with {@literal @}{@link Provides}. Use the scope and @@ -47,7 +46,7 @@ public final class ProviderMethodsModule implements Module { private final TypeLiteral typeLiteral; private ProviderMethodsModule(Object delegate) { - this.delegate = checkNotNull(delegate, "delegate"); + this.delegate = Objects.requireNonNull(delegate, "delegate"); this.typeLiteral = TypeLiteral.get(this.delegate.getClass()); } diff --git a/core/src/main/java/org/elasticsearch/common/inject/matcher/Matchers.java b/core/src/main/java/org/elasticsearch/common/inject/matcher/Matchers.java index 2c31bfc27e0..7e663db2281 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/matcher/Matchers.java +++ b/core/src/main/java/org/elasticsearch/common/inject/matcher/Matchers.java @@ -22,9 +22,9 @@ import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.reflect.AnnotatedElement; import java.lang.reflect.Method; +import java.util.Objects; import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; /** * Matcher implementations. Supports matching classes and methods. 
@@ -73,7 +73,7 @@ public class Matchers { final Matcher delegate; private Not(Matcher delegate) { - this.delegate = checkNotNull(delegate, "delegate"); + this.delegate = Objects.requireNonNull(delegate, "delegate"); } @Override @@ -121,7 +121,7 @@ public class Matchers { private final Class annotationType; public AnnotatedWithType(Class annotationType) { - this.annotationType = checkNotNull(annotationType, "annotation type"); + this.annotationType = Objects.requireNonNull(annotationType, "annotation type"); checkForRuntimeRetention(annotationType); } @@ -163,7 +163,7 @@ public class Matchers { private final Annotation annotation; public AnnotatedWith(Annotation annotation) { - this.annotation = checkNotNull(annotation, "annotation"); + this.annotation = Objects.requireNonNull(annotation, "annotation"); checkForRuntimeRetention(annotation.annotationType()); } @@ -205,7 +205,7 @@ public class Matchers { private final Class superclass; public SubclassesOf(Class superclass) { - this.superclass = checkNotNull(superclass, "superclass"); + this.superclass = Objects.requireNonNull(superclass, "superclass"); } @Override @@ -244,7 +244,7 @@ public class Matchers { private final Object value; public Only(Object value) { - this.value = checkNotNull(value, "value"); + this.value = Objects.requireNonNull(value, "value"); } @Override @@ -283,7 +283,7 @@ public class Matchers { private final Object value; public IdenticalTo(Object value) { - this.value = checkNotNull(value, "value"); + this.value = Objects.requireNonNull(value, "value"); } @Override @@ -323,7 +323,7 @@ public class Matchers { private final String packageName; public InPackage(Package targetPackage) { - this.targetPackage = checkNotNull(targetPackage, "package"); + this.targetPackage = Objects.requireNonNull(targetPackage, "package"); this.packageName = targetPackage.getName(); } @@ -410,7 +410,7 @@ public class Matchers { private final Matcher> returnType; public Returns(Matcher> returnType) { - this.returnType = 
checkNotNull(returnType, "return type matcher"); + this.returnType = Objects.requireNonNull(returnType, "return type matcher"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java b/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java index a5646315e49..50c87d7795a 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java +++ b/core/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java @@ -39,6 +39,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; +import java.util.Objects; import java.util.Set; /** @@ -208,10 +209,10 @@ public abstract class Multibinder { private RealMultibinder(Binder binder, TypeLiteral elementType, String setName, Key> setKey) { - this.binder = checkNotNull(binder, "binder"); - this.elementType = checkNotNull(elementType, "elementType"); - this.setName = checkNotNull(setName, "setName"); - this.setKey = checkNotNull(setKey, "setKey"); + this.binder = Objects.requireNonNull(binder, "binder"); + this.elementType = Objects.requireNonNull(elementType, "elementType"); + this.setName = Objects.requireNonNull(setName, "setName"); + this.setKey = Objects.requireNonNull(setKey, "setKey"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/name/NamedImpl.java b/core/src/main/java/org/elasticsearch/common/inject/name/NamedImpl.java index 5f6a5c2cc14..8cf7af12f03 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/name/NamedImpl.java +++ b/core/src/main/java/org/elasticsearch/common/inject/name/NamedImpl.java @@ -18,15 +18,14 @@ package org.elasticsearch.common.inject.name; import java.io.Serializable; import java.lang.annotation.Annotation; - -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; class NamedImpl implements Named, Serializable { private final String value; 
public NamedImpl(String value) { - this.value = checkNotNull(value, "name"); + this.value = Objects.requireNonNull(value, "name"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java b/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java index b4d548d5754..9c50a1aa68c 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java @@ -16,7 +16,6 @@ package org.elasticsearch.common.inject.spi; -import com.google.common.collect.Sets; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.Binding; @@ -49,6 +48,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -141,7 +141,7 @@ public final class Elements { private RecordingBinder(Stage stage) { this.stage = stage; - this.modules = Sets.newHashSet(); + this.modules = new HashSet<>(); this.elements = new ArrayList<>(); this.source = null; this.sourceProvider = new SourceProvider().plusSkippedClasses( @@ -172,7 +172,7 @@ public final class Elements { */ private RecordingBinder(RecordingBinder parent, PrivateElementsImpl privateElements) { this.stage = parent.stage; - this.modules = Sets.newHashSet(); + this.modules = new HashSet<>(); this.elements = privateElements.getElementsMutable(); this.source = parent.source; this.sourceProvider = parent.sourceProvider; diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/InjectionRequest.java b/core/src/main/java/org/elasticsearch/common/inject/spi/InjectionRequest.java index 0db580ec7c4..f52e6c387e1 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/spi/InjectionRequest.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/InjectionRequest.java @@ -20,10 +20,9 @@ import 
org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.ConfigurationException; import org.elasticsearch.common.inject.TypeLiteral; +import java.util.Objects; import java.util.Set; -import static com.google.common.base.Preconditions.checkNotNull; - /** * A request to inject the instance fields and methods of an instance. Requests are created * explicitly in a module using {@link org.elasticsearch.common.inject.Binder#requestInjection(Object) @@ -41,9 +40,9 @@ public final class InjectionRequest implements Element { private final T instance; public InjectionRequest(Object source, TypeLiteral type, T instance) { - this.source = checkNotNull(source, "source"); - this.type = checkNotNull(type, "type"); - this.instance = checkNotNull(instance, "instance"); + this.source = Objects.requireNonNull(source, "source"); + this.type = Objects.requireNonNull(type, "type"); + this.instance = Objects.requireNonNull(instance, "instance"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/MembersInjectorLookup.java b/core/src/main/java/org/elasticsearch/common/inject/spi/MembersInjectorLookup.java index f5b8f983fe2..f53d66265c2 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/spi/MembersInjectorLookup.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/MembersInjectorLookup.java @@ -20,7 +20,8 @@ import org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.MembersInjector; import org.elasticsearch.common.inject.TypeLiteral; -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; + import static com.google.common.base.Preconditions.checkState; /** @@ -40,8 +41,8 @@ public final class MembersInjectorLookup implements Element { private MembersInjector delegate; public MembersInjectorLookup(Object source, TypeLiteral type) { - this.source = checkNotNull(source, "source"); - this.type = checkNotNull(type, "type"); + this.source = 
Objects.requireNonNull(source, "source"); + this.type = Objects.requireNonNull(type, "type"); } @Override @@ -68,7 +69,7 @@ public final class MembersInjectorLookup implements Element { */ public void initializeDelegate(MembersInjector delegate) { checkState(this.delegate == null, "delegate already initialized"); - this.delegate = checkNotNull(delegate, "delegate"); + this.delegate = Objects.requireNonNull(delegate, "delegate"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java b/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java index 796aa80bcd8..37aa3b9d0db 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/Message.java @@ -27,8 +27,6 @@ import java.util.Collections; import java.util.List; import java.util.Objects; -import static com.google.common.base.Preconditions.checkNotNull; - /** * An error message and the context in which it occurred. Messages are usually created internally by * Guice and its extensions. 
Messages can be created explicitly in a module using {@link @@ -52,7 +50,7 @@ public final class Message implements Serializable, Element { */ public Message(List sources, String message, Throwable cause) { this.sources = Collections.unmodifiableList(sources); - this.message = checkNotNull(message, "message"); + this.message = Objects.requireNonNull(message, "message"); this.cause = cause; } diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/ProviderLookup.java b/core/src/main/java/org/elasticsearch/common/inject/spi/ProviderLookup.java index 06a732b192a..61ae67a6e30 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/spi/ProviderLookup.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/ProviderLookup.java @@ -20,7 +20,8 @@ import org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.Key; import org.elasticsearch.common.inject.Provider; -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; + import static com.google.common.base.Preconditions.checkState; /** @@ -64,8 +65,8 @@ public final class ProviderLookup implements Element { private Provider delegate; public ProviderLookup(Object source, Key key) { - this.source = checkNotNull(source, "source"); - this.key = checkNotNull(key, "key"); + this.source = Objects.requireNonNull(source, "source"); + this.key = Objects.requireNonNull(key, "key"); } @Override @@ -89,7 +90,7 @@ public final class ProviderLookup implements Element { */ public void initializeDelegate(Provider delegate) { checkState(this.delegate == null, "delegate already initialized"); - this.delegate = checkNotNull(delegate, "delegate"); + this.delegate = Objects.requireNonNull(delegate, "delegate"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java b/core/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java index 74d3d6c8bc0..9db84af19a9 100644 --- 
a/core/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java @@ -20,8 +20,7 @@ import org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.Scope; import java.lang.annotation.Annotation; - -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; /** * Registration of a scope annotation with the scope that implements it. Instances are created @@ -40,9 +39,9 @@ public final class ScopeBinding implements Element { private final Scope scope; ScopeBinding(Object source, Class annotationType, Scope scope) { - this.source = checkNotNull(source, "source"); - this.annotationType = checkNotNull(annotationType, "annotationType"); - this.scope = checkNotNull(scope, "scope"); + this.source = Objects.requireNonNull(source, "source"); + this.annotationType = Objects.requireNonNull(annotationType, "annotationType"); + this.scope = Objects.requireNonNull(scope, "scope"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/StaticInjectionRequest.java b/core/src/main/java/org/elasticsearch/common/inject/spi/StaticInjectionRequest.java index 1d11f32e042..b7a052561df 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/spi/StaticInjectionRequest.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/StaticInjectionRequest.java @@ -19,10 +19,9 @@ package org.elasticsearch.common.inject.spi; import org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.ConfigurationException; +import java.util.Objects; import java.util.Set; -import static com.google.common.base.Preconditions.checkNotNull; - /** * A request to inject the static fields and methods of a type. 
Requests are created * explicitly in a module using {@link org.elasticsearch.common.inject.Binder#requestStaticInjection(Class[]) @@ -38,8 +37,8 @@ public final class StaticInjectionRequest implements Element { private final Class type; StaticInjectionRequest(Object source, Class type) { - this.source = checkNotNull(source, "source"); - this.type = checkNotNull(type, "type"); + this.source = Objects.requireNonNull(source, "source"); + this.type = Objects.requireNonNull(type, "type"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/TypeConverterBinding.java b/core/src/main/java/org/elasticsearch/common/inject/spi/TypeConverterBinding.java index 484a11b63dd..84215c7e5df 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/spi/TypeConverterBinding.java +++ b/core/src/main/java/org/elasticsearch/common/inject/spi/TypeConverterBinding.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.matcher.Matcher; -import static com.google.common.base.Preconditions.checkNotNull; +import java.util.Objects; /** * Registration of type converters for matching target types. 
Instances are created @@ -39,9 +39,9 @@ public final class TypeConverterBinding implements Element { TypeConverterBinding(Object source, Matcher> typeMatcher, TypeConverter typeConverter) { - this.source = checkNotNull(source, "source"); - this.typeMatcher = checkNotNull(typeMatcher, "typeMatcher"); - this.typeConverter = checkNotNull(typeConverter, "typeConverter"); + this.source = Objects.requireNonNull(source, "source"); + this.typeMatcher = Objects.requireNonNull(typeMatcher, "typeMatcher"); + this.typeConverter = Objects.requireNonNull(typeConverter, "typeConverter"); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/inject/util/Modules.java b/core/src/main/java/org/elasticsearch/common/inject/util/Modules.java index e950dc26348..6e0fdca22f2 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/util/Modules.java +++ b/core/src/main/java/org/elasticsearch/common/inject/util/Modules.java @@ -17,7 +17,6 @@ package org.elasticsearch.common.inject.util; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.Binding; @@ -36,6 +35,7 @@ import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -151,8 +151,8 @@ public final class Modules { final List elements = Elements.getElements(baseModules); final List overrideElements = Elements.getElements(overrides); - final Set overriddenKeys = Sets.newHashSet(); - final Set> overridesScopeAnnotations = Sets.newHashSet(); + final Set overriddenKeys = new HashSet<>(); + final Set> overridesScopeAnnotations = new HashSet<>(); // execute the overrides module, keeping track of which keys and scopes are bound new ModuleWriter(binder()) { @@ -201,7 +201,7 @@ public final class 
Modules { PrivateBinder privateBinder = binder.withSource(privateElements.getSource()) .newPrivateBinder(); - Set> skippedExposes = Sets.newHashSet(); + Set> skippedExposes = new HashSet<>(); for (Key key : privateElements.getExposedKeys()) { if (overriddenKeys.remove(key)) { diff --git a/core/src/main/java/org/elasticsearch/common/io/Streams.java b/core/src/main/java/org/elasticsearch/common/io/Streams.java index e6265ab01ed..5d798635863 100644 --- a/core/src/main/java/org/elasticsearch/common/io/Streams.java +++ b/core/src/main/java/org/elasticsearch/common/io/Streams.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.io; import com.google.common.base.Charsets; -import com.google.common.base.Preconditions; import org.elasticsearch.common.util.Callback; import java.io.BufferedReader; @@ -33,6 +32,7 @@ import java.io.StringWriter; import java.io.Writer; import java.util.ArrayList; import java.util.List; +import java.util.Objects; /** * Simple utility methods for file and stream copying. 
@@ -66,8 +66,8 @@ public abstract class Streams { * @throws IOException in case of I/O errors */ public static long copy(InputStream in, OutputStream out, byte[] buffer) throws IOException { - Preconditions.checkNotNull(in, "No InputStream specified"); - Preconditions.checkNotNull(out, "No OutputStream specified"); + Objects.requireNonNull(in, "No InputStream specified"); + Objects.requireNonNull(out, "No OutputStream specified"); try { long byteCount = 0; int bytesRead; @@ -100,8 +100,8 @@ public abstract class Streams { * @throws IOException in case of I/O errors */ public static void copy(byte[] in, OutputStream out) throws IOException { - Preconditions.checkNotNull(in, "No input byte array specified"); - Preconditions.checkNotNull(out, "No OutputStream specified"); + Objects.requireNonNull(in, "No input byte array specified"); + Objects.requireNonNull(out, "No OutputStream specified"); try { out.write(in); } finally { @@ -128,8 +128,8 @@ public abstract class Streams { * @throws IOException in case of I/O errors */ public static int copy(Reader in, Writer out) throws IOException { - Preconditions.checkNotNull(in, "No Reader specified"); - Preconditions.checkNotNull(out, "No Writer specified"); + Objects.requireNonNull(in, "No Reader specified"); + Objects.requireNonNull(out, "No Writer specified"); try { int byteCount = 0; char[] buffer = new char[BUFFER_SIZE]; @@ -163,8 +163,8 @@ public abstract class Streams { * @throws IOException in case of I/O errors */ public static void copy(String in, Writer out) throws IOException { - Preconditions.checkNotNull(in, "No input String specified"); - Preconditions.checkNotNull(out, "No Writer specified"); + Objects.requireNonNull(in, "No input String specified"); + Objects.requireNonNull(out, "No Writer specified"); try { out.write(in); } finally { diff --git a/core/src/main/java/org/elasticsearch/common/joda/DateMathParser.java b/core/src/main/java/org/elasticsearch/common/joda/DateMathParser.java index 
7246db21344..65ec7e7c2b4 100644 --- a/core/src/main/java/org/elasticsearch/common/joda/DateMathParser.java +++ b/core/src/main/java/org/elasticsearch/common/joda/DateMathParser.java @@ -24,10 +24,9 @@ import org.joda.time.DateTimeZone; import org.joda.time.MutableDateTime; import org.joda.time.format.DateTimeFormatter; +import java.util.Objects; import java.util.concurrent.Callable; -import static com.google.common.base.Preconditions.checkNotNull; - /** * A parser for date/time formatted text with optional date math. * @@ -40,7 +39,7 @@ public class DateMathParser { private final FormatDateTimeFormatter dateTimeFormatter; public DateMathParser(FormatDateTimeFormatter dateTimeFormatter) { - checkNotNull(dateTimeFormatter); + Objects.requireNonNull(dateTimeFormatter); this.dateTimeFormatter = dateTimeFormatter; } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllEntries.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllEntries.java index 5106cdaf979..d7892c9b8c4 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllEntries.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/all/AllEntries.java @@ -25,12 +25,11 @@ import org.elasticsearch.common.io.FastStringReader; import java.io.IOException; import java.io.Reader; import java.util.ArrayList; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; -import static com.google.common.collect.Sets.newHashSet; - /** * */ @@ -136,7 +135,7 @@ public class AllEntries extends Reader { } public Set fields() { - Set fields = newHashSet(); + Set fields = new HashSet<>(); for (Entry entry : entries) { fields.add(entry.name()); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java index 9853659ca06..a59af2c7f51 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java +++ 
b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java @@ -64,6 +64,9 @@ public final class AllTermQuery extends Query { @Override public Query rewrite(IndexReader reader) throws IOException { + if (getBoost() != 1f) { + return super.rewrite(reader); + } boolean fieldExists = false; boolean hasPayloads = false; for (LeafReaderContext context : reader.leaves()) { @@ -98,7 +101,7 @@ public final class AllTermQuery extends Query { final CollectionStatistics collectionStats = searcher.collectionStatistics(term.field()); final TermStatistics termStats = searcher.termStatistics(term, termStates); final Similarity similarity = searcher.getSimilarity(needsScores); - final SimWeight stats = similarity.computeWeight(getBoost(), collectionStats, termStats); + final SimWeight stats = similarity.computeWeight(collectionStats, termStats); return new Weight(this) { @Override diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java index 3990ccae8ef..3d870bc0794 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -120,6 +120,9 @@ public class MultiPhrasePrefixQuery extends Query { @Override public Query rewrite(IndexReader reader) throws IOException { + if (getBoost() != 1.0F) { + return super.rewrite(reader); + } if (termArrays.isEmpty()) { return new MatchNoDocsQuery(); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/BoostScoreFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/BoostScoreFunction.java deleted file mode 100644 index 13b45261284..00000000000 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/BoostScoreFunction.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to Elasticsearch under one or 
more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.lucene.search.function; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Explanation; - -/** - * - */ -@Deprecated -public class BoostScoreFunction extends ScoreFunction { - - public static final String BOOST_WEIGHT_ERROR_MESSAGE = "'boost_factor' and 'weight' cannot be used together. 
Use 'weight'."; - - private final float boost; - - public BoostScoreFunction(float boost) { - super(CombineFunction.MULT); - this.boost = boost; - } - - public float getBoost() { - return boost; - } - - @Override - public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) { - return new LeafScoreFunction() { - - @Override - public double score(int docId, float subQueryScore) { - return boost; - } - - @Override - public Explanation explainScore(int docId, Explanation subQueryScore) { - return Explanation.match(boost, "static boost factor", Explanation.match(boost, "boostFactor")); - } - }; - } - - @Override - public boolean needsScores() { - return false; - } - - @Override - public String toString() { - return "boost[" + boost + "]"; - } - -} diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java index 30c8f01b709..41a5b859520 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java @@ -24,8 +24,8 @@ import org.apache.lucene.search.Explanation; public enum CombineFunction { MULT { @Override - public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { - return toFloat(queryBoost * queryScore * Math.min(funcScore, maxBoost)); + public float combine(double queryScore, double funcScore, double maxBoost) { + return toFloat(queryScore * Math.min(funcScore, maxBoost)); } @Override @@ -34,21 +34,20 @@ public enum CombineFunction { } @Override - public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { - float score = queryBoost * Math.min(funcExpl.getValue(), maxBoost) * queryExpl.getValue(); + public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation boostExpl = 
Explanation.match(maxBoost, "maxBoost"); Explanation minExpl = Explanation.match( Math.min(funcExpl.getValue(), maxBoost), "min of:", funcExpl, boostExpl); - return Explanation.match(score, "function score, product of:", - queryExpl, minExpl, Explanation.match(queryBoost, "queryBoost")); + return Explanation.match(queryExpl.getValue() * minExpl.getValue(), + "function score, product of:", queryExpl, minExpl); } }, REPLACE { @Override - public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { - return toFloat(queryBoost * Math.min(funcScore, maxBoost)); + public float combine(double queryScore, double funcScore, double maxBoost) { + return toFloat(Math.min(funcScore, maxBoost)); } @Override @@ -57,22 +56,19 @@ public enum CombineFunction { } @Override - public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { - float score = queryBoost * Math.min(funcExpl.getValue(), maxBoost); + public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation boostExpl = Explanation.match(maxBoost, "maxBoost"); - Explanation minExpl = Explanation.match( + return Explanation.match( Math.min(funcExpl.getValue(), maxBoost), "min of:", funcExpl, boostExpl); - return Explanation.match(score, "function score, product of:", - minExpl, Explanation.match(queryBoost, "queryBoost")); } }, SUM { @Override - public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { - return toFloat(queryBoost * (queryScore + Math.min(funcScore, maxBoost))); + public float combine(double queryScore, double funcScore, double maxBoost) { + return toFloat(queryScore + Math.min(funcScore, maxBoost)); } @Override @@ -81,21 +77,18 @@ public enum CombineFunction { } @Override - public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { - float score = queryBoost * (Math.min(funcExpl.getValue(), maxBoost) + 
queryExpl.getValue()); + public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:", funcExpl, Explanation.match(maxBoost, "maxBoost")); - Explanation sumExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue(), "sum of", + return Explanation.match(Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue(), "sum of", queryExpl, minExpl); - return Explanation.match(score, "function score, product of:", - sumExpl, Explanation.match(queryBoost, "queryBoost")); } }, AVG { @Override - public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { - return toFloat((queryBoost * (Math.min(funcScore, maxBoost) + queryScore) / 2.0)); + public float combine(double queryScore, double funcScore, double maxBoost) { + return toFloat((Math.min(funcScore, maxBoost) + queryScore) / 2.0); } @Override @@ -104,22 +97,19 @@ public enum CombineFunction { } @Override - public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { - float score = toFloat(queryBoost * (queryExpl.getValue() + Math.min(funcExpl.getValue(), maxBoost)) / 2.0); + public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:", funcExpl, Explanation.match(maxBoost, "maxBoost")); - Explanation avgExpl = Explanation.match( + return Explanation.match( toFloat((Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue()) / 2.0), "avg of", queryExpl, minExpl); - return Explanation.match(score, "function score, product of:", - avgExpl, Explanation.match(queryBoost, "queryBoost")); } }, MIN { @Override - public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { - return toFloat(queryBoost * Math.min(queryScore, Math.min(funcScore, 
maxBoost))); + public float combine(double queryScore, double funcScore, double maxBoost) { + return toFloat(Math.min(queryScore, Math.min(funcScore, maxBoost))); } @Override @@ -128,23 +118,20 @@ public enum CombineFunction { } @Override - public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { - float score = toFloat(queryBoost * Math.min(queryExpl.getValue(), Math.min(funcExpl.getValue(), maxBoost))); + public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation innerMinExpl = Explanation.match( Math.min(funcExpl.getValue(), maxBoost), "min of:", funcExpl, Explanation.match(maxBoost, "maxBoost")); - Explanation outerMinExpl = Explanation.match( + return Explanation.match( Math.min(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "min of", queryExpl, innerMinExpl); - return Explanation.match(score, "function score, product of:", - outerMinExpl, Explanation.match(queryBoost, "queryBoost")); } }, MAX { @Override - public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { - return toFloat(queryBoost * (Math.max(queryScore, Math.min(funcScore, maxBoost)))); + public float combine(double queryScore, double funcScore, double maxBoost) { + return toFloat(Math.max(queryScore, Math.min(funcScore, maxBoost))); } @Override @@ -153,21 +140,18 @@ public enum CombineFunction { } @Override - public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { - float score = toFloat(queryBoost * Math.max(queryExpl.getValue(), Math.min(funcExpl.getValue(), maxBoost))); + public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation innerMinExpl = Explanation.match( Math.min(funcExpl.getValue(), maxBoost), "min of:", funcExpl, Explanation.match(maxBoost, "maxBoost")); - Explanation outerMaxExpl = Explanation.match( + return Explanation.match( 
Math.max(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "max of:", queryExpl, innerMinExpl); - return Explanation.match(score, "function score, product of:", - outerMaxExpl, Explanation.match(queryBoost, "queryBoost")); } }; - public abstract float combine(double queryBoost, double queryScore, double funcScore, double maxBoost); + public abstract float combine(double queryScore, double funcScore, double maxBoost); public abstract String getName(); @@ -181,5 +165,5 @@ public enum CombineFunction { return Double.compare(floatVersion, input) == 0 || input == 0.0d ? 0 : 1.d - (floatVersion) / input; } - public abstract Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost); + public abstract Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java index b4ddaf2acf8..709c7df7898 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/CustomBoostFactorScorer.java @@ -21,13 +21,11 @@ package org.elasticsearch.common.lucene.search.function; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; -import org.apache.lucene.util.BytesRef; import java.io.IOException; abstract class CustomBoostFactorScorer extends Scorer { - final float subQueryBoost; final Scorer scorer; final float maxBoost; final CombineFunction scoreCombiner; @@ -43,7 +41,6 @@ abstract class CustomBoostFactorScorer extends Scorer { } else { nextDoc = new MinScoreNextDoc(); } - this.subQueryBoost = w.getQuery().getBoost(); this.scorer = scorer; this.maxBoost = maxBoost; this.scoreCombiner = scoreCombiner; diff --git 
a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index e95da1d8731..ebe25b85d80 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -114,6 +114,9 @@ public class FiltersFunctionScoreQuery extends Query { @Override public Query rewrite(IndexReader reader) throws IOException { + if (getBoost() != 1.0F) { + return super.rewrite(reader); + } Query newQ = subQuery.rewrite(reader); if (newQ == subQuery) return this; @@ -158,14 +161,12 @@ public class FiltersFunctionScoreQuery extends Query { @Override public float getValueForNormalization() throws IOException { - float sum = subQueryWeight.getValueForNormalization(); - sum *= getBoost() * getBoost(); - return sum; + return subQueryWeight.getValueForNormalization(); } @Override - public void normalize(float norm, float topLevelBoost) { - subQueryWeight.normalize(norm, topLevelBoost * getBoost()); + public void normalize(float norm, float boost) { + subQueryWeight.normalize(norm, boost); } @Override @@ -219,10 +220,7 @@ public class FiltersFunctionScoreQuery extends Query { } } if (filterExplanations.size() == 0) { - float sc = getBoost() * subQueryExpl.getValue(); - return Explanation.match(sc, "function score, no filter match, product of:", - subQueryExpl, - Explanation.match(getBoost(), "queryBoost")); + return subQueryExpl; } // Second: Compute the factor that would have been computed by the @@ -266,7 +264,7 @@ public class FiltersFunctionScoreQuery extends Query { CombineFunction.toFloat(factor), "function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]", filterExplanations); - return combineFunction.explain(getBoost(), subQueryExpl, factorExplanation, maxBoost); + return 
combineFunction.explain(subQueryExpl, factorExplanation, maxBoost); } } @@ -348,7 +346,7 @@ public class FiltersFunctionScoreQuery extends Query { } } } - return scoreCombiner.combine(subQueryBoost, subQueryScore, factor, maxBoost); + return scoreCombiner.combine(subQueryScore, factor, maxBoost); } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index 448eda8154c..2a8829632d4 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -76,6 +76,9 @@ public class FunctionScoreQuery extends Query { @Override public Query rewrite(IndexReader reader) throws IOException { + if (getBoost() != 1.0F) { + return super.rewrite(reader); + } Query newQ = subQuery.rewrite(reader); if (newQ == subQuery) { return this; @@ -117,14 +120,12 @@ public class FunctionScoreQuery extends Query { @Override public float getValueForNormalization() throws IOException { - float sum = subQueryWeight.getValueForNormalization(); - sum *= getBoost() * getBoost(); - return sum; + return subQueryWeight.getValueForNormalization(); } @Override - public void normalize(float norm, float topLevelBoost) { - subQueryWeight.normalize(norm, topLevelBoost * getBoost()); + public void normalize(float norm, float boost) { + subQueryWeight.normalize(norm, boost); } @Override @@ -148,7 +149,7 @@ public class FunctionScoreQuery extends Query { } if (function != null) { Explanation functionExplanation = function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl); - return combineFunction.explain(getBoost(), subQueryExpl, functionExplanation, maxBoost); + return combineFunction.explain(subQueryExpl, functionExplanation, maxBoost); } else { return subQueryExpl; } @@ -174,9 +175,9 @@ public class FunctionScoreQuery 
extends Query { // are needed float score = needsScores ? scorer.score() : 0f; if (function == null) { - return subQueryBoost * score; + return score; } else { - return scoreCombiner.combine(subQueryBoost, score, + return scoreCombiner.combine(score, function.score(scorer.docID(), score), maxBoost); } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java index 2def0712b1a..c585da42814 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java @@ -35,9 +35,6 @@ public class WeightFactorFunction extends ScoreFunction { public WeightFactorFunction(float weight, ScoreFunction scoreFunction) { super(CombineFunction.MULT); - if (scoreFunction instanceof BoostScoreFunction) { - throw new IllegalArgumentException(BoostScoreFunction.BOOST_WEIGHT_ERROR_MESSAGE); - } if (scoreFunction == null) { this.scoreFunction = SCORE_ONE; } else { diff --git a/core/src/main/java/org/elasticsearch/common/property/PropertyPlaceholder.java b/core/src/main/java/org/elasticsearch/common/property/PropertyPlaceholder.java index a4b067e1cea..11f7d8b67ce 100644 --- a/core/src/main/java/org/elasticsearch/common/property/PropertyPlaceholder.java +++ b/core/src/main/java/org/elasticsearch/common/property/PropertyPlaceholder.java @@ -19,10 +19,10 @@ package org.elasticsearch.common.property; -import com.google.common.base.Preconditions; import org.elasticsearch.common.Strings; import java.util.HashSet; +import java.util.Objects; import java.util.Properties; import java.util.Set; @@ -61,8 +61,8 @@ public class PropertyPlaceholder { */ public PropertyPlaceholder(String placeholderPrefix, String placeholderSuffix, boolean ignoreUnresolvablePlaceholders) { - Preconditions.checkNotNull(placeholderPrefix, "Argument 
'placeholderPrefix' must not be null."); - Preconditions.checkNotNull(placeholderSuffix, "Argument 'placeholderSuffix' must not be null."); + Objects.requireNonNull(placeholderPrefix, "Argument 'placeholderPrefix' must not be null."); + Objects.requireNonNull(placeholderSuffix, "Argument 'placeholderSuffix' must not be null."); this.placeholderPrefix = placeholderPrefix; this.placeholderSuffix = placeholderSuffix; this.ignoreUnresolvablePlaceholders = ignoreUnresolvablePlaceholders; @@ -77,7 +77,7 @@ public class PropertyPlaceholder { * @return the supplied value with placeholders replaced inline. */ public String replacePlaceholders(String value, PlaceholderResolver placeholderResolver) { - Preconditions.checkNotNull(value, "Argument 'value' must not be null."); + Objects.requireNonNull(value, "Argument 'value' must not be null."); return parseStringValue(value, placeholderResolver, new HashSet()); } diff --git a/core/src/main/java/org/elasticsearch/common/recycler/Recyclers.java b/core/src/main/java/org/elasticsearch/common/recycler/Recyclers.java index daff885733a..5bac8f7bcfd 100644 --- a/core/src/main/java/org/elasticsearch/common/recycler/Recyclers.java +++ b/core/src/main/java/org/elasticsearch/common/recycler/Recyclers.java @@ -20,8 +20,8 @@ package org.elasticsearch.common.recycler; import com.carrotsearch.hppc.BitMixer; -import com.google.common.collect.Queues; -import org.elasticsearch.ElasticsearchException; + +import java.util.ArrayDeque; public enum Recyclers { ; @@ -44,7 +44,7 @@ public enum Recyclers { * Return a recycler based on a deque. 
*/ public static Recycler deque(Recycler.C c, int limit) { - return new DequeRecycler<>(c, Queues.newArrayDeque(), limit); + return new DequeRecycler<>(c, new ArrayDeque<>(), limit); } /** diff --git a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java b/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java index b26039141c2..e840722f48e 100644 --- a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java +++ b/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.util; import com.google.common.base.Charsets; -import com.google.common.collect.Sets; import com.google.common.primitives.Ints; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.IndexWriter; @@ -38,13 +37,25 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.Index; -import org.elasticsearch.index.shard.*; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardPath; +import org.elasticsearch.index.shard.ShardStateMetaData; import java.io.IOException; import java.io.PrintStream; -import java.nio.file.*; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.FileVisitResult; +import java.nio.file.FileVisitor; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.nio.file.attribute.BasicFileAttributes; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; /** */ @@ -343,7 +354,7 @@ public class MultiDataPathUpgrader { } private static Set findAllShardIds(Path... 
locations) throws IOException { - final Set shardIds = Sets.newHashSet(); + final Set shardIds = new HashSet<>(); for (final Path location : locations) { if (Files.isDirectory(location)) { shardIds.addAll(findAllShardsForIndex(location)); diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java index a48eb60a2d8..ae806713bfd 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/BaseFuture.java @@ -22,11 +22,10 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.Nullable; import org.elasticsearch.transport.Transports; +import java.util.Objects; import java.util.concurrent.*; import java.util.concurrent.locks.AbstractQueuedSynchronizer; -import static com.google.common.base.Preconditions.checkNotNull; - /** * An abstract implementation of the {@link com.google.common.util.concurrent.ListenableFuture} interface. This * class is preferable to {@link java.util.concurrent.FutureTask} for two @@ -178,7 +177,7 @@ public abstract class BaseFuture implements Future { * @throws Error if the throwable was an {@link Error}. 
*/ protected boolean setException(Throwable throwable) { - boolean result = sync.setException(checkNotNull(throwable)); + boolean result = sync.setException(Objects.requireNonNull(throwable)); if (result) { done(); } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java index 791afa3b917..74a1c13ce3d 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.util.concurrent; +import java.util.Collections; import java.util.Deque; import java.util.Queue; import java.util.Set; @@ -29,7 +30,6 @@ import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.LinkedTransferQueue; -import com.google.common.collect.Sets; /** * @@ -67,7 +67,7 @@ public abstract class ConcurrentCollections { } public static Set newConcurrentSet() { - return Sets.newSetFromMap(ConcurrentCollections.newConcurrentMap()); + return Collections.newSetFromMap(ConcurrentCollections.newConcurrentMap()); } public static Queue newQueue() { diff --git a/core/src/main/java/org/elasticsearch/common/util/set/Sets.java b/core/src/main/java/org/elasticsearch/common/util/set/Sets.java new file mode 100644 index 00000000000..4b323c42a37 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/util/set/Sets.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.util.set; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +public final class Sets { + private Sets() { + } + + public static HashSet newHashSet(Iterator iterator) { + Objects.requireNonNull(iterator); + HashSet set = new HashSet<>(); + while (iterator.hasNext()) { + set.add(iterator.next()); + } + return set; + } + + public static HashSet newHashSet(Iterable iterable) { + Objects.requireNonNull(iterable); + return iterable instanceof Collection ? new HashSet<>((Collection)iterable) : newHashSet(iterable.iterator()); + } + + public static HashSet newHashSet(T... 
elements) { + Objects.requireNonNull(elements); + HashSet set = new HashSet<>(elements.length); + Collections.addAll(set, elements); + return set; + } + + public static Set newConcurrentHashSet() { + return Collections.newSetFromMap(new ConcurrentHashMap<>()); + } + + public static boolean haveEmptyIntersection(Set left, Set right) { + Objects.requireNonNull(left); + Objects.requireNonNull(right); + return !left.stream().anyMatch(k -> right.contains(k)); + } + + public static Set difference(Set left, Set right) { + Objects.requireNonNull(left); + Objects.requireNonNull(right); + return left.stream().filter(k -> !right.contains(k)).collect(Collectors.toSet()); + } + + public static Set union(Set left, Set right) { + Objects.requireNonNull(left); + Objects.requireNonNull(right); + Set union = new HashSet<>(left); + union.addAll(right); + return union; + } +} diff --git a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java index f1f69a65f26..03cdd4b886c 100644 --- a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java @@ -20,13 +20,19 @@ package org.elasticsearch.discovery.local; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateNonMasterUpdateTask; +import org.elasticsearch.cluster.Diff; +import org.elasticsearch.cluster.IncompatibleClusterStateVersionException; +import org.elasticsearch.cluster.ProcessedClusterStateNonMasterUpdateTask; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeService; import 
org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingService; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; @@ -36,7 +42,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.discovery.*; +import org.elasticsearch.discovery.AckClusterStatePublishResponseHandler; +import org.elasticsearch.discovery.BlockingClusterStatePublishResponseHandler; +import org.elasticsearch.discovery.Discovery; +import org.elasticsearch.discovery.DiscoveryService; +import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.discovery.InitialStateDiscoveryListener; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.transport.TransportService; @@ -48,7 +59,6 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; -import static com.google.common.collect.Sets.newHashSet; import static org.elasticsearch.cluster.ClusterState.Builder; /** @@ -227,7 +237,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent implem firstMaster.master = true; } - final Set newMembers = newHashSet(); + final Set newMembers = new HashSet<>(); for (LocalDiscovery discovery : clusterGroup.members()) { newMembers.add(discovery.localNode.id()); } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 6b52531c18c..878e5bdb535 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ 
b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -19,7 +19,6 @@ package org.elasticsearch.discovery.zen; -import com.google.common.collect.Sets; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -73,6 +72,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Queue; @@ -948,9 +948,9 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen } // nodes discovered during pinging - Set activeNodes = Sets.newHashSet(); + Set activeNodes = new HashSet<>(); // nodes discovered who has previously been part of the cluster and do not ping for the very first time - Set joinedOnceActiveNodes = Sets.newHashSet(); + Set joinedOnceActiveNodes = new HashSet<>(); if (localNode.masterNode()) { activeNodes.add(localNode); long joinsCounter = clusterJoinsCounter.get(); diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 725ec82ac8d..06562f82db5 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -20,12 +20,15 @@ package org.elasticsearch.env; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; - import com.google.common.primitives.Ints; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.SegmentInfos; -import org.apache.lucene.store.*; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.Lock; +import org.apache.lucene.store.LockObtainFailedException; +import org.apache.lucene.store.NativeFSLockFactory; +import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.IOUtils; import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -34,7 +37,6 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; @@ -46,8 +48,21 @@ import org.elasticsearch.monitor.fs.FsProbe; import java.io.Closeable; import java.io.IOException; -import java.nio.file.*; -import java.util.*; +import java.nio.file.AtomicMoveNotSupportedException; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -643,7 +658,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { throw new IllegalStateException("node is not configured to store local location"); } assert assertEnvIsLocked(); - Set indices = Sets.newHashSet(); + Set indices = new HashSet<>(); for (NodePath nodePath : nodePaths) { Path indicesLocation = nodePath.indicesPath; if (Files.isDirectory(indicesLocation)) { @@ -673,7 +688,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { throw new IllegalStateException("node is not configured to store local location"); } assert assertEnvIsLocked(); - final Set shardIds = Sets.newHashSet(); + final Set shardIds = new HashSet<>(); String indexName = index.name(); for 
(final NodePath nodePath : nodePaths) { Path location = nodePath.indicesPath; diff --git a/core/src/main/java/org/elasticsearch/http/HttpServerModule.java b/core/src/main/java/org/elasticsearch/http/HttpServerModule.java index 6030ac06bc8..49d67369643 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpServerModule.java +++ b/core/src/main/java/org/elasticsearch/http/HttpServerModule.java @@ -19,13 +19,14 @@ package org.elasticsearch.http; -import com.google.common.base.Preconditions; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.netty.NettyHttpServerTransport; +import java.util.Objects; + /** * */ @@ -50,8 +51,8 @@ public class HttpServerModule extends AbstractModule { } public void setHttpServerTransport(Class httpServerTransport, String source) { - Preconditions.checkNotNull(httpServerTransport, "Configured http server transport may not be null"); - Preconditions.checkNotNull(source, "Plugin, that changes transport may not be null"); + Objects.requireNonNull(httpServerTransport, "Configured http server transport may not be null"); + Objects.requireNonNull(source, "Plugin, that changes transport may not be null"); logger.info("Using [{}] as http transport, overridden by [{}]", httpServerTransportClass.getName(), source); this.httpServerTransportClass = httpServerTransport; } diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index 54cbccadf25..92434d340ca 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.engine; -import com.google.common.base.Preconditions; - import org.apache.lucene.index.DirectoryReader; import 
org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.IndexCommit; @@ -71,6 +69,7 @@ import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Condition; @@ -100,8 +99,8 @@ public abstract class Engine implements Closeable { protected volatile Throwable failedEngine = null; protected Engine(EngineConfig engineConfig) { - Preconditions.checkNotNull(engineConfig.getStore(), "Store must be provided to the engine"); - Preconditions.checkNotNull(engineConfig.getDeletionPolicy(), "Snapshot deletion policy must be provided to the engine"); + Objects.requireNonNull(engineConfig.getStore(), "Store must be provided to the engine"); + Objects.requireNonNull(engineConfig.getDeletionPolicy(), "Snapshot deletion policy must be provided to the engine"); this.engineConfig = engineConfig; this.shardId = engineConfig.getShardId(); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java index e809a2e1610..89962093fb4 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java @@ -72,7 +72,7 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData { } public Collection simpleMatchToFullName(String pattern) { - Set fields = Sets.newHashSet(); + Set fields = new HashSet<>(); for (FieldMapper fieldMapper : this) { if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) { fields.add(fieldMapper.fieldType().names().fullName()); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java 
b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 1dfca8b2949..54b2c98b71d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.mapper; -import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSet; @@ -68,6 +67,7 @@ import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.concurrent.locks.ReentrantReadWriteLock; /** @@ -142,7 +142,7 @@ public class DocumentMapper implements ToXContent { } public DocumentMapper build(MapperService mapperService, DocumentMapperParser docMapperParser) { - Preconditions.checkNotNull(rootObjectMapper, "Mapper builder must have the root object mapper set"); + Objects.requireNonNull(rootObjectMapper, "Mapper builder must have the root object mapper set"); return new DocumentMapper(mapperService, indexSettings, docMapperParser, rootObjectMapper, meta, rootMappers, sourceTransforms, mapperService.mappingLock); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 8f51271d06a..db2919e0217 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.mapper; -import com.google.common.collect.Sets; - import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -34,20 +32,20 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType; import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; -import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType; import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.mapper.object.ArrayValueMapperParser; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.RootObjectMapper; -import org.elasticsearch.percolator.PercolatorService; import java.io.Closeable; import java.io.IOException; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -169,7 +167,7 @@ class DocumentParser implements Closeable { } // apply doc boost if (context.docBoost() != 1.0f) { - Set encounteredFields = Sets.newHashSet(); + Set encounteredFields = new HashSet<>(); for (ParseContext.Document doc : context.docs()) { encounteredFields.clear(); for (IndexableField field : doc) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index 4dfba263e23..7a416dbaac7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -21,12 +21,12 @@ package org.elasticsearch.index.mapper; import com.google.common.base.Function; import com.google.common.collect.Iterators; -import com.google.common.collect.Sets; import org.elasticsearch.common.collect.CopyOnWriteHashMap; import org.elasticsearch.common.regex.Regex; import java.util.ArrayList; 
import java.util.Collection; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; @@ -152,7 +152,7 @@ class FieldTypeLookup implements Iterable { * Returns a list of the index names of a simple match regex like pattern against full name and index name. */ public Collection simpleMatchToIndexNames(String pattern) { - Set fields = Sets.newHashSet(); + Set fields = new HashSet<>(); for (MappedFieldType fieldType : this) { if (Regex.simpleMatch(pattern, fieldType.names().fullName())) { fields.add(fieldType.names().indexName()); @@ -167,7 +167,7 @@ class FieldTypeLookup implements Iterable { * Returns a list of the full names of a simple match regex like pattern against full name and index name. */ public Collection simpleMatchToFullName(String pattern) { - Set fields = Sets.newHashSet(); + Set fields = new HashSet<>(); for (MappedFieldType fieldType : this) { if (Regex.simpleMatch(pattern, fieldType.names().fullName())) { fields.add(fieldType.names().fullName()); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index 3027c53f2ac..3a3a8549151 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.index.mapper.core; -import com.google.common.collect.Sets; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.codecs.PostingsFormat; @@ -29,6 +28,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index 9e24b50cc04..686cfcfe6e2 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -219,6 +219,9 @@ public class DateFieldMapper extends NumberFieldMapper { @Override public Query rewrite(IndexReader reader) throws IOException { + if (getBoost() != 1.0F) { + return super.rewrite(reader); + } return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index 160c8ed61f9..b2e572f5291 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.mapper.object; -import com.google.common.collect.Sets; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; @@ -38,6 +37,7 @@ import org.elasticsearch.index.settings.IndexSettings; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -66,7 +66,7 @@ public class RootObjectMapper extends ObjectMapper { protected final List dynamicTemplates = new ArrayList<>(); // we use this to filter out seen date formats, because we might get duplicates during merging - protected Set seenDateFormats = Sets.newHashSet(); + protected Set seenDateFormats = new 
HashSet<>(); protected List dynamicDateTimeFormatters = new ArrayList<>(); protected boolean dateDetection = Defaults.DATE_DETECTION; diff --git a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java index 5de9a281674..4590b71c837 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java @@ -51,11 +51,6 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "has_parent"; + public static final boolean DEFAULT_SCORE = false; private final QueryBuilder query; private final String type; - private boolean score = false; + private boolean score = DEFAULT_SCORE; private QueryInnerHits innerHit; /** @@ -245,4 +246,4 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder facade to parse if available, - // or delay parsing if not. if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) { iqb = parseContext.parseInnerQueryBuilder(); } else if ("inner_hits".equals(currentFieldName)) { @@ -70,12 +66,13 @@ public class HasParentQueryParser extends BaseQueryParser { if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) { parentType = parser.text(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, SCORE_FIELD)) { - // deprecated we use a boolean now - String scoreTypeValue = parser.text(); - if ("score".equals(scoreTypeValue)) { + String scoreModeValue = parser.text(); + if ("score".equals(scoreModeValue)) { score = true; - } else if ("none".equals(scoreTypeValue)) { + } else if ("none".equals(scoreModeValue)) { score = false; + } else { + throw new QueryParsingException(parseContext, "[has_parent] query does not support [" + scoreModeValue + "] as an option for score_mode"); } } else if ("score".equals(currentFieldName)) { score = parser.booleanValue(); diff 
--git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java index 3747855a832..07adeeceedb 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import com.google.common.collect.Sets; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.Fields; import org.apache.lucene.queries.TermsQuery; @@ -41,7 +40,13 @@ import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; import static org.elasticsearch.index.mapper.Uid.createUidAsBytes; @@ -178,7 +183,7 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp { likeItems.add(Item.parse(parser, parseContext.parseFieldMatcher(), new Item())); } } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.STOP_WORDS)) { - Set stopWords = Sets.newHashSet(); + Set stopWords = new HashSet<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { stopWords.add(parser.text()); } diff --git a/core/src/main/java/org/elasticsearch/index/query/ScoreType.java b/core/src/main/java/org/elasticsearch/index/query/ScoreType.java deleted file mode 100644 index 6286a9d5915..00000000000 --- a/core/src/main/java/org/elasticsearch/index/query/ScoreType.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.query; - - -/** - * Defines how scores from child documents are mapped into the parent document. - */ -public enum ScoreType { - /** - * Only the lowest score of all matching child documents is mapped into the - * parent. - */ - MIN, - /** - * Only the highest score of all matching child documents is mapped into the - * parent. - */ - MAX, - - /** - * The average score based on all matching child documents are mapped into - * the parent. - */ - AVG, - - /** - * The matching children scores is summed up and mapped into the parent. 
- */ - SUM, - - /** - * Scores are not taken into account - */ - NONE; - - - public static ScoreType fromString(String type) { - if ("none".equals(type)) { - return NONE; - } else if ("min".equals(type)) { - return MIN; - } else if ("max".equals(type)) { - return MAX; - } else if ("avg".equals(type)) { - return AVG; - } else if ("sum".equals(type)) { - return SUM; - } else if ("total".equals(type)) { // This name is consistent with: ScoreMode.Total - return SUM; - } - throw new IllegalArgumentException("No score type for child query [" + type + "] found"); - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java index 7580c842ed3..85bfe1e6157 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java @@ -89,8 +89,7 @@ import java.util.Locale; *

* See {@link GaussDecayFunctionBuilder} and {@link GaussDecayFunctionParser} * for an example. The parser furthermore needs to be registered in the - * {@link org.elasticsearch.index.query.functionscore.FunctionScoreModule - * FunctionScoreModule}. + * {@link org.elasticsearch.search.SearchModule SearchModule}. * * **/ diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java index 3880592eee9..e1726f9da7a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java @@ -35,8 +35,6 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder 0) { throw new ElasticsearchParseException("failed to parse [{}] query. already found function [{}], now encountering [{}]. use [functions] array if you want to define several functions.", NAME, singleFunctionName, currentFieldName); @@ -197,12 +189,8 @@ public class FunctionScoreQueryParser implements QueryParser { return result; } - private void handleMisplacedFunctionsDeclaration(String errorString, String functionName) { - errorString = MISPLACED_FUNCTION_MESSAGE_PREFIX + errorString; - if (Arrays.asList(FactorParser.NAMES).contains(functionName)) { - errorString = errorString + MISPLACED_BOOST_FUNCTION_MESSAGE_SUFFIX; - } - throw new ElasticsearchParseException("failed to parse [{}] query. [{}]", NAME, errorString); + private void handleMisplacedFunctionsDeclaration(String errorString) { + throw new ElasticsearchParseException("failed to parse [{}] query. 
[{}]", NAME, MISPLACED_FUNCTION_MESSAGE_PREFIX + errorString); } private String parseFiltersAndFunctions(QueryShardContext context, XContentParser parser, diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java index ea8e255941e..23c1ca17f0f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.query.functionscore; import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionBuilder; -import org.elasticsearch.index.query.functionscore.factor.FactorBuilder; import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionBuilder; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder; import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionBuilder; @@ -29,8 +28,6 @@ import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionBui import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; import org.elasticsearch.script.Script; -import java.util.Map; - public class ScoreFunctionBuilders { public static ExponentialDecayFunctionBuilder exponentialDecayFunction(String fieldName, Object origin, Object scale) { diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java index fe33532c347..37a6f80ecb4 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParsingException; import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionParser; -import org.elasticsearch.index.query.functionscore.factor.FactorParser; import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionParser; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser; import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionParser; @@ -42,8 +41,7 @@ public class ScoreFunctionParserMapper { @Inject public ScoreFunctionParserMapper(Set parsers) { Map map = new HashMap<>(); - // build-in parsers - addParser(new FactorParser(), map); + // built-in parsers addParser(new ScriptScoreFunctionParser(), map); addParser(new GaussDecayFunctionParser(), map); addParser(new LinearDecayFunctionParser(), map); diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/factor/FactorBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/factor/FactorBuilder.java deleted file mode 100644 index 3a176c46877..00000000000 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/factor/FactorBuilder.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query.functionscore.factor; - -import org.elasticsearch.common.lucene.search.function.BoostScoreFunction; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; - -import java.io.IOException; - -/** - * A query that simply applies the boost factor to another query (multiply it). - * - * - */ -@Deprecated -public class FactorBuilder extends ScoreFunctionBuilder { - - private Float boostFactor; - - /** - * Sets the boost factor for this query. - */ - public FactorBuilder boostFactor(float boost) { - this.boostFactor = new Float(boost); - return this; - } - - @Override - public void doXContent(XContentBuilder builder, Params params) throws IOException { - if (boostFactor != null) { - builder.field("boost_factor", boostFactor.floatValue()); - } - } - - @Override - public String getName() { - return FactorParser.NAMES[0]; - } - - @Override - public ScoreFunctionBuilder setWeight(float weight) { - throw new IllegalArgumentException(BoostScoreFunction.BOOST_WEIGHT_ERROR_MESSAGE); - } - - @Override - public void buildWeight(XContentBuilder builder) throws IOException { - //we do not want the weight to be written for boost_factor as it does not make sense to have it - } -} diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/factor/FactorParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/factor/FactorParser.java deleted file mode 100644 index 2635c2bbf44..00000000000 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/factor/FactorParser.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query.functionscore.factor; - -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.function.BoostScoreFunction; -import org.elasticsearch.common.lucene.search.function.ScoreFunction; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.QueryParsingException; -import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; - -import java.io.IOException; - -/** - * - */ -@Deprecated -public class FactorParser implements ScoreFunctionParser { - - public static String[] NAMES = { "boost_factor", "boostFactor" }; - - @Inject - public FactorParser() { - } - - @Override - public ScoreFunction parse(QueryShardContext context, XContentParser parser) throws IOException, QueryParsingException { - float boostFactor = parser.floatValue(); - return new BoostScoreFunction(boostFactor); - } - - @Override - public String[] getNames() { - return NAMES; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 8d8c3d9978f..d524c296b65 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -20,11 +20,10 @@ package 
org.elasticsearch.index.shard; import com.google.common.base.Charsets; -import com.google.common.base.Preconditions; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.index.CheckIndex; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.*; +import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.UsageTrackingQueryCachingPolicy; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.ThreadInterruptedException; @@ -111,10 +110,7 @@ import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; import java.io.PrintStream; import java.nio.channels.ClosedByInterruptException; -import java.util.Arrays; -import java.util.EnumSet; -import java.util.Locale; -import java.util.Map; +import java.util.*; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; @@ -208,8 +204,8 @@ public class IndexShard extends AbstractIndexShardComponent { this.deletionPolicy = deletionPolicy; this.similarityService = similarityService; this.wrappingService = wrappingService; - Preconditions.checkNotNull(store, "Store must be provided to the index shard"); - Preconditions.checkNotNull(deletionPolicy, "Snapshot deletion policy must be provided to the index shard"); + Objects.requireNonNull(store, "Store must be provided to the index shard"); + Objects.requireNonNull(deletionPolicy, "Snapshot deletion policy must be provided to the index shard"); this.engineFactory = factory; this.indicesLifecycle = (InternalIndicesLifecycle) indicesLifecycle; this.indexSettingsService = indexSettingsService; diff --git a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index dfd6cdf6b50..2d12fab1637 100644 --- 
a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -19,12 +19,22 @@ package org.elasticsearch.index.store; -import com.google.common.collect.Sets; -import org.apache.lucene.store.*; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.FileSwitchDirectory; +import org.apache.lucene.store.LockFactory; +import org.apache.lucene.store.MMapDirectory; +import org.apache.lucene.store.NIOFSDirectory; +import org.apache.lucene.store.NativeFSLockFactory; +import org.apache.lucene.store.RateLimitedFSDirectory; +import org.apache.lucene.store.SimpleFSDirectory; +import org.apache.lucene.store.SimpleFSLockFactory; +import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.util.Constants; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.shard.ShardPath; @@ -32,7 +42,6 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; -import java.util.Locale; import java.util.Set; /** diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java index a2b12ea5e50..599c86097ff 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java @@ -73,7 +73,16 @@ public class JvmInfo implements Streamable, ToXContent { // ignore } info.inputArguments = runtimeMXBean.getInputArguments().toArray(new String[runtimeMXBean.getInputArguments().size()]); - info.bootClassPath = runtimeMXBean.getBootClassPath(); + try { + info.bootClassPath = 
runtimeMXBean.getBootClassPath(); + } catch (UnsupportedOperationException e) { + // oracle java 9 + info.bootClassPath = System.getProperty("sun.boot.class.path"); + if (info.bootClassPath == null) { + // something else + info.bootClassPath = ""; + } + } info.classPath = runtimeMXBean.getClassPath(); info.systemProperties = runtimeMXBean.getSystemProperties(); diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java b/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java index 03f262f883b..0e8a7379c01 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java @@ -227,6 +227,7 @@ public class OsStats implements Streamable, ToXContent { } } + // TODO: if values are -1, this should return -1 to show its unsupported? private static short calculatePercentage(long used, long max) { return max <= 0 ? 0 : (short) (Math.round((100d * used) / max)); } diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 15156179170..c155f9352a6 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -20,7 +20,6 @@ package org.elasticsearch.node.internal; import com.google.common.base.Charsets; -import com.google.common.collect.Sets; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; @@ -37,6 +36,7 @@ import java.io.InputStreamReader; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -105,7 +105,7 @@ public class InternalSettingsPreparer { } if (loadFromEnv) { boolean settingsFileFound = false; - Set foundSuffixes = 
Sets.newHashSet(); + Set foundSuffixes = new HashSet<>(); for (String allowedSuffix : ALLOWED_SUFFIXES) { Path path = environment.configFile().resolve("elasticsearch" + allowedSuffix); if (Files.exists(path)) { diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java index ee9e6b392e0..3eded07712f 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java @@ -41,7 +41,6 @@ import java.io.IOException; import java.io.OutputStream; import java.net.MalformedURLException; import java.net.URL; -import java.net.URLClassLoader; import java.nio.file.*; import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.PosixFileAttributeView; @@ -315,10 +314,7 @@ public class PluginManager { private void jarHellCheck(Path candidate, boolean isolated) throws IOException { // create list of current jars in classpath final List jars = new ArrayList<>(); - ClassLoader loader = PluginManager.class.getClassLoader(); - if (loader instanceof URLClassLoader) { - Collections.addAll(jars, ((URLClassLoader) loader).getURLs()); - } + jars.addAll(Arrays.asList(JarHell.parseClassPath())); // read existing bundles. this does some checks on the installation too. 
List bundles = PluginsService.getPluginBundles(environment.pluginsFile()); diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManagerCliParser.java b/core/src/main/java/org/elasticsearch/plugins/PluginManagerCliParser.java index c24b823c959..db6afd17647 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginManagerCliParser.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginManagerCliParser.java @@ -34,6 +34,7 @@ import org.elasticsearch.plugins.PluginManager.OutputMode; import java.net.MalformedURLException; import java.net.URL; +import java.net.URLDecoder; import java.util.Locale; import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd; @@ -221,7 +222,7 @@ public class PluginManagerCliParser extends CliTool { if (name != null) { terminal.println("-> Installing " + Strings.coalesceToEmpty(name) + "..."); } else { - terminal.println("-> Installing from " + url + "..."); + terminal.println("-> Installing from " + URLDecoder.decode(url.toString(), "UTF-8") + "..."); } pluginManager.downloadAndExtract(name, terminal); return ExitStatus.OK; diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index c70349f9bf8..5834efc398d 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -47,6 +47,7 @@ import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -331,12 +332,7 @@ public class PluginsService extends AbstractComponent { // pluginmanager does it, but we do it again, in case lusers mess with jar files manually try { final List jars = new ArrayList<>(); - ClassLoader parentLoader = getClass().getClassLoader(); - if (parentLoader instanceof URLClassLoader) { - for 
(URL url : ((URLClassLoader) parentLoader).getURLs()) { - jars.add(url); - } - } + jars.addAll(Arrays.asList(JarHell.parseClassPath())); jars.addAll(bundle.urls); JarHell.checkJarHell(jars.toArray(new URL[0])); } catch (Exception e) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java index 06c5a224a81..aed9514e2c0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.admin.cluster.node.info; -import com.google.common.collect.Sets; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.client.Client; @@ -27,8 +26,15 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestBuilderListener; import java.util.Set; diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 3530e889e9d..09edd81f323 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -192,6 +192,9 @@ public class RestIndicesAction extends AbstractCatAction { table.addCell("indexing.index_total", "sibling:pri;alias:iito,indexingIndexTotal;default:false;text-align:right;desc:number of indexing ops"); table.addCell("pri.indexing.index_total", "default:false;text-align:right;desc:number of indexing ops"); + table.addCell("indexing.index_failed", "sibling:pri;alias:iif,indexingIndexFailed;default:false;text-align:right;desc:number of failed indexing ops"); + table.addCell("pri.indexing.index_failed", "default:false;text-align:right;desc:number of failed indexing ops"); + table.addCell("merges.current", "sibling:pri;alias:mc,mergesCurrent;default:false;text-align:right;desc:number of current merges"); table.addCell("pri.merges.current", "default:false;text-align:right;desc:number of current merges"); @@ -403,6 +406,9 @@ public class RestIndicesAction extends AbstractCatAction { table.addCell(indexStats == null ? null : indexStats.getTotal().getIndexing().getTotal().getIndexCount()); table.addCell(indexStats == null ? null : indexStats.getPrimaries().getIndexing().getTotal().getIndexCount()); + table.addCell(indexStats == null ? null : indexStats.getTotal().getIndexing().getTotal().getIndexFailedCount()); + table.addCell(indexStats == null ? null : indexStats.getPrimaries().getIndexing().getTotal().getIndexFailedCount()); + table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getCurrent()); table.addCell(indexStats == null ? 
null : indexStats.getPrimaries().getMerge().getCurrentSize()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index 1bc43b4c1b4..f29b3521caa 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -166,6 +166,7 @@ public class RestNodesAction extends AbstractCatAction { table.addCell("indexing.index_current", "alias:iic,indexingIndexCurrent;default:false;text-align:right;desc:number of current indexing ops"); table.addCell("indexing.index_time", "alias:iiti,indexingIndexTime;default:false;text-align:right;desc:time spent in indexing"); table.addCell("indexing.index_total", "alias:iito,indexingIndexTotal;default:false;text-align:right;desc:number of indexing ops"); + table.addCell("indexing.index_failed", "alias:iif,indexingIndexFailed;default:false;text-align:right;desc:number of failed indexing ops"); table.addCell("merges.current", "alias:mc,mergesCurrent;default:false;text-align:right;desc:number of current merges"); table.addCell("merges.current_docs", "alias:mcd,mergesCurrentDocs;default:false;text-align:right;desc:number of current merging docs"); @@ -300,6 +301,7 @@ public class RestNodesAction extends AbstractCatAction { table.addCell(indexingStats == null ? null : indexingStats.getTotal().getIndexCurrent()); table.addCell(indexingStats == null ? null : indexingStats.getTotal().getIndexTime()); table.addCell(indexingStats == null ? null : indexingStats.getTotal().getIndexCount()); + table.addCell(indexingStats == null ? null : indexingStats.getTotal().getIndexFailedCount()); MergeStats mergeStats = indicesStats == null ? null : indicesStats.getMerge(); table.addCell(mergeStats == null ? 
null : mergeStats.getCurrent()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index c6df5ee562f..5ec23c9b287 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -121,6 +121,7 @@ public class RestShardsAction extends AbstractCatAction { table.addCell("indexing.index_current", "alias:iic,indexingIndexCurrent;default:false;text-align:right;desc:number of current indexing ops"); table.addCell("indexing.index_time", "alias:iiti,indexingIndexTime;default:false;text-align:right;desc:time spent in indexing"); table.addCell("indexing.index_total", "alias:iito,indexingIndexTotal;default:false;text-align:right;desc:number of indexing ops"); + table.addCell("indexing.index_failed", "alias:iif,indexingIndexFailed;default:false;text-align:right;desc:number of failed indexing ops"); table.addCell("merges.current", "alias:mc,mergesCurrent;default:false;text-align:right;desc:number of current merges"); table.addCell("merges.current_docs", "alias:mcd,mergesCurrentDocs;default:false;text-align:right;desc:number of current merging docs"); @@ -255,6 +256,7 @@ public class RestShardsAction extends AbstractCatAction { table.addCell(shardStats == null ? null : shardStats.getIndexing().getTotal().getIndexCurrent()); table.addCell(shardStats == null ? null : shardStats.getIndexing().getTotal().getIndexTime()); table.addCell(shardStats == null ? null : shardStats.getIndexing().getTotal().getIndexCount()); + table.addCell(shardStats == null ? null : shardStats.getIndexing().getTotal().getIndexFailedCount()); table.addCell(shardStats == null ? null : shardStats.getMerge().getCurrent()); table.addCell(shardStats == null ? 
null : shardStats.getMerge().getCurrentNumDocs()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java index 0a76f1f7e86..6a1cd27e0b2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java @@ -244,5 +244,13 @@ public class AggregatorFactories { orderedPipelineAggregators.add(factory); } } + + AggregatorFactory[] getAggregatorFactories() { + return this.factories.toArray(new AggregatorFactory[this.factories.size()]); + } + + List getPipelineAggregatorFactories() { + return this.pipelineAggregatorFactories; + } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java index bba7be2ad1f..257fef89cd2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java @@ -80,7 +80,7 @@ public class AggregatorParsers { /** * Returns the parser that is registered under the given pipeline aggregator * type. - * + * * @param type * The pipeline aggregator type * @return The parser associated with the given pipeline aggregator type. 
@@ -228,6 +228,10 @@ public class AggregatorParsers { throw new SearchParseException(context, "Aggregation [" + aggregationName + "] cannot define sub-aggregations", parser.getTokenLocation()); } + if (level == 0) { + pipelineAggregatorFactory + .validate(null, factories.getAggregatorFactories(), factories.getPipelineAggregatorFactories()); + } factories.addPipelineAggregator(pipelineAggregatorFactory); } } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java index a2d762461c0..6914d854100 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java @@ -79,15 +79,15 @@ public final class CustomQueryScorer extends QueryScorer { Map terms) throws IOException { if (query instanceof FunctionScoreQuery) { query = ((FunctionScoreQuery) query).getSubQuery(); - extract(query, terms); + extract(query, query.getBoost(), terms); } else if (query instanceof FiltersFunctionScoreQuery) { query = ((FiltersFunctionScoreQuery) query).getSubQuery(); - extract(query, terms); + extract(query, query.getBoost(), terms); } else if (query instanceof FilteredQuery) { query = ((FilteredQuery) query).getQuery(); - extract(query, terms); + extract(query, 1F, terms); } else { - extractWeightedTerms(terms, query); + extractWeightedTerms(terms, query, query.getBoost()); } } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java index 74723c7e3f5..12f376f3878 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.highlight; -import com.google.common.collect.Sets; import 
org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.XContentParser; @@ -30,6 +29,7 @@ import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -211,7 +211,7 @@ public class HighlighterParseElement implements SearchParseElement { } fieldOptionsBuilder.postTags(postTagsList.toArray(new String[postTagsList.size()])); } else if ("matched_fields".equals(fieldName) || "matchedFields".equals(fieldName)) { - Set matchedFields = Sets.newHashSet(); + Set matchedFields = new HashSet<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { matchedFields.add(parser.text()); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index d3ea19e2098..981c2778491 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -20,12 +20,7 @@ package org.elasticsearch.search.internal; import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.Collector; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.Sort; +import org.apache.lucene.search.*; import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cache.recycler.PageCacheRecycler; @@ -33,8 +28,8 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.search.Queries; -import 
org.elasticsearch.common.lucene.search.function.BoostScoreFunction; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; +import org.elasticsearch.common.lucene.search.function.WeightFactorFunction; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; @@ -67,16 +62,26 @@ import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; /** * */ public class DefaultSearchContext extends SearchContext { + /** + * Index setting describing the maximum value of from + size on a query. + */ + public static final String MAX_RESULT_WINDOW = "index.max_result_window"; + public static class Defaults { + /** + * Default maximum value of from + size on a query. 10,000 was chosen as + * a conservative default as it is sure to not cause trouble. Users can + * certainly profile their cluster and decide to set it to 100,000 + * safely. 1,000,000 is probably way to high for any cluster to set + * safely. + */ + public static final int MAX_RESULT_WINDOW = 10000; + } private final long id; private final ShardSearchRequest request; @@ -168,12 +173,20 @@ public class DefaultSearchContext extends SearchContext { */ @Override public void preProcess() { - if (!(from() == -1 && size() == -1)) { - // from and size have been set. - int numHits = from() + size(); - if (numHits < 0) { - String msg = "Result window is too large, from + size must be less than or equal to: [" + Integer.MAX_VALUE + "] but was [" + (((long) from()) + ((long) size())) + "]"; - throw new QueryPhaseExecutionException(this, msg); + if (scrollContext == null) { + long from = from() == -1 ? 0 : from(); + long size = size() == -1 ? 
10 : size(); + long resultWindow = from + size; + // We need settingsService's view of the settings because its dynamic. + // indexService's isn't. + int maxResultWindow = indexService.settingsService().getSettings().getAsInt(MAX_RESULT_WINDOW, Defaults.MAX_RESULT_WINDOW); + + if (resultWindow > maxResultWindow) { + throw new QueryPhaseExecutionException(this, + "Result window is too large, from + size must be less than or equal to: [" + maxResultWindow + "] but was [" + + resultWindow + "]. See the scroll api for a more efficient way to request large data sets. " + + "This limit can be set by changing the [" + DefaultSearchContext.MAX_RESULT_WINDOW + + "] index level parameter."); } } @@ -184,7 +197,7 @@ public class DefaultSearchContext extends SearchContext { parsedQuery(ParsedQuery.parsedMatchAllQuery()); } if (queryBoost() != 1.0f) { - parsedQuery(new ParsedQuery(new FunctionScoreQuery(query(), new BoostScoreFunction(queryBoost)), parsedQuery())); + parsedQuery(new ParsedQuery(new FunctionScoreQuery(query(), new WeightFactorFunction(queryBoost)), parsedQuery())); } Query searchFilter = searchFilter(types()); if (searchFilter != null) { diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 443e723b972..36167254842 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -81,6 +81,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -89,7 +90,6 @@ import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CopyOnWriteArrayList; -import static com.google.common.collect.Sets.newHashSet; import static 
org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_INDEX_UUID; @@ -229,7 +229,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); RoutingTable.Builder rtBuilder = RoutingTable.builder(currentState.routingTable()); final ImmutableMap shards; - Set aliases = newHashSet(); + Set aliases = new HashSet<>(); if (!renamedIndices.isEmpty()) { // We have some indices to restore ImmutableMap.Builder shardsBuilder = ImmutableMap.builder(); diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 1e19633b02a..76485a0c4c3 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -64,13 +64,13 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; -import static com.google.common.collect.Sets.newHashSet; import static org.elasticsearch.cluster.SnapshotsInProgress.completed; /** @@ -139,7 +139,7 @@ public class SnapshotsService extends AbstractLifecycleComponent snapshots(String repositoryName) { - Set snapshotSet = newHashSet(); + Set snapshotSet = new HashSet<>(); List entries = currentSnapshots(repositoryName, null); for (SnapshotsInProgress.Entry entry : entries) { snapshotSet.add(inProgressSnapshot(entry)); @@ -736,8 +736,8 @@ public class SnapshotsService extends AbstractLifecycleComponent, Set> indicesWithMissingShards(ImmutableMap shards, MetaData 
metaData) { - Set missing = newHashSet(); - Set closed = newHashSet(); + Set missing = new HashSet<>(); + Set closed = new HashSet<>(); for (ImmutableMap.Entry entry : shards.entrySet()) { if (entry.getValue().state() == State.MISSING) { if (metaData.hasIndex(entry.getKey().getIndex()) && metaData.index(entry.getKey().getIndex()).getState() == IndexMetaData.State.CLOSE) { diff --git a/core/src/main/java/org/elasticsearch/transport/TransportModule.java b/core/src/main/java/org/elasticsearch/transport/TransportModule.java index 69264014e47..abf90deee81 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportModule.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportModule.java @@ -19,7 +19,6 @@ package org.elasticsearch.transport; -import com.google.common.base.Preconditions; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -31,6 +30,7 @@ import org.elasticsearch.transport.netty.NettyTransport; import java.util.HashMap; import java.util.Map; +import java.util.Objects; /** * @@ -107,15 +107,15 @@ public class TransportModule extends AbstractModule { } public void setTransportService(Class transportService, String source) { - Preconditions.checkNotNull(transportService, "Configured transport service may not be null"); - Preconditions.checkNotNull(source, "Plugin, that changes transport service may not be null"); + Objects.requireNonNull(transportService, "Configured transport service may not be null"); + Objects.requireNonNull(source, "Plugin, that changes transport service may not be null"); this.configuredTransportService = transportService; this.configuredTransportServiceSource = source; } public void setTransport(Class transport, String source) { - Preconditions.checkNotNull(transport, "Configured transport may not be null"); - Preconditions.checkNotNull(source, "Plugin, that changes transport may not be null"); 
+ Objects.requireNonNull(transport, "Configured transport may not be null"); + Objects.requireNonNull(source, "Plugin, that changes transport may not be null"); this.configuredTransport = transport; this.configuredTransportSource = source; } diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 4f984aa497b..aa88fba02e5 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -36,6 +36,10 @@ grant codeBase "${es.security.jar.lucene.core}" { permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; }; +//// test framework permissions. +//// These are mock objects and test management that we allow test framework libs +//// to provide on our behalf. But tests themselves cannot do this stuff! + grant codeBase "${es.security.jar.elasticsearch.securemock}" { // needed to support creation of mocks permission java.lang.RuntimePermission "reflectionFactoryAccess"; @@ -80,8 +84,6 @@ grant { permission java.lang.RuntimePermission "getProtectionDomain"; // reflection hacks: - // needed for mock filesystems in tests (to capture implCloseChannel) - permission java.lang.RuntimePermission "accessClassInPackage.sun.nio.ch"; // needed by groovy engine permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect"; // needed by aws core sdk (TODO: look into this) diff --git a/core/src/test/java/org/elasticsearch/ESExceptionTests.java b/core/src/test/java/org/elasticsearch/ESExceptionTests.java index eb3d870c2ad..6f5f85c599b 100644 --- a/core/src/test/java/org/elasticsearch/ESExceptionTests.java +++ b/core/src/test/java/org/elasticsearch/ESExceptionTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.store.AlreadyClosedException; import 
org.apache.lucene.store.LockObtainFailedException; +import org.apache.lucene.util.Constants; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -322,7 +323,12 @@ public class ESExceptionTests extends ESTestCase { } else { assertEquals(e.getCause().getClass(), NotSerializableExceptionWrapper.class); } - assertArrayEquals(e.getStackTrace(), ex.getStackTrace()); + // TODO: fix this test + // on java 9, expected: + // but was: + if (!Constants.JRE_IS_MINIMUM_JAVA9) { + assertArrayEquals(e.getStackTrace(), ex.getStackTrace()); + } assertTrue(e.getStackTrace().length > 1); ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(getRandom()), t); ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(getRandom()), ex); diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index eaa5ad0e337..1b26925db16 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -21,7 +21,8 @@ package org.elasticsearch; import com.fasterxml.jackson.core.JsonLocation; import com.fasterxml.jackson.core.JsonParseException; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; + +import org.apache.lucene.util.Constants; import org.codehaus.groovy.runtime.typehandling.GroovyCastException; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.RoutingMissingException; @@ -32,7 +33,12 @@ import org.elasticsearch.client.AbstractClientHeadersTestCase; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; -import 
org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.IllegalShardRoutingStateException; +import org.elasticsearch.cluster.routing.RoutingTableValidation; +import org.elasticsearch.cluster.routing.RoutingValidationException; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -46,6 +52,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.util.CancellableThreadsTests; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.index.AlreadyExpiredException; import org.elasticsearch.index.Index; @@ -573,12 +580,15 @@ public class ExceptionSerializationTests extends ESTestCase { } Throwable deserialized = serialize(t); assertTrue(deserialized instanceof NotSerializableExceptionWrapper); - assertArrayEquals(t.getStackTrace(), deserialized.getStackTrace()); - assertEquals(t.getSuppressed().length, deserialized.getSuppressed().length); - if (t.getSuppressed().length > 0) { - assertTrue(deserialized.getSuppressed()[0] instanceof NotSerializableExceptionWrapper); - assertArrayEquals(t.getSuppressed()[0].getStackTrace(), deserialized.getSuppressed()[0].getStackTrace()); - assertTrue(deserialized.getSuppressed()[1] instanceof NullPointerException); + // TODO: fix this test for more java 9 differences + if (!Constants.JRE_IS_MINIMUM_JAVA9) { + assertArrayEquals(t.getStackTrace(), deserialized.getStackTrace()); + assertEquals(t.getSuppressed().length, deserialized.getSuppressed().length); + if (t.getSuppressed().length > 0) { + 
assertTrue(deserialized.getSuppressed()[0] instanceof NotSerializableExceptionWrapper); + assertArrayEquals(t.getSuppressed()[0].getStackTrace(), deserialized.getSuppressed()[0].getStackTrace()); + assertTrue(deserialized.getSuppressed()[1] instanceof NullPointerException); + } } } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index 53f3a14fad9..86a9bbc1f3f 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -19,10 +19,8 @@ package org.elasticsearch.action.admin.indices.template.put; -import com.google.common.collect.Sets; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.IndexTemplateFilter; import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService; import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService.PutRequest; @@ -33,6 +31,7 @@ import org.junit.Test; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; @@ -82,7 +81,7 @@ public class MetaDataIndexTemplateServiceTests extends ESTestCase { null, Version.CURRENT, null, - Sets.newHashSet(), + new HashSet<>(), null, null ); diff --git a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java index 05037ebef37..6bbec12dc12 100644 --- a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -19,7 +19,6 @@ package 
org.elasticsearch.aliases; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; @@ -34,7 +33,6 @@ import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.StopWatch; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -49,12 +47,12 @@ import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; import java.util.Arrays; +import java.util.HashSet; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import static com.google.common.collect.Sets.newHashSet; import static org.elasticsearch.client.Requests.createIndexRequest; import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_METADATA_BLOCK; @@ -1050,7 +1048,7 @@ public class IndexAliasesIT extends ESIntegTestCase { private void assertHits(SearchHits hits, String... 
ids) { assertThat(hits.totalHits(), equalTo((long) ids.length)); - Set hitIds = newHashSet(); + Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.id()); } diff --git a/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java b/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java index cdc3d634755..3d22f07a52d 100644 --- a/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java +++ b/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.benchmark.search.child; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -26,7 +27,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.SizeValue; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.ScoreType; import org.elasticsearch.indices.IndexAlreadyExistsException; import org.elasticsearch.node.Node; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -282,12 +282,12 @@ public class ChildSearchBenchmark { System.out.println("--> Running has_child query with score type"); // run parent child score query for (int j = 0; j < QUERY_WARMUP; j++) { - client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreType(ScoreType.MAX)).execute().actionGet(); + client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreMode(ScoreMode.Max)).execute().actionGet(); } totalQueryTime = 0; for (int j = 0; j < QUERY_COUNT; j++) { - SearchResponse searchResponse = 
client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreType(ScoreType.MAX)).execute().actionGet(); + SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreMode(ScoreMode.Max)).execute().actionGet(); if (j % 10 == 0) { System.out.println("--> hits [" + j + "], got [" + searchResponse.getHits().totalHits() + "]"); } @@ -297,7 +297,7 @@ public class ChildSearchBenchmark { totalQueryTime = 0; for (int j = 0; j < QUERY_COUNT; j++) { - SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasChildQuery("child", matchAllQuery()).scoreType(ScoreType.MAX)).execute().actionGet(); + SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasChildQuery("child", matchAllQuery()).scoreMode(ScoreMode.Max)).execute().actionGet(); if (j % 10 == 0) { System.out.println("--> hits [" + j + "], got [" + searchResponse.getHits().totalHits() + "]"); } diff --git a/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchShortCircuitBenchmark.java b/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchShortCircuitBenchmark.java index c44977b3410..388bf954822 100644 --- a/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchShortCircuitBenchmark.java +++ b/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchShortCircuitBenchmark.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.benchmark.search.child; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -30,7 +31,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.SizeValue; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.ScoreType; import org.elasticsearch.node.Node; import java.io.IOException; @@ -179,7 +179,7 @@ public class ChildSearchShortCircuitBenchmark { for (int i = 1; i < PARENT_COUNT; i *= 2) { for (int j = 0; j < QUERY_COUNT; j++) { SearchResponse searchResponse = client.prepareSearch(indexName) - .setQuery(hasChildQuery("child", matchQuery("field2", i)).scoreType(ScoreType.MAX)) + .setQuery(hasChildQuery("child", matchQuery("field2", i)).scoreMode(ScoreMode.Max)) .execute().actionGet(); if (searchResponse.getHits().totalHits() != i) { System.err.println("--> mismatch on hits"); diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index a779db2326b..e31fc7d8b35 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -86,7 +86,7 @@ public class BootstrapForTesting { // initialize paths the same exact way as bootstrap. 
Permissions perms = new Permissions(); // add permissions to everything in classpath - for (URL url : ((URLClassLoader)BootstrapForTesting.class.getClassLoader()).getURLs()) { + for (URL url : JarHell.parseClassPath()) { Path path = PathUtils.get(url.toURI()); // resource itself perms.add(new FilePermission(path.toString(), "read,readlink")); @@ -97,6 +97,7 @@ public class BootstrapForTesting { String filename = path.getFileName().toString(); if (filename.contains("jython") && filename.endsWith(".jar")) { // just enough so it won't fail when it does not exist + perms.add(new FilePermission(path.getParent().toString(), "read,readlink")); perms.add(new FilePermission(path.getParent().resolve("Lib").toString(), "read,readlink")); } } @@ -114,7 +115,7 @@ public class BootstrapForTesting { perms.add(new FilePermission(coverageDir.resolve("jacoco-it.exec").toString(), "read,write")); } Policy.setPolicy(new ESPolicy(perms)); - System.setSecurityManager(new XTestSecurityManager()); + System.setSecurityManager(new TestSecurityManager()); Security.selfTest(); } catch (Exception e) { throw new RuntimeException("unable to install test security manager", e); diff --git a/core/src/test/java/org/elasticsearch/bootstrap/XTestSecurityManager.java b/core/src/test/java/org/elasticsearch/bootstrap/XTestSecurityManager.java deleted file mode 100644 index c626274b9e9..00000000000 --- a/core/src/test/java/org/elasticsearch/bootstrap/XTestSecurityManager.java +++ /dev/null @@ -1,113 +0,0 @@ -package org.elasticsearch.bootstrap; - -import java.security.AccessController; -import java.security.PrivilegedAction; - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// the above license header is a lie, here is the real one. - -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * A {@link SecurityManager} that prevents tests calling {@link System#exit(int)}. - * Only the test runner itself is allowed to exit the JVM. - * All other security checks are handled by the default security policy. - *

- * Use this with {@code -Djava.security.manager=org.apache.lucene.util.TestSecurityManager}. - */ -// TODO: remove me when https://issues.apache.org/jira/browse/LUCENE-6794 is committed -public final class XTestSecurityManager extends SecurityManager { - - static final String JUNIT4_TEST_RUNNER_PACKAGE = "com.carrotsearch.ant.tasks.junit4."; - static final String ECLIPSE_TEST_RUNNER_PACKAGE = "org.eclipse.jdt.internal.junit.runner."; - static final String IDEA_TEST_RUNNER_PACKAGE = "com.intellij.rt.execution.junit."; - - /** - * Creates a new TestSecurityManager. This ctor is called on JVM startup, - * when {@code -Djava.security.manager=org.apache.lucene.util.TestSecurityManager} - * is passed to JVM. - */ - public XTestSecurityManager() { - super(); - } - - /** - * {@inheritDoc} - *

This method inspects the stack trace and checks who is calling - * {@link System#exit(int)} and similar methods - * @throws SecurityException if the caller of this method is not the test runner itself. - */ - @Override - public void checkExit(final int status) { - AccessController.doPrivileged((PrivilegedAction) () -> { - final String systemClassName = System.class.getName(), - runtimeClassName = Runtime.class.getName(); - String exitMethodHit = null; - for (final StackTraceElement se : Thread.currentThread().getStackTrace()) { - final String className = se.getClassName(), methodName = se.getMethodName(); - if ( - ("exit".equals(methodName) || "halt".equals(methodName)) && - (systemClassName.equals(className) || runtimeClassName.equals(className)) - ) { - exitMethodHit = className + '#' + methodName + '(' + status + ')'; - continue; - } - - if (exitMethodHit != null) { - if (className.startsWith(JUNIT4_TEST_RUNNER_PACKAGE) || - className.startsWith(ECLIPSE_TEST_RUNNER_PACKAGE) || - className.startsWith(IDEA_TEST_RUNNER_PACKAGE)) { - // this exit point is allowed, we return normally from closure: - return /*void*/ null; - } else { - // anything else in stack trace is not allowed, break and throw SecurityException below: - break; - } - } - } - - if (exitMethodHit == null) { - // should never happen, only if JVM hides stack trace - replace by generic: - exitMethodHit = "JVM exit method"; - } - throw new SecurityException(exitMethodHit + " calls are not allowed because they terminate the test runner's JVM."); - }); - - // we passed the stack check, delegate to super, so default policy can still deny permission: - super.checkExit(status); - } - -} diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index cca46cfa98c..8151876ca5d 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ 
b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -98,8 +98,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { private List loadIndexesList(String prefix) throws IOException { List indexes = new ArrayList<>(); - Path dir = getDataPath("."); - try (DirectoryStream stream = Files.newDirectoryStream(dir, prefix + "-*.zip")) { + try (DirectoryStream stream = Files.newDirectoryStream(getBwcIndicesPath(), prefix + "-*.zip")) { for (Path path : stream) { indexes.add(path.getFileName().toString()); } @@ -166,7 +165,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { String indexName = indexFile.replace(".zip", "").toLowerCase(Locale.ROOT).replace("unsupported-", "index-"); // decompress the index - Path backwardsIndex = getDataPath(indexFile); + Path backwardsIndex = getBwcIndicesPath().resolve(indexFile); try (InputStream stream = Files.newInputStream(backwardsIndex)) { TestUtil.unzip(stream, unzipDir); } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java index d598ba9e9a0..486267bf70c 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java @@ -31,7 +31,7 @@ public class RecoveryWithUnsupportedIndicesIT extends StaticIndexBackwardCompati String indexName = "unsupported-0.20.6"; logger.info("Checking static index " + indexName); - Settings nodeSettings = prepareBackwardsDataDir(getDataPath(indexName + ".zip"), Node.HTTP_ENABLED, true); + Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip"), Node.HTTP_ENABLED, true); try { internalCluster().startNode(nodeSettings); fail(); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java 
b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index 14bcfc79076..9ef4238e3b9 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -62,12 +62,12 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { // Configure using path.repo return settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .put("path.repo", reposRoot()) + .put("path.repo", getBwcIndicesPath()) .build(); } else { // Configure using url white list try { - URI repoJarPatternUri = new URI("jar:" + reposRoot().toUri().toString() + "*.zip!/repo/"); + URI repoJarPatternUri = new URI("jar:" + getBwcIndicesPath().toUri().toString() + "*.zip!/repo/"); return settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) .putArray("repositories.url.allowed_urls", repoJarPatternUri.toString()) @@ -128,10 +128,6 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { } } - private Path reposRoot() { - return getDataPath("."); - } - private List repoVersions() throws Exception { return listRepoVersions("repo"); } @@ -142,7 +138,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { private List listRepoVersions(String prefix) throws Exception { List repoVersions = new ArrayList<>(); - Path repoFiles = reposRoot(); + Path repoFiles = getBwcIndicesPath(); try (DirectoryStream stream = Files.newDirectoryStream(repoFiles, prefix + "-*.zip")) { for (Path entry : stream) { String fileName = entry.getFileName().toString(); @@ -155,8 +151,8 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { } private void createRepo(String prefix, String version, String repo) throws Exception { - String repoFile = prefix + "-" + version + ".zip"; - URI repoFileUri = getDataPath(repoFile).toUri(); + Path repoFile = getBwcIndicesPath().resolve(prefix + "-" + version + ".zip"); + URI 
repoFileUri = repoFile.toUri(); URI repoJarUri = new URI("jar:" + repoFileUri.toString() + "!/repo/"); logger.info("--> creating repository [{}] for version [{}]", repo, version); assertAcked(client().admin().cluster().preparePutRepository(repo) diff --git a/core/src/test/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java b/core/src/test/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java index 37927ef807c..99cd417133d 100644 --- a/core/src/test/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java +++ b/core/src/test/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java @@ -19,10 +19,10 @@ package org.elasticsearch.cache.recycler; -import com.google.common.collect.Sets; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.recycler.Recycler.V; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.threadpool.ThreadPool; @@ -46,7 +46,7 @@ public class MockPageCacheRecycler extends PageCacheRecycler { // and releasing pages, lets make sure that after a reasonable timeout, all master // copy (snapshot) have been released boolean success = - ESTestCase.awaitBusy(() -> Sets.intersection(masterCopy.keySet(), ACQUIRED_PAGES.keySet()).isEmpty()); + ESTestCase.awaitBusy(() -> Sets.haveEmptyIntersection(masterCopy.keySet(), ACQUIRED_PAGES.keySet())); if (!success) { masterCopy.keySet().retainAll(ACQUIRED_PAGES.keySet()); ACQUIRED_PAGES.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on diff --git a/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index f3cfe2ef24a..eff22c8670a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -18,20 +18,27 @@ */ package org.elasticsearch.cluster; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction; -import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDeciderTests; -import org.elasticsearch.cluster.routing.allocation.decider.MockDiskUsagesIT; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.threadpool.ThreadPool; +import java.util.AbstractMap; +import java.util.Collections; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; /** @@ -57,6 +64,21 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { private final ClusterName clusterName; private volatile NodeStats[] stats = new NodeStats[3]; + /** Create a fake NodeStats for the given node and usage */ + public static NodeStats makeStats(String nodeName, DiskUsage usage) { + FsInfo.Path[] paths = new FsInfo.Path[1]; + FsInfo.Path path = new FsInfo.Path("/dev/null", null, + usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes()); + paths[0] = path; + FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), paths); + return new 
NodeStats(new DiscoveryNode(nodeName, DummyTransportAddress.INSTANCE, Version.CURRENT), + System.currentTimeMillis(), + null, null, null, null, null, + fsInfo, + null, null, null, + null); + } + @Inject public MockInternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService, TransportNodesStatsAction transportNodesStatsAction, @@ -64,21 +86,21 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { ClusterService clusterService, ThreadPool threadPool) { super(settings, nodeSettingsService, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool); this.clusterName = ClusterName.clusterNameFromSettings(settings); - stats[0] = MockDiskUsagesIT.makeStats("node_t1", new DiskUsage("node_t1", "n1", "/dev/null", 100, 100)); - stats[1] = MockDiskUsagesIT.makeStats("node_t2", new DiskUsage("node_t2", "n2", "/dev/null", 100, 100)); - stats[2] = MockDiskUsagesIT.makeStats("node_t3", new DiskUsage("node_t3", "n3", "/dev/null", 100, 100)); + stats[0] = makeStats("node_t1", new DiskUsage("node_t1", "n1", "/dev/null", 100, 100)); + stats[1] = makeStats("node_t2", new DiskUsage("node_t2", "n2", "/dev/null", 100, 100)); + stats[2] = makeStats("node_t3", new DiskUsage("node_t3", "n3", "/dev/null", 100, 100)); } public void setN1Usage(String nodeName, DiskUsage newUsage) { - stats[0] = MockDiskUsagesIT.makeStats(nodeName, newUsage); + stats[0] = makeStats(nodeName, newUsage); } public void setN2Usage(String nodeName, DiskUsage newUsage) { - stats[1] = MockDiskUsagesIT.makeStats(nodeName, newUsage); + stats[1] = makeStats(nodeName, newUsage); } public void setN3Usage(String nodeName, DiskUsage newUsage) { - stats[2] = MockDiskUsagesIT.makeStats(nodeName, newUsage); + stats[2] = makeStats(nodeName, newUsage); } @Override @@ -96,6 +118,28 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { public ClusterInfo getClusterInfo() { ClusterInfo clusterInfo = super.getClusterInfo(); - 
return new ClusterInfo(clusterInfo.getNodeLeastAvailableDiskUsages(), clusterInfo.getNodeMostAvailableDiskUsages(), clusterInfo.shardSizes, DiskThresholdDeciderTests.DEV_NULL_MAP); + return new ClusterInfo(clusterInfo.getNodeLeastAvailableDiskUsages(), clusterInfo.getNodeMostAvailableDiskUsages(), clusterInfo.shardSizes, DEV_NULL_MAP); + } + + public static final Map DEV_NULL_MAP = Collections.unmodifiableMap(new StaticValueMap("/dev/null")); + + // a test only map that always returns the same value no matter what key is passed + private static final class StaticValueMap extends AbstractMap { + + private final String value; + + private StaticValueMap(String value) { + this.value = value; + } + + @Override + public String get(Object key) { + return value; + } + + @Override + public Set> entrySet() { + throw new UnsupportedOperationException("this is a test-only map that only supports #get(Object key)"); + } } } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java index 8e906aeea0c..4163ad05d60 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java @@ -26,7 +26,6 @@ import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.Test; -import static com.google.common.collect.Sets.newHashSet; import static org.elasticsearch.test.VersionUtils.randomVersion; public class HumanReadableIndexSettingsTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 1a0e9981719..3bae8e158d8 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -19,15 +19,14 @@ package org.elasticsearch.cluster.metadata; -import com.google.common.collect.Sets; import org.elasticsearch.Version; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData.State; import org.elasticsearch.common.Strings; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.test.ESTestCase; import org.junit.Test; @@ -36,8 +35,14 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashSet; -import static com.google.common.collect.Sets.newHashSet; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.common.util.set.Sets.newHashSet; +import static org.hamcrest.Matchers.arrayContaining; +import static org.hamcrest.Matchers.arrayContainingInAnyOrder; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.emptyArray; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; /** */ @@ -562,7 +567,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { .put(indexBuilder("kuku")); ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.lenientExpandOpen()); - assertThat(newHashSet(indexNameExpressionResolver.concreteIndices(context, new String[]{})), equalTo(Sets.newHashSet("kuku", "testXXX"))); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndices(context, new String[]{})), 
equalTo(newHashSet("kuku", "testXXX"))); } @Test diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index d2f55fc13f8..4c364151abb 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -28,7 +28,7 @@ import org.junit.Test; import java.util.Arrays; -import static com.google.common.collect.Sets.newHashSet; +import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.equalTo; public class WildcardExpressionResolverTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java index 6b2e972e244..960ccc52090 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESAllocationTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; @@ -93,6 +94,7 @@ public class RoutingServiceTests extends ESAllocationTestCase { } @Test + @TestLogging("_root:DEBUG") public void testDelayedUnassignedScheduleReroute() throws Exception { AllocationService allocation = createAllocationService(); MetaData metaData = MetaData.builder() @@ -108,7 +110,7 @@ public class RoutingServiceTests extends ESAllocationTestCase { clusterState = 
ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build(); // starting replicas clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build(); - assertThat(clusterState.getRoutingNodes().hasUnassigned(), equalTo(false)); + assertFalse("no shards should be unassigned", clusterState.getRoutingNodes().hasUnassigned()); // remove node2 and reroute ClusterState prevState = clusterState; clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node2")).build(); @@ -123,7 +125,7 @@ public class RoutingServiceTests extends ESAllocationTestCase { assertBusy(new Runnable() { @Override public void run() { - assertThat(routingService.hasReroutedAndClear(), equalTo(true)); + assertTrue("routing service should have run a reroute", routingService.hasReroutedAndClear()); } }); // verify the registration has been reset @@ -186,6 +188,7 @@ public class RoutingServiceTests extends ESAllocationTestCase { @Override protected void performReroute(String reason) { + logger.info("--> performing fake reroute [{}]", reason); rerouted.set(true); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java index f1f29d5e691..74989056c0f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java @@ -785,7 +785,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); - 
logger.info("--> adding two nodes on same rack and do rerouting"); + logger.info("--> adding two nodes in different zones and do rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() .put(newNode("A-0", ImmutableMap.of("zone", "a"))) .put(newNode("B-0", ImmutableMap.of("zone", "b"))) @@ -826,4 +826,45 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("A-0").size(), equalTo(3)); assertThat(clusterState.getRoutingNodes().node("B-0").size(), equalTo(5)); } + + @Test + public void testUnassignedShardsWithUnbalancedZones() { + AllocationService strategy = createAllocationService(settingsBuilder() + .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.awareness.attributes", "zone") + .build()); + + logger.info("Building initial routing table for 'testUnassignedShardsWithUnbalancedZones'"); + + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(4)) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); + + logger.info("--> adding 5 nodes in different zones and do rerouting"); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() + .put(newNode("A-0", ImmutableMap.of("zone", "a"))) + .put(newNode("A-1", ImmutableMap.of("zone", "a"))) + .put(newNode("A-2", ImmutableMap.of("zone", "a"))) + .put(newNode("A-3", ImmutableMap.of("zone", "a"))) + .put(newNode("B-0", ImmutableMap.of("zone", "b"))) + ).build(); + routingTable = strategy.reroute(clusterState).routingTable(); + clusterState = 
ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(0)); + assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1)); + + logger.info("--> start the shard (primary)"); + routingTable = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(3)); + assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).size(), equalTo(1)); // Unassigned shard is expected. + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java index a6b0e414f59..e197dbd49c3 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java @@ -36,14 +36,22 @@ import org.elasticsearch.test.ESAllocationTestCase; import org.junit.Test; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Set; -import static com.google.common.collect.Sets.newHashSet; -import static org.elasticsearch.cluster.routing.ShardRoutingState.*; +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; import static 
org.elasticsearch.cluster.routing.allocation.RoutingNodesUtils.numberOfShardsOfType; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; /** * @@ -262,7 +270,7 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { assertThat(nodeIndex, lessThan(25)); } RoutingNodes routingNodes = clusterState.getRoutingNodes(); - Set encounteredIndices = newHashSet(); + Set encounteredIndices = new HashSet<>(); for (RoutingNode routingNode : routingNodes) { assertThat(routingNode.numberOfShardsWithState(STARTED), equalTo(0)); assertThat(routingNode.size(), equalTo(2)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index fbecc86a0f3..dfdd9ba5948 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.DiskUsage; +import org.elasticsearch.cluster.MockInternalClusterInfoService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -88,7 +89,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { Map shardSizes = new HashMap<>(); shardSizes.put("[test][0][p]", 10L); // 10 
bytes shardSizes.put("[test][0][r]", 10L); - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP); AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -282,7 +283,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { Map shardSizes = new HashMap<>(); shardSizes.put("[test][0][p]", 10L); // 10 bytes shardSizes.put("[test][0][r]", 10L); - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP); AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -344,7 +345,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // Make node without the primary now habitable to replicas usages.put(nodeWithoutPrimary, new DiskUsage(nodeWithoutPrimary, "", "/dev/null", 100, 35)); // 65% used - final ClusterInfo clusterInfo2 = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP); + final ClusterInfo clusterInfo2 = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP); cis = new ClusterInfoService() { @Override public ClusterInfo getClusterInfo() { @@ -543,7 +544,7 @@ public class 
DiskThresholdDeciderTests extends ESAllocationTestCase { Map shardSizes = new HashMap<>(); shardSizes.put("[test][0][p]", 10L); // 10 bytes - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP); AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -610,7 +611,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { Map shardSizes = new HashMap<>(); shardSizes.put("[test][0][p]", 10L); // 10 bytes shardSizes.put("[test][0][r]", 10L); // 10 bytes - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP); AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -714,7 +715,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { shardSizes.put("[test][0][r]", 14L); shardSizes.put("[test2][0][p]", 1L); // 1 bytes shardSizes.put("[test2][0][r]", 1L); - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP); AllocationDeciders deciders = new 
AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( @@ -817,7 +818,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { Map shardSizes = new HashMap<>(); shardSizes.put("[test][0][p]", 40L); shardSizes.put("[test][1][p]", 40L); - final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP); DiskThresholdDecider diskThresholdDecider = new DiskThresholdDecider(diskSettings); MetaData metaData = MetaData.builder() @@ -925,26 +926,4 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { rn.shardsWithState(RELOCATING), rn.shardsWithState(STARTED)); } - - public static final Map DEV_NULL_MAP = Collections.unmodifiableMap(new StaticValueMap("/dev/null")); - - // a test only map that always returns the same value no matter what key is passed - private static final class StaticValueMap extends AbstractMap { - - private final String value; - - private StaticValueMap(String value) { - this.value = value; - } - - @Override - public String get(Object key) { - return value; - } - - @Override - public Set> entrySet() { - throw new UnsupportedOperationException("this is a test-only map that only supports #get(Object key)"); - } - } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index 853f669c6d5..6460664c47d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -20,12 +20,21 @@ package org.elasticsearch.cluster.routing.allocation.decider; import org.elasticsearch.Version; -import org.elasticsearch.cluster.*; +import org.elasticsearch.cluster.ClusterInfo; +import org.elasticsearch.cluster.ClusterInfoService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.DiskUsage; +import org.elasticsearch.cluster.EmptyClusterInfoService; +import org.elasticsearch.cluster.MockInternalClusterInfoService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingHelper; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; @@ -215,7 +224,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { shardSizes.put("[test][1][r]", 100L); shardSizes.put("[test][2][r]", 1000L); shardSizes.put("[other][0][p]", 10000L); - ClusterInfo info = new ClusterInfo(Collections.EMPTY_MAP, Collections.EMPTY_MAP, shardSizes, DiskThresholdDeciderTests.DEV_NULL_MAP); + ClusterInfo info = new ClusterInfo(Collections.EMPTY_MAP, Collections.EMPTY_MAP, shardSizes, MockInternalClusterInfoService.DEV_NULL_MAP); ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_0, "node1"); 
ShardRoutingHelper.moveToStarted(test_0); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java index 121c477637a..8396f61b7f7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java @@ -19,8 +19,6 @@ package org.elasticsearch.cluster.routing.allocation.decider; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.cluster.ClusterInfo; @@ -28,11 +26,8 @@ import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.DiskUsage; import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.MockInternalClusterInfoService; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; -import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; @@ -170,20 +165,4 @@ public class MockDiskUsagesIT extends ESIntegTestCase { } }); } - - /** Create a fake NodeStats for the given node and usage */ - public static NodeStats makeStats(String nodeName, DiskUsage usage) { - FsInfo.Path[] paths = new FsInfo.Path[1]; - FsInfo.Path path = new FsInfo.Path("/dev/null", null, - usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes()); - paths[0] = path; - FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), paths); - return new NodeStats(new 
DiscoveryNode(nodeName, DummyTransportAddress.INSTANCE, Version.CURRENT), - System.currentTimeMillis(), - null, null, null, null, null, - fsInfo, - null, null, null, - null); - } - } diff --git a/core/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java b/core/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java index bb9cad1ce27..02d822daf84 100644 --- a/core/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java +++ b/core/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java @@ -20,10 +20,10 @@ package org.elasticsearch.common.cli; import com.google.common.base.Charsets; -import com.google.common.collect.Sets; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.junit.Test; @@ -32,10 +32,18 @@ import java.io.IOException; import java.nio.file.FileSystem; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.attribute.*; +import java.nio.file.attribute.GroupPrincipal; +import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFileAttributes; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.UserPrincipal; import java.util.Set; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; /** * diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java index aff6e7a0a34..1c7a6abeaca 100644 --- 
a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.lucene.index; -import com.google.common.collect.Sets; import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -50,6 +49,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -123,7 +123,7 @@ public class FreqTermsEnumTests extends ESTestCase { } } - Set deletedIds = Sets.newHashSet(); + Set deletedIds = new HashSet<>(); for (int i = 0; i < docs.length; i++) { Document doc = docs[i]; if (randomInt(5) == 2) { @@ -157,7 +157,7 @@ public class FreqTermsEnumTests extends ESTestCase { } private void addFreqs(Document doc, Map reference) { - Set addedDocFreq = Sets.newHashSet(); + Set addedDocFreq = new HashSet<>(); for (IndexableField field : doc.getFields("field")) { String term = field.stringValue(); FreqHolder freqHolder = reference.get(term); diff --git a/core/src/test/java/org/elasticsearch/common/util/MockBigArrays.java b/core/src/test/java/org/elasticsearch/common/util/MockBigArrays.java index de5a8bbd4f0..50f1886431a 100644 --- a/core/src/test/java/org/elasticsearch/common/util/MockBigArrays.java +++ b/core/src/test/java/org/elasticsearch/common/util/MockBigArrays.java @@ -21,12 +21,12 @@ package org.elasticsearch.common.util; import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.SeedUtils; -import com.google.common.collect.Sets; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.apache.lucene.util.BytesRef; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.common.inject.Inject; 
+import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -58,7 +58,7 @@ public class MockBigArrays extends BigArrays { // not empty, we might be executing on a shared cluster that keeps on obtaining // and releasing arrays, lets make sure that after a reasonable timeout, all master // copy (snapshot) have been released - boolean success = ESTestCase.awaitBusy(() -> Sets.intersection(masterCopy.keySet(), ACQUIRED_ARRAYS.keySet()).isEmpty()); + boolean success = ESTestCase.awaitBusy(() -> Sets.haveEmptyIntersection(masterCopy.keySet(), ACQUIRED_ARRAYS.keySet())); if (!success) { masterCopy.keySet().retainAll(ACQUIRED_ARRAYS.keySet()); ACQUIRED_ARRAYS.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on diff --git a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java index fe3051b4111..8d495a006cb 100644 --- a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.util; import com.google.common.base.Charsets; -import com.google.common.collect.Sets; import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; @@ -28,6 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.shard.ShardId; @@ -39,8 +39,16 @@ import java.io.BufferedWriter; import java.io.IOException; 
import java.io.InputStream; import java.net.URISyntaxException; -import java.nio.file.*; -import java.util.*; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; /** */ @@ -133,8 +141,7 @@ public class MultiDataPathUpgraderTests extends ESTestCase { */ public void testUpgradeRealIndex() throws IOException, URISyntaxException { List indexes = new ArrayList<>(); - Path dir = getDataPath("/" + OldIndexBackwardsCompatibilityIT.class.getPackage().getName().replace('.', '/')); // the files are in the same pkg as the OldIndexBackwardsCompatibilityTests test - try (DirectoryStream stream = Files.newDirectoryStream(dir, "index-*.zip")) { + try (DirectoryStream stream = Files.newDirectoryStream(getBwcIndicesPath(), "index-*.zip")) { for (Path path : stream) { indexes.add(path); } diff --git a/core/src/test/java/org/elasticsearch/index/deletionpolicy/SnapshotIndexCommitExistsMatcher.java b/core/src/test/java/org/elasticsearch/index/deletionpolicy/SnapshotIndexCommitExistsMatcher.java index 6ef7974d170..26f723b8cb1 100644 --- a/core/src/test/java/org/elasticsearch/index/deletionpolicy/SnapshotIndexCommitExistsMatcher.java +++ b/core/src/test/java/org/elasticsearch/index/deletionpolicy/SnapshotIndexCommitExistsMatcher.java @@ -19,15 +19,13 @@ package org.elasticsearch.index.deletionpolicy; -import com.google.common.collect.Sets; +import org.elasticsearch.common.util.set.Sets; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; import java.io.IOException; -import java.util.Arrays; import java.util.HashSet; -import java.util.List; /** * diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 
5a1efd1bfbb..35939d37807 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1786,8 +1786,7 @@ public class InternalEngineTests extends ESTestCase { public void testUpgradeOldIndex() throws IOException { List indexes = new ArrayList<>(); - Path dir = getDataPath("/" + OldIndexBackwardsCompatibilityIT.class.getPackage().getName().replace('.', '/')); // the files are in the same pkg as the OldIndexBackwardsCompatibilityTests test - try (DirectoryStream stream = Files.newDirectoryStream(dir, "index-*.zip")) { + try (DirectoryStream stream = Files.newDirectoryStream(getBwcIndicesPath(), "index-*.zip")) { for (Path path : stream) { indexes.add(path); } diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java index b6ff913d068..a307cf180c5 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.ToXContent; @@ -107,7 +108,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCaseasList(tq1, tq2), 0f), Occur.SHOULD); assertEquals(expected.build(), rewrittenQuery); } @@ -1984,18 +2042,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { IndexQueryParserService queryParser = queryParser(); String query = jsonBuilder().startObject().startObject("function_score") .startArray("functions") - 
.startObject().field("weight", 2).field("boost_factor", 2).endObject() - .endArray() - .endObject().endObject().string(); - try { - queryParser.parse(query).query(); - fail("Expect exception here because boost_factor must not have a weight"); - } catch (QueryParsingException e) { - assertThat(e.getDetailedMessage(), containsString(BoostScoreFunction.BOOST_WEIGHT_ERROR_MESSAGE)); - } - query = jsonBuilder().startObject().startObject("function_score") - .startArray("functions") - .startObject().field("boost_factor",2).endObject() + .startObject().startObject("script_score").field("script", "3").endObject().endObject() .endArray() .field("weight", 2) .endObject().endObject().string(); @@ -2008,7 +2055,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { query = jsonBuilder().startObject().startObject("function_score") .field("weight", 2) .startArray("functions") - .startObject().field("boost_factor",2).endObject() + .startObject().endObject() .endArray() .endObject().endObject().string(); try { diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 5d153c70524..93bc537299c 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -1157,8 +1157,7 @@ public class TranslogTests extends ESTestCase { public void testUpgradeOldTranslogFiles() throws IOException { List indexes = new ArrayList<>(); - Path dir = getDataPath("/" + OldIndexBackwardsCompatibilityIT.class.getPackage().getName().replace('.', '/')); // the files are in the same pkg as the OldIndexBackwardsCompatibilityTests test - try (DirectoryStream stream = Files.newDirectoryStream(dir, "index-*.zip")) { + try (DirectoryStream stream = Files.newDirectoryStream(getBwcIndicesPath(), "index-*.zip")) { for (Path path : stream) { indexes.add(path); } diff --git 
a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java index 8ea074fb94c..d9f8f71c215 100644 --- a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.indexlifecycle; import com.google.common.base.Function; import com.google.common.collect.Iterables; -import com.google.common.collect.Sets; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; @@ -31,6 +30,7 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -43,10 +43,16 @@ import static org.elasticsearch.client.Requests.clusterHealthRequest; import static org.elasticsearch.client.Requests.createIndexRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.cluster.routing.ShardRoutingState.*; +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.ESIntegTestCase.Scope; -import static 
org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; /** diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java index af5c2ef8b09..ed73a44c517 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java @@ -18,11 +18,11 @@ */ package org.elasticsearch.indices.recovery; -import com.google.common.collect.Sets; import org.apache.lucene.store.IndexOutput; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.transport.LocalTransportAddress; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.store.StoreFileMetaData; diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index b01ec7865f8..d40ebf57a48 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -19,7 +19,9 @@ package org.elasticsearch.indices.stats; +import org.elasticsearch.index.VersionType; import org.elasticsearch.index.cache.IndexCacheModule; +import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.shard.MergeSchedulerConfig; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.Version; @@ -41,7 +43,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.cache.query.QueryCacheStats; -import org.elasticsearch.index.cache.query.index.IndexQueryCache; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.index.store.IndexStore; @@ -368,7 +369,6 @@ public class IndexStatsIT extends ESIntegTestCase { client().prepareIndex("test1", "type1", Integer.toString(1)).setSource("field", "value").execute().actionGet(); client().prepareIndex("test1", "type2", Integer.toString(1)).setSource("field", "value").execute().actionGet(); client().prepareIndex("test2", "type", Integer.toString(1)).setSource("field", "value").execute().actionGet(); - refresh(); NumShards test1 = getNumShards("test1"); @@ -381,6 +381,7 @@ public class IndexStatsIT extends ESIntegTestCase { assertThat(stats.getPrimaries().getDocs().getCount(), equalTo(3l)); assertThat(stats.getTotal().getDocs().getCount(), equalTo(totalExpectedWrites)); assertThat(stats.getPrimaries().getIndexing().getTotal().getIndexCount(), equalTo(3l)); + assertThat(stats.getPrimaries().getIndexing().getTotal().getIndexFailedCount(), equalTo(0l)); assertThat(stats.getPrimaries().getIndexing().getTotal().isThrottled(), equalTo(false)); assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTimeInMillis(), equalTo(0l)); assertThat(stats.getTotal().getIndexing().getTotal().getIndexCount(), equalTo(totalExpectedWrites)); @@ -423,10 +424,12 @@ public class IndexStatsIT extends ESIntegTestCase { stats = client().admin().indices().prepareStats().setTypes("type1", "type").execute().actionGet(); assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getIndexCount(), equalTo(1l)); assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type").getIndexCount(), equalTo(1l)); + 
assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getIndexFailedCount(), equalTo(0l)); assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type2"), nullValue()); assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getIndexCurrent(), equalTo(0l)); assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getDeleteCurrent(), equalTo(0l)); + assertThat(stats.getTotal().getGet().getCount(), equalTo(0l)); // check get GetResponse getResponse = client().prepareGet("test1", "type1", "1").execute().actionGet(); @@ -462,6 +465,30 @@ public class IndexStatsIT extends ESIntegTestCase { assertThat(stats.getTotal().getIndexing(), nullValue()); assertThat(stats.getTotal().getGet(), nullValue()); assertThat(stats.getTotal().getSearch(), nullValue()); + + // index failed + try { + client().prepareIndex("test1", "type1", Integer.toString(1)).setSource("field", "value").setVersion(1) + .setVersionType(VersionType.EXTERNAL).execute().actionGet(); + fail("Expected a version conflict"); + } catch (VersionConflictEngineException e) {} + try { + client().prepareIndex("test1", "type2", Integer.toString(1)).setSource("field", "value").setVersion(1) + .setVersionType(VersionType.EXTERNAL).execute().actionGet(); + fail("Expected a version conflict"); + } catch (VersionConflictEngineException e) {} + try { + client().prepareIndex("test2", "type", Integer.toString(1)).setSource("field", "value").setVersion(1) + .setVersionType(VersionType.EXTERNAL).execute().actionGet(); + fail("Expected a version conflict"); + } catch (VersionConflictEngineException e) {} + + stats = client().admin().indices().prepareStats().setTypes("type1", "type2").execute().actionGet(); + assertThat(stats.getIndex("test1").getTotal().getIndexing().getTotal().getIndexFailedCount(), equalTo(2l)); + assertThat(stats.getIndex("test2").getTotal().getIndexing().getTotal().getIndexFailedCount(), equalTo(1l)); + 
assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getIndexFailedCount(), equalTo(1L)); + assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type2").getIndexFailedCount(), equalTo(1L)); + assertThat(stats.getTotal().getIndexing().getTotal().getIndexFailedCount(), equalTo(3L)); } @Test diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 8be22a8839e..3282b948cff 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.indices.template; -import com.google.common.collect.Sets; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.alias.Alias; @@ -42,12 +41,24 @@ import org.junit.Test; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Set; import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static 
org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; /** * @@ -379,7 +390,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { searchResponse = client().prepareSearch("complex_filtered_alias").get(); assertHitCount(searchResponse, 3l); - Set types = Sets.newHashSet(); + Set types = new HashSet<>(); for (SearchHit searchHit : searchResponse.getHits().getHits()) { types.add(searchHit.getType()); } diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index 6b8818a4931..94bae66702b 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -54,11 +54,14 @@ public class OsProbeTests extends ESTestCase { } assertNotNull(stats.getMem()); - assertThat(stats.getMem().getTotal().bytes(), greaterThan(0L)); - assertThat(stats.getMem().getFree().bytes(), greaterThan(0L)); - assertThat(stats.getMem().getFreePercent(), allOf(greaterThanOrEqualTo((short) 0), lessThanOrEqualTo((short) 100))); - assertThat(stats.getMem().getUsed().bytes(), greaterThan(0L)); - assertThat(stats.getMem().getUsedPercent(), allOf(greaterThanOrEqualTo((short) 0), lessThanOrEqualTo((short) 100))); + // TODO: once java 9 is sorted out make these hard checks (currently 9-ea and 9-ea-jigsaw will differ) + if (!Constants.JRE_IS_MINIMUM_JAVA9) { + assertThat(stats.getMem().getTotal().bytes(), greaterThan(0L)); + assertThat(stats.getMem().getFree().bytes(), greaterThan(0L)); + assertThat(stats.getMem().getFreePercent(), allOf(greaterThanOrEqualTo((short) 0), lessThanOrEqualTo((short) 100))); + assertThat(stats.getMem().getUsed().bytes(), greaterThan(0L)); + assertThat(stats.getMem().getUsedPercent(), allOf(greaterThanOrEqualTo((short) 0), lessThanOrEqualTo((short) 100))); + } assertNotNull(stats.getSwap()); assertNotNull(stats.getSwap().getTotal()); @@ -70,9 +73,12 @@ public class 
OsProbeTests extends ESTestCase { assertThat(stats.getSwap().getUsed().bytes(), greaterThanOrEqualTo(0L)); } else { // On platforms with no swap - assertThat(stats.getSwap().getTotal().bytes(), equalTo(0L)); - assertThat(stats.getSwap().getFree().bytes(), equalTo(0L)); - assertThat(stats.getSwap().getUsed().bytes(), equalTo(0L)); + // TODO: once java 9 is sorted out make these hard checks (currently 9-ea and 9-ea-jigsaw will differ) + if (!Constants.JRE_IS_MINIMUM_JAVA9) { + assertThat(stats.getSwap().getTotal().bytes(), equalTo(0L)); + assertThat(stats.getSwap().getFree().bytes(), equalTo(0L)); + assertThat(stats.getSwap().getUsed().bytes(), equalTo(0L)); + } } } } diff --git a/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java index 18b5f7a7e19..21bfcb287ac 100644 --- a/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java @@ -51,8 +51,11 @@ public class ProcessProbeTests extends ESTestCase { assertThat(stats.getOpenFileDescriptors(), equalTo(-1L)); assertThat(stats.getMaxFileDescriptors(), equalTo(-1L)); } else { - assertThat(stats.getOpenFileDescriptors(), greaterThan(0L)); - assertThat(stats.getMaxFileDescriptors(), greaterThan(0L)); + // TODO: once java 9 is sorted out make these hard checks (currently 9-ea and 9-ea-jigsaw will differ) + if (!Constants.JRE_IS_MINIMUM_JAVA9) { + assertThat(stats.getOpenFileDescriptors(), greaterThan(0L)); + assertThat(stats.getMaxFileDescriptors(), greaterThan(0L)); + } } ProcessStats.Cpu cpu = stats.getCpu(); @@ -62,11 +65,14 @@ public class ProcessProbeTests extends ESTestCase { assertThat(cpu.getPercent(), anyOf(lessThan((short) 0), allOf(greaterThanOrEqualTo((short) 0), lessThanOrEqualTo((short) 100)))); // CPU time can return -1 if the the platform does not support this operation, let's see which platforms fail -
assertThat(cpu.total, greaterThan(0L)); + if (!Constants.JRE_IS_MINIMUM_JAVA9) { + // TODO: once java 9 is sorted out make these hard checks (currently 9-ea and 9-ea-jigsaw will differ) + assertThat(cpu.total, greaterThan(0L)); - ProcessStats.Mem mem = stats.getMem(); - assertNotNull(mem); - // Commited total virtual memory can return -1 if not supported, let's see which platforms fail - assertThat(mem.totalVirtual, greaterThan(0L)); + ProcessStats.Mem mem = stats.getMem(); + assertNotNull(mem); + // Commited total virtual memory can return -1 if not supported, let's see which platforms fail + assertThat(mem.totalVirtual, greaterThan(0L)); + } } } diff --git a/core/src/test/java/org/elasticsearch/network/DirectBufferNetworkIT.java b/core/src/test/java/org/elasticsearch/network/DirectBufferNetworkIT.java index 6d79de93770..40da9aeca0e 100644 --- a/core/src/test/java/org/elasticsearch/network/DirectBufferNetworkIT.java +++ b/core/src/test/java/org/elasticsearch/network/DirectBufferNetworkIT.java @@ -54,6 +54,7 @@ public class DirectBufferNetworkIT extends ESIntegTestCase { */ @Test public void verifySaneDirectBufferAllocations() throws Exception { + assumeTrue("test cannot run with security manager enabled", System.getSecurityManager() == null); createIndex("test"); int estimatedBytesSize = scaledRandomIntBetween(ByteSizeValue.parseBytesSizeValue("1.1mb", "estimatedBytesSize").bytesAsInt(), diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index 85b46a69476..57bf5592d36 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -45,8 +45,9 @@ import org.elasticsearch.index.percolator.PercolatorException; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryShardException; -import 
org.elasticsearch.index.query.functionscore.factor.FactorBuilder; import org.elasticsearch.index.query.support.QueryInnerHits; +import org.elasticsearch.index.query.QueryParsingException; +import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.search.highlight.HighlightBuilder; @@ -55,43 +56,16 @@ import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.NavigableSet; -import java.util.Set; -import java.util.TreeSet; +import java.util.*; import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder; import static org.elasticsearch.common.settings.Settings.builder; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.common.xcontent.XContentFactory.smileBuilder; -import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder; +import static org.elasticsearch.common.xcontent.XContentFactory.*; import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static 
org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.arrayContaining; -import static org.hamcrest.Matchers.arrayContainingInAnyOrder; -import static org.hamcrest.Matchers.arrayWithSize; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.emptyArray; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.hamcrest.Matchers.*; /** * @@ -1447,7 +1421,7 @@ public class PercolatorIT extends ESIntegTestCase { .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) .setHighlightBuilder(new HighlightBuilder().field("field1")) - .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new FactorBuilder().boostFactor(5.5f))) + .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new WeightBuilder().setWeight(5.5f))) .setScore(true) .execute().actionGet(); assertNoFailures(response); @@ -1479,7 +1453,7 @@ public class PercolatorIT extends ESIntegTestCase { .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) .setHighlightBuilder(new HighlightBuilder().field("field1")) - .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new FactorBuilder().boostFactor(5.5f))) + .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new WeightBuilder().setWeight(5.5f))) .setSortByScore(true) .execute().actionGet(); assertMatchCount(response, 5l); @@ -1511,7 +1485,7 @@ public class PercolatorIT extends ESIntegTestCase { .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", 
"The quick brown fox jumps over the lazy dog").endObject())) .setHighlightBuilder(new HighlightBuilder().field("field1").highlightQuery(QueryBuilders.matchQuery("field1", "jumps"))) - .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new FactorBuilder().boostFactor(5.5f))) + .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new WeightBuilder().setWeight(5.5f))) .setSortByScore(true) .execute().actionGet(); assertMatchCount(response, 5l); @@ -1548,7 +1522,7 @@ public class PercolatorIT extends ESIntegTestCase { .setSize(5) .setGetRequest(Requests.getRequest("test").type("type").id("1")) .setHighlightBuilder(new HighlightBuilder().field("field1")) - .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new FactorBuilder().boostFactor(5.5f))) + .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new WeightBuilder().setWeight(5.5f))) .setSortByScore(true) .execute().actionGet(); assertMatchCount(response, 5l); diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginManagerCliTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginManagerCliTests.java index 0c9db6c87d8..30946ccdb92 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginManagerCliTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginManagerCliTests.java @@ -19,12 +19,14 @@ package org.elasticsearch.plugins; +import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.CliToolTestCase; import org.junit.Test; import java.io.IOException; import static org.elasticsearch.common.cli.CliTool.ExitStatus.OK_AND_EXIT; +import static org.elasticsearch.common.cli.CliTool.ExitStatus.IO_ERROR; import static org.hamcrest.Matchers.*; public class PluginManagerCliTests extends CliToolTestCase { @@ -50,4 +52,11 @@ public class PluginManagerCliTests extends CliToolTestCase { assertThat(new PluginManagerCliParser(terminal).execute(args("list -h")), is(OK_AND_EXIT)); assertTerminalOutputContainsHelpFile(terminal, 
"/org/elasticsearch/plugins/plugin-list.help"); } + + public void testUrlSpacesInPath() { + CliToolTestCase.CaptureOutputTerminal terminal = new CliToolTestCase.CaptureOutputTerminal(); + CliTool.ExitStatus execute = new PluginManagerCliParser(terminal).execute(args("install file://foo%20deps")); + assertThat(execute.status(), is(IO_ERROR.status())); + + } } diff --git a/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java b/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java index db6741b33e1..2d823292e55 100644 --- a/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java +++ b/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java @@ -29,8 +29,8 @@ import java.util.HashMap; import java.util.Map; import java.util.Set; -import static com.google.common.collect.Sets.newHashSet; import static org.elasticsearch.cluster.metadata.AliasAction.newAddAliasAction; +import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; diff --git a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java index 2d1a563294d..f9db13b889b 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.script; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ScriptService.ScriptType; @@ -121,7 +120,7 @@ public class ScriptModesTests extends ESTestCase { @Test public void testScriptTypeGenericSettings() { int randomInt = randomIntBetween(1, ScriptType.values().length - 1); - Set randomScriptTypesSet = 
Sets.newHashSet(); + Set randomScriptTypesSet = new HashSet<>(); ScriptMode[] randomScriptModes = new ScriptMode[randomInt]; for (int i = 0; i < randomInt; i++) { boolean added = false; @@ -154,7 +153,7 @@ public class ScriptModesTests extends ESTestCase { @Test public void testScriptContextGenericSettings() { int randomInt = randomIntBetween(1, scriptContexts.length - 1); - Set randomScriptContextsSet = Sets.newHashSet(); + Set randomScriptContextsSet = new HashSet<>(); ScriptMode[] randomScriptModes = new ScriptMode[randomInt]; for (int i = 0; i < randomInt; i++) { boolean added = false; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index f6dff6ecf92..6bbbdb501c0 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.search.aggregations.bucket; -import com.google.common.collect.Sets; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.unit.DistanceUnit; @@ -35,6 +33,7 @@ import org.junit.Test; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -360,7 +359,7 @@ public class GeoDistanceIT extends ESIntegTestCase { assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Terms cities = bucket.getAggregations().get("cities"); assertThat(cities, Matchers.notNullValue()); - Set names = Sets.newHashSet(); + Set names = new HashSet<>(); for (Terms.Bucket city : cities.getBuckets()) { names.add(city.getKeyAsString()); } @@ -380,7 +379,7 @@ public class GeoDistanceIT extends ESIntegTestCase { assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); cities = 
bucket.getAggregations().get("cities"); assertThat(cities, Matchers.notNullValue()); - names = Sets.newHashSet(); + names = new HashSet<>(); for (Terms.Bucket city : cities.getBuckets()) { names.add(city.getKeyAsString()); } @@ -400,7 +399,7 @@ public class GeoDistanceIT extends ESIntegTestCase { assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); cities = bucket.getAggregations().get("cities"); assertThat(cities, Matchers.notNullValue()); - names = Sets.newHashSet(); + names = new HashSet<>(); for (Terms.Bucket city : cities.getBuckets()) { names.add(city.getKeyAsString()); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index 507939e6858..f7c1d060bd3 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -19,10 +19,10 @@ package org.elasticsearch.search.aggregations.pipeline; * under the License. 
*/ +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; @@ -41,9 +41,9 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.histogra import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.percentilesBucket; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.sumBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.core.IsNull.notNullValue; @@ -433,30 +433,22 @@ public class PercentilesBucketIT extends ESIntegTestCase { } @Test - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/13179") public void testBadPercents() throws Exception { Double[] badPercents = {-1.0, 110.0}; try { - SearchResponse response = client().prepareSearch("idx") + client().prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .addAggregation(percentilesBucket("percentiles_bucket") .setBucketsPaths("terms>sum") .percents(badPercents)).execute().actionGet(); - assertSearchResponse(response); - - Terms terms = 
response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - List buckets = terms.getBuckets(); - assertThat(buckets.size(), equalTo(0)); - - PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentiles_bucket"); - fail("Illegal percent's were provided but no exception was thrown."); } catch (SearchPhaseExecutionException exception) { - // All good + ElasticsearchException[] rootCauses = exception.guessRootCauses(); + assertThat(rootCauses.length, equalTo(1)); + ElasticsearchException rootCause = rootCauses[0]; + assertThat(rootCause.getMessage(), containsString("must only contain non-null doubles from 0.0-100.0 inclusive")); } } @@ -466,7 +458,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { Double[] badPercents = {-1.0, 110.0}; try { - SearchResponse response = client() + client() .prepareSearch("idx") .addAggregation( terms("terms") @@ -479,11 +471,12 @@ public class PercentilesBucketIT extends ESIntegTestCase { .setBucketsPaths("histo>_count") .percents(badPercents))).execute().actionGet(); - PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentiles_bucket"); - fail("Illegal percent's were provided but no exception was thrown."); } catch (SearchPhaseExecutionException exception) { - // All good + ElasticsearchException[] rootCauses = exception.guessRootCauses(); + assertThat(rootCauses.length, equalTo(1)); + ElasticsearchException rootCause = rootCauses[0]; + assertThat(rootCause.getMessage(), containsString("must only contain non-null doubles from 0.0-100.0 inclusive")); } } diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 246da2508d8..969a2d7b48f 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -20,18 +20,14 @@ package org.elasticsearch.search.basic; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; - import com.google.common.base.Charsets; -import com.google.common.collect.Sets; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Requests; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionBuilder; @@ -48,20 +44,29 @@ import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; import java.io.IOException; +import java.util.HashSet; import java.util.Set; +import java.util.TreeSet; -import static org.elasticsearch.action.search.SearchType.*; -import static org.elasticsearch.client.Requests.*; +import static org.elasticsearch.action.search.SearchType.DFS_QUERY_AND_FETCH; +import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH; +import static org.elasticsearch.action.search.SearchType.QUERY_AND_FETCH; +import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH; +import static org.elasticsearch.client.Requests.createIndexRequest; +import static org.elasticsearch.client.Requests.searchRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static 
org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class TransportTwoNodesSearchIT extends ESIntegTestCase { @@ -75,7 +80,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { } private Set prepareData(int numShards) throws Exception { - Set fullExpectedIds = Sets.newTreeSet(); + Set fullExpectedIds = new TreeSet<>(); Settings.Builder settingsBuilder = settingsBuilder() .put(indexSettings()) @@ -214,7 +219,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { .query(matchAllQuery()) .explain(true); - Set collectedIds = Sets.newTreeSet(); + Set collectedIds = new TreeSet<>(); SearchResponse searchResponse = client().search(searchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH)).actionGet(); assertNoFailures(searchResponse); @@ -268,7 +273,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { .query(termQuery("multi", "test")) .from(0).size(20).explain(true); - Set expectedIds = Sets.newHashSet(); + Set expectedIds = new HashSet<>(); for (int i = 0; i < 100; i++) { expectedIds.add(Integer.toString(i)); } @@ -308,7 +313,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { .query(termQuery("multi", "test")) .from(0).size(20).explain(true); - Set expectedIds = Sets.newHashSet(); + Set expectedIds = new HashSet<>(); for (int i = 0; i < 100; i++) { 
expectedIds.add(Integer.toString(i)); } diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index c9d8b8104e5..b88eed1d65b 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.search.child; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.count.CountResponse; @@ -34,7 +35,6 @@ import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.query.HasChildQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.ScoreType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -50,43 +50,15 @@ import org.hamcrest.Matchers; import org.junit.Test; import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Set; +import java.util.*; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery; -import static org.elasticsearch.index.query.QueryBuilders.idsQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static 
org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; -import static org.elasticsearch.index.query.QueryBuilders.notQuery; -import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; -import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.index.query.QueryBuilders.termsQuery; +import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.weightFactorFunction; -import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.hamcrest.Matchers.*; /** * @@ -293,7 +265,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { for (int i = 1; i <= 10; i++) { logger.info("Round {}", i); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", 
matchAllQuery()).scoreType(ScoreType.MAX))) + .setQuery(constantScoreQuery(hasChildQuery("child", matchAllQuery()).scoreMode(ScoreMode.Max))) .get(); assertNoFailures(searchResponse); searchResponse = client().prepareSearch("test") @@ -555,7 +527,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get(); refresh(); - CountResponse countResponse = client().prepareCount("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX)) + CountResponse countResponse = client().prepareCount("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max)) .get(); assertHitCount(countResponse, 1l); @@ -586,7 +558,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("test") .setExplain(true) - .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX)) + .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max)) .get(); assertHitCount(searchResponse, 1l); assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("Score based on join value p1")); @@ -599,7 +571,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("Score based on join value p1")); ExplainResponse explainResponse = client().prepareExplain("test", "parent", parentId) - .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX)) + .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max)) .get(); assertThat(explainResponse.isExists(), equalTo(true)); assertThat(explainResponse.getExplanation().getDetails()[0].getDescription(), equalTo("Score based on join value p1")); @@ -677,7 +649,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { "child", 
QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), scriptFunction(new Script("doc['c_field1'].value"))) - .boostMode(CombineFunction.REPLACE.getName())).scoreType(ScoreType.SUM)).get(); + .boostMode(CombineFunction.REPLACE.getName())).scoreMode(ScoreMode.Total)).get(); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("1")); @@ -694,7 +666,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { "child", QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), scriptFunction(new Script("doc['c_field1'].value"))) - .boostMode(CombineFunction.REPLACE.getName())).scoreType(ScoreType.MAX)).get(); + .boostMode(CombineFunction.REPLACE.getName())).scoreMode(ScoreMode.Max)).get(); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -711,7 +683,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { "child", QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), scriptFunction(new Script("doc['c_field1'].value"))) - .boostMode(CombineFunction.REPLACE.getName())).scoreType(ScoreType.AVG)).get(); + .boostMode(CombineFunction.REPLACE.getName())).scoreMode(ScoreMode.Avg)).get(); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -768,7 +740,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertNoFailures(response); assertThat(response.getHits().totalHits(), equalTo(0l)); - response = client().prepareSearch("test").setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")).scoreType(ScoreType.MAX)) + response = client().prepareSearch("test").setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")).scoreMode(ScoreMode.Max)) .get(); assertNoFailures(response); assertThat(response.getHits().totalHits(), equalTo(0l)); @@ -864,7 +836,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { SearchType[] 
searchTypes = new SearchType[]{SearchType.QUERY_THEN_FETCH, SearchType.DFS_QUERY_THEN_FETCH}; for (SearchType searchType : searchTypes) { SearchResponse searchResponse = client().prepareSearch("test").setSearchType(searchType) - .setQuery(hasChildQuery("child", prefixQuery("c_field", "c")).scoreType(ScoreType.MAX)).addSort("p_field", SortOrder.ASC) + .setQuery(hasChildQuery("child", prefixQuery("c_field", "c")).scoreMode(ScoreMode.Max)).addSort("p_field", SortOrder.ASC) .setSize(5).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(10L)); @@ -907,7 +879,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreType(ScoreType.SUM)).get(); + .setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreMode(ScoreMode.Total)).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); @@ -932,7 +904,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { client().admin().indices().prepareRefresh("test").get(); } - searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreType(ScoreType.SUM)) + searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreMode(ScoreMode.Total)) .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); @@ -968,7 +940,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { client().prepareIndex("test", "child", "c5").setSource("c_field", "x").setParent("p2").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery()).scoreType(ScoreType.SUM)) + SearchResponse searchResponse = 
client().prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery()).scoreMode(ScoreMode.Total)) .setMinScore(3) // Score needs to be 3 or above! .get(); assertNoFailures(searchResponse); @@ -1237,15 +1209,15 @@ public class ChildQuerySearchIT extends ESIntegTestCase { client().prepareIndex("test", "child", "c3").setParent("p2").setSource("c_field", "red").get(); refresh(); - ScoreType scoreMode = ScoreType.values()[getRandom().nextInt(ScoreType.values().length)]; + ScoreMode scoreMode = randomFrom(ScoreMode.values()); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(QueryBuilders.hasChildQuery("child", termQuery("c_field", "blue")).scoreType(scoreMode)).filter(notQuery(termQuery("p_field", "3")))) + .setQuery(boolQuery().must(QueryBuilders.hasChildQuery("child", termQuery("c_field", "blue")).scoreMode(scoreMode)).filter(notQuery(termQuery("p_field", "3")))) .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(QueryBuilders.hasChildQuery("child", termQuery("c_field", "red")).scoreType(scoreMode)).filter(notQuery(termQuery("p_field", "3")))) + .setQuery(boolQuery().must(QueryBuilders.hasChildQuery("child", termQuery("c_field", "red")).scoreMode(scoreMode)).filter(notQuery(termQuery("p_field", "3")))) .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); @@ -1263,7 +1235,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX).queryName("test")) + SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", 
"1")).scoreMode(ScoreMode.Max).queryName("test")) .get(); assertHitCount(searchResponse, 1l); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); @@ -1311,7 +1283,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { try { client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType(ScoreType.MAX)) + .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max)) .get(); fail(); } catch (SearchPhaseExecutionException e) { @@ -1579,28 +1551,23 @@ public class ChildQuerySearchIT extends ESIntegTestCase { return indexBuilders; } - private SearchResponse minMaxQuery(ScoreType scoreType, int minChildren, int maxChildren) throws SearchPhaseExecutionException { - return client() - .prepareSearch("test") - .setQuery( - QueryBuilders - .hasChildQuery( - "child", - QueryBuilders.functionScoreQuery(constantScoreQuery(QueryBuilders.termQuery("foo", "two"))).boostMode("replace").scoreMode("sum") - .add(QueryBuilders.matchAllQuery(), weightFactorFunction(1)) - .add(QueryBuilders.termQuery("foo", "three"), weightFactorFunction(1)) - .add(QueryBuilders.termQuery("foo", "four"), weightFactorFunction(1))).scoreType(scoreType) - .minChildren(minChildren).maxChildren(maxChildren)) - .addSort("_score", SortOrder.DESC).addSort("id", SortOrder.ASC).get(); - } + private SearchResponse minMaxQuery(ScoreMode scoreMode, int minChildren, Integer maxChildren) throws SearchPhaseExecutionException { + HasChildQueryBuilder hasChildQuery = hasChildQuery( + "child", + QueryBuilders.functionScoreQuery(constantScoreQuery(QueryBuilders.termQuery("foo", "two"))).boostMode("replace").scoreMode("sum") + .add(QueryBuilders.matchAllQuery(), weightFactorFunction(1)) + .add(QueryBuilders.termQuery("foo", "three"), weightFactorFunction(1)) + .add(QueryBuilders.termQuery("foo", "four"), weightFactorFunction(1))).scoreMode(scoreMode) + .minChildren(minChildren); + + if (maxChildren != null) { + 
hasChildQuery.maxChildren(maxChildren); + } - private SearchResponse minMaxFilter(int minChildren, int maxChildren) throws SearchPhaseExecutionException { return client() .prepareSearch("test") - .setQuery( - QueryBuilders.constantScoreQuery(QueryBuilders.hasChildQuery("child", termQuery("foo", "two")) - .minChildren(minChildren).maxChildren(maxChildren))) - .addSort("id", SortOrder.ASC).setTrackScores(true).get(); + .setQuery(hasChildQuery) + .addSort("_score", SortOrder.DESC).addSort("id", SortOrder.ASC).get(); } @Test @@ -1614,7 +1581,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { SearchResponse response; // Score mode = NONE - response = minMaxQuery(ScoreType.NONE, 0, 0); + response = minMaxQuery(ScoreMode.None, 0, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("2")); @@ -1624,7 +1591,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("4")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.NONE, 1, 0); + response = minMaxQuery(ScoreMode.None, 1, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("2")); @@ -1634,7 +1601,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("4")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.NONE, 2, 0); + response = minMaxQuery(ScoreMode.None, 2, 0); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -1642,17 +1609,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("4")); assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.NONE, 3, 0); + response = minMaxQuery(ScoreMode.None, 
3, 0); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); assertThat(response.getHits().hits()[0].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.NONE, 4, 0); + response = minMaxQuery(ScoreMode.None, 4, 0); assertThat(response.getHits().totalHits(), equalTo(0l)); - response = minMaxQuery(ScoreType.NONE, 0, 4); + response = minMaxQuery(ScoreMode.None, 0, 4); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("2")); @@ -1662,7 +1629,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("4")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.NONE, 0, 3); + response = minMaxQuery(ScoreMode.None, 0, 3); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("2")); @@ -1672,7 +1639,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("4")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.NONE, 0, 2); + response = minMaxQuery(ScoreMode.None, 0, 2); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("2")); @@ -1680,21 +1647,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("3")); assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.NONE, 2, 2); + response = minMaxQuery(ScoreMode.None, 2, 2); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); assertThat(response.getHits().hits()[0].score(), equalTo(1f)); try { - response = minMaxQuery(ScoreType.NONE, 3, 2); + response = minMaxQuery(ScoreMode.None, 3, 2); fail(); } catch 
(SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'")); } // Score mode = SUM - response = minMaxQuery(ScoreType.SUM, 0, 0); + response = minMaxQuery(ScoreMode.Total, 0, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1704,7 +1671,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.SUM, 1, 0); + response = minMaxQuery(ScoreMode.Total, 1, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1714,7 +1681,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.SUM, 2, 0); + response = minMaxQuery(ScoreMode.Total, 2, 0); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1722,17 +1689,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("3")); assertThat(response.getHits().hits()[1].score(), equalTo(3f)); - response = minMaxQuery(ScoreType.SUM, 3, 0); + response = minMaxQuery(ScoreMode.Total, 3, 0); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); assertThat(response.getHits().hits()[0].score(), equalTo(6f)); - response = minMaxQuery(ScoreType.SUM, 4, 0); + response = minMaxQuery(ScoreMode.Total, 4, 0); assertThat(response.getHits().totalHits(), equalTo(0l)); - response = minMaxQuery(ScoreType.SUM, 0, 4); + response = minMaxQuery(ScoreMode.Total, 0, 4); assertThat(response.getHits().totalHits(), equalTo(3l)); 
assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1742,7 +1709,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.SUM, 0, 3); + response = minMaxQuery(ScoreMode.Total, 0, 3); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1752,7 +1719,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.SUM, 0, 2); + response = minMaxQuery(ScoreMode.Total, 0, 2); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -1760,21 +1727,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("2")); assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.SUM, 2, 2); + response = minMaxQuery(ScoreMode.Total, 2, 2); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); assertThat(response.getHits().hits()[0].score(), equalTo(3f)); try { - response = minMaxQuery(ScoreType.SUM, 3, 2); + response = minMaxQuery(ScoreMode.Total, 3, 2); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'")); } // Score mode = MAX - response = minMaxQuery(ScoreType.MAX, 0, 0); + response = minMaxQuery(ScoreMode.Max, 0, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1784,7 +1751,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), 
equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.MAX, 1, 0); + response = minMaxQuery(ScoreMode.Max, 1, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1794,7 +1761,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.MAX, 2, 0); + response = minMaxQuery(ScoreMode.Max, 2, 0); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1802,17 +1769,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("3")); assertThat(response.getHits().hits()[1].score(), equalTo(2f)); - response = minMaxQuery(ScoreType.MAX, 3, 0); + response = minMaxQuery(ScoreMode.Max, 3, 0); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); assertThat(response.getHits().hits()[0].score(), equalTo(3f)); - response = minMaxQuery(ScoreType.MAX, 4, 0); + response = minMaxQuery(ScoreMode.Max, 4, 0); assertThat(response.getHits().totalHits(), equalTo(0l)); - response = minMaxQuery(ScoreType.MAX, 0, 4); + response = minMaxQuery(ScoreMode.Max, 0, 4); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1822,7 +1789,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.MAX, 0, 3); + response = minMaxQuery(ScoreMode.Max, 0, 3); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1832,7 +1799,7 @@ public 
class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.MAX, 0, 2); + response = minMaxQuery(ScoreMode.Max, 0, 2); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -1840,21 +1807,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("2")); assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.MAX, 2, 2); + response = minMaxQuery(ScoreMode.Max, 2, 2); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); assertThat(response.getHits().hits()[0].score(), equalTo(2f)); try { - response = minMaxQuery(ScoreType.MAX, 3, 2); + response = minMaxQuery(ScoreMode.Max, 3, 2); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'")); } // Score mode = AVG - response = minMaxQuery(ScoreType.AVG, 0, 0); + response = minMaxQuery(ScoreMode.Avg, 0, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1864,7 +1831,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.AVG, 1, 0); + response = minMaxQuery(ScoreMode.Avg, 1, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1874,7 +1841,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = 
minMaxQuery(ScoreType.AVG, 2, 0); + response = minMaxQuery(ScoreMode.Avg, 2, 0); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1882,17 +1849,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("3")); assertThat(response.getHits().hits()[1].score(), equalTo(1.5f)); - response = minMaxQuery(ScoreType.AVG, 3, 0); + response = minMaxQuery(ScoreMode.Avg, 3, 0); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); assertThat(response.getHits().hits()[0].score(), equalTo(2f)); - response = minMaxQuery(ScoreType.AVG, 4, 0); + response = minMaxQuery(ScoreMode.Avg, 4, 0); assertThat(response.getHits().totalHits(), equalTo(0l)); - response = minMaxQuery(ScoreType.AVG, 0, 4); + response = minMaxQuery(ScoreMode.Avg, 0, 4); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1902,7 +1869,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.AVG, 0, 3); + response = minMaxQuery(ScoreMode.Avg, 0, 3); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1912,7 +1879,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.AVG, 0, 2); + response = minMaxQuery(ScoreMode.Avg, 0, 2); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -1920,99 +1887,18 @@ public class ChildQuerySearchIT extends ESIntegTestCase { 
assertThat(response.getHits().hits()[1].id(), equalTo("2")); assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - response = minMaxQuery(ScoreType.AVG, 2, 2); + response = minMaxQuery(ScoreMode.Avg, 2, 2); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); assertThat(response.getHits().hits()[0].score(), equalTo(1.5f)); try { - response = minMaxQuery(ScoreType.AVG, 3, 2); + response = minMaxQuery(ScoreMode.Avg, 3, 2); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'")); } - - // HasChildFilter - response = minMaxFilter(0, 0); - - assertThat(response.getHits().totalHits(), equalTo(3l)); - assertThat(response.getHits().hits()[0].id(), equalTo("2")); - assertThat(response.getHits().hits()[0].score(), equalTo(1f)); - assertThat(response.getHits().hits()[1].id(), equalTo("3")); - assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - assertThat(response.getHits().hits()[2].id(), equalTo("4")); - assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - - response = minMaxFilter(1, 0); - - assertThat(response.getHits().totalHits(), equalTo(3l)); - assertThat(response.getHits().hits()[0].id(), equalTo("2")); - assertThat(response.getHits().hits()[0].score(), equalTo(1f)); - assertThat(response.getHits().hits()[1].id(), equalTo("3")); - assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - assertThat(response.getHits().hits()[2].id(), equalTo("4")); - assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - - response = minMaxFilter(2, 0); - - assertThat(response.getHits().totalHits(), equalTo(2l)); - assertThat(response.getHits().hits()[0].id(), equalTo("3")); - assertThat(response.getHits().hits()[0].score(), equalTo(1f)); - assertThat(response.getHits().hits()[1].id(), equalTo("4")); - assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - - response 
= minMaxFilter(3, 0); - - assertThat(response.getHits().totalHits(), equalTo(1l)); - assertThat(response.getHits().hits()[0].id(), equalTo("4")); - assertThat(response.getHits().hits()[0].score(), equalTo(1f)); - - response = minMaxFilter(4, 0); - - assertThat(response.getHits().totalHits(), equalTo(0l)); - - response = minMaxFilter(0, 4); - - assertThat(response.getHits().totalHits(), equalTo(3l)); - assertThat(response.getHits().hits()[0].id(), equalTo("2")); - assertThat(response.getHits().hits()[0].score(), equalTo(1f)); - assertThat(response.getHits().hits()[1].id(), equalTo("3")); - assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - assertThat(response.getHits().hits()[2].id(), equalTo("4")); - assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - - response = minMaxFilter(0, 3); - - assertThat(response.getHits().totalHits(), equalTo(3l)); - assertThat(response.getHits().hits()[0].id(), equalTo("2")); - assertThat(response.getHits().hits()[0].score(), equalTo(1f)); - assertThat(response.getHits().hits()[1].id(), equalTo("3")); - assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - assertThat(response.getHits().hits()[2].id(), equalTo("4")); - assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - - response = minMaxFilter(0, 2); - - assertThat(response.getHits().totalHits(), equalTo(2l)); - assertThat(response.getHits().hits()[0].id(), equalTo("2")); - assertThat(response.getHits().hits()[0].score(), equalTo(1f)); - assertThat(response.getHits().hits()[1].id(), equalTo("3")); - assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - - response = minMaxFilter(2, 2); - - assertThat(response.getHits().totalHits(), equalTo(1l)); - assertThat(response.getHits().hits()[0].id(), equalTo("3")); - assertThat(response.getHits().hits()[0].score(), equalTo(1f)); - - try { - response = minMaxFilter(3, 2); - fail(); - } catch (SearchPhaseExecutionException e) { - assertThat(e.toString(), containsString("[has_child] 
'max_children' is less than 'min_children'")); - } - } @Test diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index 2d2d72822b8..e2859708aba 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -53,6 +53,7 @@ import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.*; + public class DecayFunctionScoreIT extends ESIntegTestCase { @Test @@ -348,7 +349,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { SearchHits sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(0.153426408, 1.e-5)); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( @@ -359,7 +360,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(1.0, 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(0.5, 1.e-5)); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( @@ -370,7 +371,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(2.0 * (0.30685282 + 0.5), 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(0.30685282 + 0.5, 1.e-5)); logger.info("--> Hit[0] {} 
Explanation:\n {}", sr.getHits().getAt(0).id(), sr.getHits().getAt(0).explanation()); response = client().search( @@ -382,7 +383,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo((0.30685282 + 0.5), 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo((0.30685282 + 0.5) / 2, 1.e-5)); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( @@ -393,7 +394,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(2.0 * (0.30685282), 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5)); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( @@ -404,7 +405,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(1.0, 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(0.5, 1.e-5)); } @@ -797,21 +798,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { refresh(); XContentBuilder query = XContentFactory.jsonBuilder(); - // query that contains a functions[] array but also a single function - query.startObject().startObject("function_score").startArray("functions").startObject().field("boost_factor", "1.3").endObject().endArray().field("boost_factor", "1").endObject().endObject(); - try { - client().search( - searchRequest().source( - searchSource().query(query))).actionGet(); - fail("Search should result in SearchPhaseExecutionException"); - } catch (SearchPhaseExecutionException e) { - 
logger.info(e.shardFailures()[0].reason()); - assertThat(e.shardFailures()[0].reason(), containsString("already found [functions] array, now encountering [boost_factor]. did you mean [boost] instead?")); - } - - query = XContentFactory.jsonBuilder(); // query that contains a single function and a functions[] array - query.startObject().startObject("function_score").field("boost_factor", "1").startArray("functions").startObject().field("boost_factor", "1.3").endObject().endArray().endObject().endObject(); + query.startObject().startObject("function_score").field("weight", "1").startArray("functions").startObject().startObject("script_score").field("script", "3").endObject().endObject().endArray().endObject().endObject(); try { client().search( searchRequest().source( @@ -819,7 +807,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { fail("Search should result in SearchPhaseExecutionException"); } catch (SearchPhaseExecutionException e) { logger.info(e.shardFailures()[0].reason()); - assertThat(e.shardFailures()[0].reason(), containsString("already found [boost_factor], now encountering [functions]. 
did you mean [boost] instead?")); + assertThat(e.shardFailures()[0].reason(), containsString("already found [weight], now encountering [functions].")); } query = XContentFactory.jsonBuilder(); @@ -887,7 +875,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { " \"text\": \"baseball\"\n" + " }\n" + " },\n" + - " \"boost_factor\": 2\n" + + " \"weight\": 2\n" + " },\n" + " {\n" + " \"filter\": {\n" + diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index 7e138162aa6..9db8b98602c 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -178,7 +178,7 @@ public class FunctionScoreIT extends ESIntegTestCase { assertThat( responseWithWeights.getHits().getAt(0).getExplanation().toString(), - equalTo("6.0 = function score, product of:\n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 6.0 = min of:\n 6.0 = function score, score mode [multiply]\n 1.0 = function score, product of:\n 1.0 = match filter: *:*\n 1.0 = Function for field geo_point_field:\n 1.0 = exp(-0.5*pow(MIN of: [Math.max(arcDistance([10.0, 20.0](=doc value),[10.0, 20.0](=origin)) - 0.0(=offset), 0)],2.0)/7.213475204444817E11)\n 2.0 = function score, product of:\n 1.0 = match filter: *:*\n 2.0 = product of:\n 1.0 = field value function: ln(doc['double_field'].value * factor=1.0)\n 2.0 = weight\n 3.0 = function score, product of:\n 1.0 = match filter: *:*\n 3.0 = product of:\n 1.0 = script score function, computed with script:\"[script: _index['text_field']['value'].tf(), type: inline, lang: null, params: null]\n 1.0 = _score: \n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 3.0 = weight\n 3.4028235E38 = maxBoost\n 1.0 = queryBoost\n")); + equalTo("6.0 = function score, product of:\n 
1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 6.0 = min of:\n 6.0 = function score, score mode [multiply]\n 1.0 = function score, product of:\n 1.0 = match filter: *:*\n 1.0 = Function for field geo_point_field:\n 1.0 = exp(-0.5*pow(MIN of: [Math.max(arcDistance([10.0, 20.0](=doc value),[10.0, 20.0](=origin)) - 0.0(=offset), 0)],2.0)/7.213475204444817E11)\n 2.0 = function score, product of:\n 1.0 = match filter: *:*\n 2.0 = product of:\n 1.0 = field value function: ln(doc['double_field'].value * factor=1.0)\n 2.0 = weight\n 3.0 = function score, product of:\n 1.0 = match filter: *:*\n 3.0 = product of:\n 1.0 = script score function, computed with script:\"[script: _index['text_field']['value'].tf(), type: inline, lang: null, params: null]\n 1.0 = _score: \n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 3.0 = weight\n 3.4028235E38 = maxBoost\n")); responseWithWeights = client().search( searchRequest().source( searchSource().query( @@ -186,7 +186,7 @@ public class FunctionScoreIT extends ESIntegTestCase { .explain(true))).actionGet(); assertThat( responseWithWeights.getHits().getAt(0).getExplanation().toString(), - equalTo("4.0 = function score, product of:\n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 4.0 = min of:\n 4.0 = product of:\n 1.0 = constant score 1.0 - no function provided\n 4.0 = weight\n 3.4028235E38 = maxBoost\n 1.0 = queryBoost\n")); + equalTo("4.0 = function score, product of:\n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 4.0 = min of:\n 4.0 = product of:\n 1.0 = constant score 1.0 - no function provided\n 4.0 = weight\n 3.4028235E38 = maxBoost\n")); } diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 506da40df5b..b8507d4b33d 100644 --- 
a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -19,11 +19,10 @@ package org.elasticsearch.search.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import com.google.common.collect.Sets; - import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.MatchQueryBuilder; @@ -49,9 +48,28 @@ import java.util.concurrent.ExecutionException; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.index.query.QueryBuilders.*; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.disMaxQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchPhrasePrefixQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; 
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThan; public class MultiMatchQueryIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java index 1ac4d43b140..f5117efae38 100644 --- a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -74,27 +74,27 @@ public class SearchScrollIT extends ESIntegTestCase { .execute().actionGet(); try { long counter = 0; - + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l)); assertThat(searchResponse.getHits().hits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++)); } - + searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()) .setScroll(TimeValue.timeValueMinutes(2)) .execute().actionGet(); - + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l)); assertThat(searchResponse.getHits().hits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++)); } - + searchResponse = 
client().prepareSearchScroll(searchResponse.getScrollId()) .setScroll(TimeValue.timeValueMinutes(2)) .execute().actionGet(); - + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l)); assertThat(searchResponse.getHits().hits().length, equalTo(30)); for (SearchHit hit : searchResponse.getHits()) { @@ -133,47 +133,47 @@ public class SearchScrollIT extends ESIntegTestCase { .execute().actionGet(); try { long counter = 0; - + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l)); assertThat(searchResponse.getHits().hits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++)); } - + for (int i = 0; i < 32; i++) { searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()) .setScroll(TimeValue.timeValueMinutes(2)) .execute().actionGet(); - + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l)); assertThat(searchResponse.getHits().hits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++)); } } - + // and now, the last one is one searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()) .setScroll(TimeValue.timeValueMinutes(2)) .execute().actionGet(); - + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++)); } - + // a the last is zero searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()) .setScroll(TimeValue.timeValueMinutes(2)) .execute().actionGet(); - + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l)); assertThat(searchResponse.getHits().hits().length, equalTo(0)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.sortValues()[0]).longValue(), 
equalTo(counter++)); } - + } finally { clearScroll(searchResponse.getScrollId()); } @@ -212,7 +212,7 @@ public class SearchScrollIT extends ESIntegTestCase { } searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).execute().actionGet(); } while (searchResponse.getHits().hits().length > 0); - + client().admin().indices().prepareRefresh().execute().actionGet(); assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(500l)); assertThat(client().prepareCount().setQuery(termQuery("message", "test")).execute().actionGet().getCount(), equalTo(0l)); @@ -410,9 +410,7 @@ public class SearchScrollIT extends ESIntegTestCase { assertThrows(internalCluster().transportClient().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)), RestStatus.NOT_FOUND); } - @Test - // https://github.com/elasticsearch/elasticsearch/issues/4156 - public void testDeepPaginationWithOneDocIndexAndDoNotBlowUp() throws Exception { + public void testDeepScrollingDoesNotBlowUp() throws Exception { client().prepareIndex("index", "type", "1") .setSource("field", "value") .setRefresh(true) @@ -422,11 +420,8 @@ public class SearchScrollIT extends ESIntegTestCase { SearchRequestBuilder builder = client().prepareSearch("index") .setSearchType(searchType) .setQuery(QueryBuilders.matchAllQuery()) - .setSize(Integer.MAX_VALUE); - - if (randomBoolean()) { - builder.setScroll("1m"); - } + .setSize(Integer.MAX_VALUE) + .setScroll("1m"); SearchResponse response = builder.execute().actionGet(); try { diff --git a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java index ebf8b756e88..74e2ff37c3f 100644 --- a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -22,11 +22,13 @@ package 
org.elasticsearch.search.simple; import org.apache.lucene.util.Constants; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.internal.DefaultSearchContext; import org.elasticsearch.test.ESIntegTestCase; -import org.junit.Test; import java.util.ArrayList; import java.util.List; @@ -38,12 +40,12 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.containsString; public class SimpleSearchIT extends ESIntegTestCase { - - @Test public void testSearchNullIndex() { try { client().prepareSearch((String) null).setQuery(QueryBuilders.termQuery("_id", "XXX1")).execute().actionGet(); @@ -60,7 +62,6 @@ public class SimpleSearchIT extends ESIntegTestCase { } } - @Test public void testSearchRandomPreference() throws InterruptedException, ExecutionException { createIndex("test"); indexRandom(true, client().prepareIndex("test", "type", "1").setSource("field", "value"), @@ -84,8 +85,7 @@ public class SimpleSearchIT extends ESIntegTestCase { } } - @Test - public void simpleIpTests() throws 
Exception { + public void testSimpleIp() throws Exception { createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type1") @@ -104,8 +104,7 @@ public class SimpleSearchIT extends ESIntegTestCase { assertHitCount(search, 1l); } - @Test - public void simpleIdTests() { + public void testSimpleId() { createIndex("test"); client().prepareIndex("test", "type", "XXX1").setSource("field", "value").setRefresh(true).execute().actionGet(); @@ -124,8 +123,7 @@ public class SimpleSearchIT extends ESIntegTestCase { assertHitCount(searchResponse, 1l); } - @Test - public void simpleDateRangeTests() throws Exception { + public void testSimpleDateRange() throws Exception { createIndex("test"); client().prepareIndex("test", "type1", "1").setSource("field", "2010-01-05T02:00").execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource("field", "2010-01-06T02:00").execute().actionGet(); @@ -150,9 +148,8 @@ public class SimpleSearchIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.queryStringQuery("field:[2010-01-03||+2d TO 2010-01-04||+2d/d]")).execute().actionGet(); assertHitCount(searchResponse, 2l); } - - @Test - public void localeDependentDateTests() throws Exception { + + public void testLocaleDependentDate() throws Exception { assumeFalse("Locals are buggy on JDK9EA", Constants.JRE_IS_MINIMUM_JAVA9 && systemPropertyAsBoolean("tests.security.manager", false)); assertAcked(prepareCreate("test") .addMapping("type1", @@ -189,8 +186,7 @@ public class SimpleSearchIT extends ESIntegTestCase { } } - @Test - public void simpleTerminateAfterCountTests() throws Exception { + public void testSimpleTerminateAfterCount() throws Exception { prepareCreate("test").setSettings( SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0).get(); @@ -225,16 +221,76 @@ public class SimpleSearchIT extends ESIntegTestCase { assertFalse(searchResponse.isTerminatedEarly()); } - @Test - public void 
testInsaneFrom() throws Exception { + public void testInsaneFromAndSize() throws Exception { createIndex("idx"); indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); + assertWindowFails(client().prepareSearch("idx").setFrom(Integer.MAX_VALUE)); + assertWindowFails(client().prepareSearch("idx").setSize(Integer.MAX_VALUE)); + } + + public void testTooLargeFromAndSize() throws Exception { + createIndex("idx"); + indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); + + assertWindowFails(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW)); + assertWindowFails(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW + 1)); + assertWindowFails(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW) + .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW)); + } + + public void testLargeFromAndSizeSucceeds() throws Exception { + createIndex("idx"); + indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); + + assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW - 10).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW / 2) + .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW / 2 - 1).get(), 1); + } + + public void testTooLargeFromAndSizeOkBySetting() throws Exception { + prepareCreate("idx").setSettings(DefaultSearchContext.MAX_RESULT_WINDOW, DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 2).get(); + indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); + + assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW + 
1).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW) + .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); + } + + public void testTooLargeFromAndSizeOkByDynamicSetting() throws Exception { + createIndex("idx"); + assertAcked(client().admin().indices().prepareUpdateSettings("idx") + .setSettings( + Settings.builder().put(DefaultSearchContext.MAX_RESULT_WINDOW, DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 2)) + .get()); + indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); + + assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW + 1).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW) + .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); + } + + public void testTooLargeFromAndSizeBackwardsCompatibilityRecommendation() throws Exception { + prepareCreate("idx").setSettings(DefaultSearchContext.MAX_RESULT_WINDOW, Integer.MAX_VALUE).get(); + indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); + + assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10) + .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10).get(), 1); + } + + private void assertWindowFails(SearchRequestBuilder search) { try { - client().prepareSearch("idx").setFrom(Integer.MAX_VALUE).get(); + search.get(); fail(); } catch (SearchPhaseExecutionException e) { - assertThat(e.toString(), containsString("Result window is too large, from + size must 
be less than or equal to:")); + assertThat(e.toString(), containsString("Result window is too large, from + size must be less than or equal to: [" + + DefaultSearchContext.Defaults.MAX_RESULT_WINDOW)); + assertThat(e.toString(), containsString("See the scroll api for a more efficient way to request large data sets")); } } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchIT.java index 28a5a1b7601..596c675e43f 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchIT.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.search.suggest; -import com.google.common.collect.Sets; - +import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.suggest.SuggestRequest; import org.elasticsearch.action.suggest.SuggestRequestBuilder; @@ -27,6 +26,7 @@ import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.search.suggest.Suggest.Suggestion; @@ -38,15 +38,21 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggestionFuzzyBuil import org.elasticsearch.search.suggest.context.ContextBuilder; import org.elasticsearch.search.suggest.context.ContextMapping; import org.elasticsearch.test.ESIntegTestCase; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.hamcrest.Matchers; import org.junit.Test; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; 
+import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.assertDistance; import static org.hamcrest.Matchers.containsString; diff --git a/core/src/test/java/org/elasticsearch/test/ESTestCase.java b/core/src/test/java/org/elasticsearch/test/ESTestCase.java index 6d04adbe92f..6dcb4eda9e9 100644 --- a/core/src/test/java/org/elasticsearch/test/ESTestCase.java +++ b/core/src/test/java/org/elasticsearch/test/ESTestCase.java @@ -498,6 +498,10 @@ public abstract class ESTestCase extends LuceneTestCase { } } + public Path getBwcIndicesPath() { + return getDataPath("/indices/bwc"); + } + /** Returns a random number of temporary paths. 
*/ public String[] tmpPaths() { final int numPaths = TestUtil.nextInt(random(), 1, 3); diff --git a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java index 48ad3f8d7c7..012ce95cacd 100644 --- a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java +++ b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java @@ -25,7 +25,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.google.common.collect.Iterators; -import com.google.common.collect.Sets; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; @@ -65,6 +64,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexService; diff --git a/core/src/test/java/org/elasticsearch/test/rest/ESRestTestCase.java b/core/src/test/java/org/elasticsearch/test/rest/ESRestTestCase.java index f3e18d75124..33ddea019ae 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/core/src/test/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -155,6 +155,7 @@ public abstract class ESRestTestCase extends ESIntegTestCase { return Settings.builder() .putArray(URLRepository.ALLOWED_URLS_SETTING, "http://snapshot.test*") .put(Node.HTTP_ENABLED, true) + .put("node.testattr", "test") .put(super.nodeSettings(nodeOrdinal)).build(); } diff --git 
a/core/src/test/java/org/elasticsearch/test/rest/client/RestClient.java b/core/src/test/java/org/elasticsearch/test/rest/client/RestClient.java index 49bb01fe80b..b7173db0838 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/core/src/test/java/org/elasticsearch/test/rest/client/RestClient.java @@ -19,7 +19,6 @@ package org.elasticsearch.test.rest.client; import com.carrotsearch.randomizedtesting.RandomizedTest; -import com.google.common.collect.Sets; import org.apache.http.config.Registry; import org.apache.http.config.RegistryBuilder; import org.apache.http.conn.socket.ConnectionSocketFactory; @@ -38,6 +37,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; import org.elasticsearch.test.rest.client.http.HttpResponse; import org.elasticsearch.test.rest.spec.RestApi; diff --git a/core/src/test/java/org/elasticsearch/test/rest/section/RestTestSuite.java b/core/src/test/java/org/elasticsearch/test/rest/section/RestTestSuite.java index 923ba93521c..d53671bc6bc 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/section/RestTestSuite.java +++ b/core/src/test/java/org/elasticsearch/test/rest/section/RestTestSuite.java @@ -18,11 +18,10 @@ */ package org.elasticsearch.test.rest.section; -import com.google.common.collect.Sets; - import java.util.ArrayList; import java.util.List; import java.util.Set; +import java.util.TreeSet; /** * Holds a REST test suite loaded from a specific yaml file. 
@@ -35,7 +34,7 @@ public class RestTestSuite { private SetupSection setupSection; - private Set testSections = Sets.newTreeSet(); + private Set testSections = new TreeSet<>(); public RestTestSuite(String api, String name) { this.api = api; diff --git a/core/src/test/java/org/elasticsearch/test/rest/support/FileUtils.java b/core/src/test/java/org/elasticsearch/test/rest/support/FileUtils.java index 7790d913016..e73906efff7 100644 --- a/core/src/test/java/org/elasticsearch/test/rest/support/FileUtils.java +++ b/core/src/test/java/org/elasticsearch/test/rest/support/FileUtils.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.test.rest.support; -import com.google.common.collect.Sets; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; @@ -156,7 +155,7 @@ public final class FileUtils { String groupName = file.toAbsolutePath().getParent().getFileName().toString(); Set filesSet = files.get(groupName); if (filesSet == null) { - filesSet = Sets.newHashSet(); + filesSet = new HashSet<>(); files.put(groupName, filesSet); } filesSet.add(file); diff --git a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index bf791855bba..0bf04918a02 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.threadpool; -import com.google.common.collect.Sets; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -44,16 +43,24 @@ import java.io.IOException; import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; +import java.util.HashSet; import java.util.Map; import java.util.Set; -import 
java.util.concurrent.*; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.Executor; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.ESIntegTestCase.Scope; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; /** */ @@ -69,7 +76,7 @@ public class SimpleThreadPoolIT extends ESIntegTestCase { public void verifyThreadNames() throws Exception { ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); - Set preNodeStartThreadNames = Sets.newHashSet(); + Set preNodeStartThreadNames = new HashSet<>(); for (long l : threadBean.getAllThreadIds()) { ThreadInfo threadInfo = threadBean.getThreadInfo(l); if (threadInfo != null) { @@ -98,7 +105,7 @@ public class SimpleThreadPoolIT extends ESIntegTestCase { assertNoFailures(client().prepareSearch("idx").setQuery(QueryBuilders.termQuery("str_value", "s" + i)).get()); assertNoFailures(client().prepareSearch("idx").setQuery(QueryBuilders.termQuery("l_value", i)).get()); } - Set threadNames = Sets.newHashSet(); + Set threadNames = new HashSet<>(); for (long l : threadBean.getAllThreadIds()) { ThreadInfo threadInfo = threadBean.getThreadInfo(l); if (threadInfo != null) { diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java index 1f8b7f165a5..c9d7d939dc6 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java +++ 
b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java @@ -442,4 +442,4 @@ public class TribeIT extends ESIntegTestCase { } return unicastHosts.toArray(new String[unicastHosts.size()]); } -} \ No newline at end of file +} diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.0.Beta1.zip b/core/src/test/resources/indices/bwc/index-0.90.0.Beta1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.0.Beta1.zip rename to core/src/test/resources/indices/bwc/index-0.90.0.Beta1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.0.RC1.zip b/core/src/test/resources/indices/bwc/index-0.90.0.RC1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.0.RC1.zip rename to core/src/test/resources/indices/bwc/index-0.90.0.RC1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.0.RC2.zip b/core/src/test/resources/indices/bwc/index-0.90.0.RC2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.0.RC2.zip rename to core/src/test/resources/indices/bwc/index-0.90.0.RC2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.0.zip b/core/src/test/resources/indices/bwc/index-0.90.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.0.zip rename to core/src/test/resources/indices/bwc/index-0.90.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.1.zip b/core/src/test/resources/indices/bwc/index-0.90.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.1.zip rename to core/src/test/resources/indices/bwc/index-0.90.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.10.zip b/core/src/test/resources/indices/bwc/index-0.90.10.zip similarity index 100% rename from 
core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.10.zip rename to core/src/test/resources/indices/bwc/index-0.90.10.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.11.zip b/core/src/test/resources/indices/bwc/index-0.90.11.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.11.zip rename to core/src/test/resources/indices/bwc/index-0.90.11.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.12.zip b/core/src/test/resources/indices/bwc/index-0.90.12.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.12.zip rename to core/src/test/resources/indices/bwc/index-0.90.12.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.13.zip b/core/src/test/resources/indices/bwc/index-0.90.13.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.13.zip rename to core/src/test/resources/indices/bwc/index-0.90.13.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.2.zip b/core/src/test/resources/indices/bwc/index-0.90.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.2.zip rename to core/src/test/resources/indices/bwc/index-0.90.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.3.zip b/core/src/test/resources/indices/bwc/index-0.90.3.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.3.zip rename to core/src/test/resources/indices/bwc/index-0.90.3.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.4.zip b/core/src/test/resources/indices/bwc/index-0.90.4.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.4.zip rename to core/src/test/resources/indices/bwc/index-0.90.4.zip diff --git 
a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.5.zip b/core/src/test/resources/indices/bwc/index-0.90.5.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.5.zip rename to core/src/test/resources/indices/bwc/index-0.90.5.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.6.zip b/core/src/test/resources/indices/bwc/index-0.90.6.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.6.zip rename to core/src/test/resources/indices/bwc/index-0.90.6.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.7.zip b/core/src/test/resources/indices/bwc/index-0.90.7.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.7.zip rename to core/src/test/resources/indices/bwc/index-0.90.7.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.8.zip b/core/src/test/resources/indices/bwc/index-0.90.8.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.8.zip rename to core/src/test/resources/indices/bwc/index-0.90.8.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.9.zip b/core/src/test/resources/indices/bwc/index-0.90.9.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-0.90.9.zip rename to core/src/test/resources/indices/bwc/index-0.90.9.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.0.Beta1.zip b/core/src/test/resources/indices/bwc/index-1.0.0.Beta1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.0.Beta1.zip rename to core/src/test/resources/indices/bwc/index-1.0.0.Beta1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.0.Beta2.zip b/core/src/test/resources/indices/bwc/index-1.0.0.Beta2.zip similarity index 100% rename from 
core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.0.Beta2.zip rename to core/src/test/resources/indices/bwc/index-1.0.0.Beta2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.0.RC1.zip b/core/src/test/resources/indices/bwc/index-1.0.0.RC1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.0.RC1.zip rename to core/src/test/resources/indices/bwc/index-1.0.0.RC1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.0.RC2.zip b/core/src/test/resources/indices/bwc/index-1.0.0.RC2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.0.RC2.zip rename to core/src/test/resources/indices/bwc/index-1.0.0.RC2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.0.zip b/core/src/test/resources/indices/bwc/index-1.0.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.0.zip rename to core/src/test/resources/indices/bwc/index-1.0.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.1.zip b/core/src/test/resources/indices/bwc/index-1.0.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.1.zip rename to core/src/test/resources/indices/bwc/index-1.0.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.2.zip b/core/src/test/resources/indices/bwc/index-1.0.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.2.zip rename to core/src/test/resources/indices/bwc/index-1.0.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.3.zip b/core/src/test/resources/indices/bwc/index-1.0.3.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.0.3.zip rename to core/src/test/resources/indices/bwc/index-1.0.3.zip diff --git 
a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.1.0.zip b/core/src/test/resources/indices/bwc/index-1.1.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.1.0.zip rename to core/src/test/resources/indices/bwc/index-1.1.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.1.1.zip b/core/src/test/resources/indices/bwc/index-1.1.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.1.1.zip rename to core/src/test/resources/indices/bwc/index-1.1.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.1.2.zip b/core/src/test/resources/indices/bwc/index-1.1.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.1.2.zip rename to core/src/test/resources/indices/bwc/index-1.1.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.2.0.zip b/core/src/test/resources/indices/bwc/index-1.2.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.2.0.zip rename to core/src/test/resources/indices/bwc/index-1.2.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.2.1.zip b/core/src/test/resources/indices/bwc/index-1.2.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.2.1.zip rename to core/src/test/resources/indices/bwc/index-1.2.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.2.2.zip b/core/src/test/resources/indices/bwc/index-1.2.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.2.2.zip rename to core/src/test/resources/indices/bwc/index-1.2.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.2.3.zip b/core/src/test/resources/indices/bwc/index-1.2.3.zip similarity index 100% rename from 
core/src/test/resources/org/elasticsearch/bwcompat/index-1.2.3.zip rename to core/src/test/resources/indices/bwc/index-1.2.3.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.2.4.zip b/core/src/test/resources/indices/bwc/index-1.2.4.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.2.4.zip rename to core/src/test/resources/indices/bwc/index-1.2.4.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.0.zip b/core/src/test/resources/indices/bwc/index-1.3.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.0.zip rename to core/src/test/resources/indices/bwc/index-1.3.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.1.zip b/core/src/test/resources/indices/bwc/index-1.3.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.1.zip rename to core/src/test/resources/indices/bwc/index-1.3.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.2.zip b/core/src/test/resources/indices/bwc/index-1.3.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.2.zip rename to core/src/test/resources/indices/bwc/index-1.3.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.3.zip b/core/src/test/resources/indices/bwc/index-1.3.3.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.3.zip rename to core/src/test/resources/indices/bwc/index-1.3.3.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.4.zip b/core/src/test/resources/indices/bwc/index-1.3.4.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.4.zip rename to core/src/test/resources/indices/bwc/index-1.3.4.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.5.zip 
b/core/src/test/resources/indices/bwc/index-1.3.5.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.5.zip rename to core/src/test/resources/indices/bwc/index-1.3.5.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.6.zip b/core/src/test/resources/indices/bwc/index-1.3.6.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.6.zip rename to core/src/test/resources/indices/bwc/index-1.3.6.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.7.zip b/core/src/test/resources/indices/bwc/index-1.3.7.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.7.zip rename to core/src/test/resources/indices/bwc/index-1.3.7.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.8.zip b/core/src/test/resources/indices/bwc/index-1.3.8.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.8.zip rename to core/src/test/resources/indices/bwc/index-1.3.8.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.9.zip b/core/src/test/resources/indices/bwc/index-1.3.9.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.3.9.zip rename to core/src/test/resources/indices/bwc/index-1.3.9.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.0.Beta1.zip b/core/src/test/resources/indices/bwc/index-1.4.0.Beta1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.0.Beta1.zip rename to core/src/test/resources/indices/bwc/index-1.4.0.Beta1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.0.zip b/core/src/test/resources/indices/bwc/index-1.4.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.0.zip rename to 
core/src/test/resources/indices/bwc/index-1.4.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.1.zip b/core/src/test/resources/indices/bwc/index-1.4.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.1.zip rename to core/src/test/resources/indices/bwc/index-1.4.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.2.zip b/core/src/test/resources/indices/bwc/index-1.4.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.2.zip rename to core/src/test/resources/indices/bwc/index-1.4.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.3.zip b/core/src/test/resources/indices/bwc/index-1.4.3.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.3.zip rename to core/src/test/resources/indices/bwc/index-1.4.3.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.4.zip b/core/src/test/resources/indices/bwc/index-1.4.4.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.4.zip rename to core/src/test/resources/indices/bwc/index-1.4.4.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.5.zip b/core/src/test/resources/indices/bwc/index-1.4.5.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.4.5.zip rename to core/src/test/resources/indices/bwc/index-1.4.5.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.5.0.zip b/core/src/test/resources/indices/bwc/index-1.5.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.5.0.zip rename to core/src/test/resources/indices/bwc/index-1.5.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.5.1.zip b/core/src/test/resources/indices/bwc/index-1.5.1.zip similarity index 100% rename 
from core/src/test/resources/org/elasticsearch/bwcompat/index-1.5.1.zip rename to core/src/test/resources/indices/bwc/index-1.5.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.5.2.zip b/core/src/test/resources/indices/bwc/index-1.5.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.5.2.zip rename to core/src/test/resources/indices/bwc/index-1.5.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.6.0.zip b/core/src/test/resources/indices/bwc/index-1.6.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.6.0.zip rename to core/src/test/resources/indices/bwc/index-1.6.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.6.1.zip b/core/src/test/resources/indices/bwc/index-1.6.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.6.1.zip rename to core/src/test/resources/indices/bwc/index-1.6.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.6.2.zip b/core/src/test/resources/indices/bwc/index-1.6.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.6.2.zip rename to core/src/test/resources/indices/bwc/index-1.6.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.7.0.zip b/core/src/test/resources/indices/bwc/index-1.7.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.7.0.zip rename to core/src/test/resources/indices/bwc/index-1.7.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/index-1.7.1.zip b/core/src/test/resources/indices/bwc/index-1.7.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/index-1.7.1.zip rename to core/src/test/resources/indices/bwc/index-1.7.1.zip diff --git 
a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.0.Beta2.zip b/core/src/test/resources/indices/bwc/repo-1.0.0.Beta2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.0.Beta2.zip rename to core/src/test/resources/indices/bwc/repo-1.0.0.Beta2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.0.RC1.zip b/core/src/test/resources/indices/bwc/repo-1.0.0.RC1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.0.RC1.zip rename to core/src/test/resources/indices/bwc/repo-1.0.0.RC1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.0.RC2.zip b/core/src/test/resources/indices/bwc/repo-1.0.0.RC2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.0.RC2.zip rename to core/src/test/resources/indices/bwc/repo-1.0.0.RC2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.0.zip b/core/src/test/resources/indices/bwc/repo-1.0.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.0.zip rename to core/src/test/resources/indices/bwc/repo-1.0.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.1.zip b/core/src/test/resources/indices/bwc/repo-1.0.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.1.zip rename to core/src/test/resources/indices/bwc/repo-1.0.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.2.zip b/core/src/test/resources/indices/bwc/repo-1.0.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.2.zip rename to core/src/test/resources/indices/bwc/repo-1.0.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.3.zip b/core/src/test/resources/indices/bwc/repo-1.0.3.zip similarity index 100% rename from 
core/src/test/resources/org/elasticsearch/bwcompat/repo-1.0.3.zip rename to core/src/test/resources/indices/bwc/repo-1.0.3.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.1.0.zip b/core/src/test/resources/indices/bwc/repo-1.1.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.1.0.zip rename to core/src/test/resources/indices/bwc/repo-1.1.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.1.1.zip b/core/src/test/resources/indices/bwc/repo-1.1.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.1.1.zip rename to core/src/test/resources/indices/bwc/repo-1.1.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.1.2.zip b/core/src/test/resources/indices/bwc/repo-1.1.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.1.2.zip rename to core/src/test/resources/indices/bwc/repo-1.1.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.2.0.zip b/core/src/test/resources/indices/bwc/repo-1.2.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.2.0.zip rename to core/src/test/resources/indices/bwc/repo-1.2.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.2.1.zip b/core/src/test/resources/indices/bwc/repo-1.2.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.2.1.zip rename to core/src/test/resources/indices/bwc/repo-1.2.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.2.2.zip b/core/src/test/resources/indices/bwc/repo-1.2.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.2.2.zip rename to core/src/test/resources/indices/bwc/repo-1.2.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.2.3.zip 
b/core/src/test/resources/indices/bwc/repo-1.2.3.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.2.3.zip rename to core/src/test/resources/indices/bwc/repo-1.2.3.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.2.4.zip b/core/src/test/resources/indices/bwc/repo-1.2.4.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.2.4.zip rename to core/src/test/resources/indices/bwc/repo-1.2.4.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.0.zip b/core/src/test/resources/indices/bwc/repo-1.3.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.0.zip rename to core/src/test/resources/indices/bwc/repo-1.3.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.1.zip b/core/src/test/resources/indices/bwc/repo-1.3.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.1.zip rename to core/src/test/resources/indices/bwc/repo-1.3.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.2.zip b/core/src/test/resources/indices/bwc/repo-1.3.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.2.zip rename to core/src/test/resources/indices/bwc/repo-1.3.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.3.zip b/core/src/test/resources/indices/bwc/repo-1.3.3.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.3.zip rename to core/src/test/resources/indices/bwc/repo-1.3.3.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.4.zip b/core/src/test/resources/indices/bwc/repo-1.3.4.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.4.zip rename to core/src/test/resources/indices/bwc/repo-1.3.4.zip diff --git 
a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.5.zip b/core/src/test/resources/indices/bwc/repo-1.3.5.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.5.zip rename to core/src/test/resources/indices/bwc/repo-1.3.5.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.6.zip b/core/src/test/resources/indices/bwc/repo-1.3.6.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.6.zip rename to core/src/test/resources/indices/bwc/repo-1.3.6.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.7.zip b/core/src/test/resources/indices/bwc/repo-1.3.7.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.7.zip rename to core/src/test/resources/indices/bwc/repo-1.3.7.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.8.zip b/core/src/test/resources/indices/bwc/repo-1.3.8.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.8.zip rename to core/src/test/resources/indices/bwc/repo-1.3.8.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.9.zip b/core/src/test/resources/indices/bwc/repo-1.3.9.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.3.9.zip rename to core/src/test/resources/indices/bwc/repo-1.3.9.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.0.Beta1.zip b/core/src/test/resources/indices/bwc/repo-1.4.0.Beta1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.0.Beta1.zip rename to core/src/test/resources/indices/bwc/repo-1.4.0.Beta1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.0.zip b/core/src/test/resources/indices/bwc/repo-1.4.0.zip similarity index 100% rename from 
core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.0.zip rename to core/src/test/resources/indices/bwc/repo-1.4.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.1.zip b/core/src/test/resources/indices/bwc/repo-1.4.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.1.zip rename to core/src/test/resources/indices/bwc/repo-1.4.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.2.zip b/core/src/test/resources/indices/bwc/repo-1.4.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.2.zip rename to core/src/test/resources/indices/bwc/repo-1.4.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.3.zip b/core/src/test/resources/indices/bwc/repo-1.4.3.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.3.zip rename to core/src/test/resources/indices/bwc/repo-1.4.3.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.4.zip b/core/src/test/resources/indices/bwc/repo-1.4.4.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.4.zip rename to core/src/test/resources/indices/bwc/repo-1.4.4.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.5.zip b/core/src/test/resources/indices/bwc/repo-1.4.5.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.4.5.zip rename to core/src/test/resources/indices/bwc/repo-1.4.5.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.5.0.zip b/core/src/test/resources/indices/bwc/repo-1.5.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.5.0.zip rename to core/src/test/resources/indices/bwc/repo-1.5.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.5.1.zip 
b/core/src/test/resources/indices/bwc/repo-1.5.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.5.1.zip rename to core/src/test/resources/indices/bwc/repo-1.5.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.5.2.zip b/core/src/test/resources/indices/bwc/repo-1.5.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.5.2.zip rename to core/src/test/resources/indices/bwc/repo-1.5.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.6.0.zip b/core/src/test/resources/indices/bwc/repo-1.6.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.6.0.zip rename to core/src/test/resources/indices/bwc/repo-1.6.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.6.1.zip b/core/src/test/resources/indices/bwc/repo-1.6.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.6.1.zip rename to core/src/test/resources/indices/bwc/repo-1.6.1.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.6.2.zip b/core/src/test/resources/indices/bwc/repo-1.6.2.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.6.2.zip rename to core/src/test/resources/indices/bwc/repo-1.6.2.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.7.0.zip b/core/src/test/resources/indices/bwc/repo-1.7.0.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.7.0.zip rename to core/src/test/resources/indices/bwc/repo-1.7.0.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/repo-1.7.1.zip b/core/src/test/resources/indices/bwc/repo-1.7.1.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/repo-1.7.1.zip rename to core/src/test/resources/indices/bwc/repo-1.7.1.zip diff --git 
a/core/src/test/resources/org/elasticsearch/bwcompat/unsupported-0.20.6.zip b/core/src/test/resources/indices/bwc/unsupported-0.20.6.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/unsupported-0.20.6.zip rename to core/src/test/resources/indices/bwc/unsupported-0.20.6.zip diff --git a/core/src/test/resources/org/elasticsearch/bwcompat/unsupportedrepo-0.20.6.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-0.20.6.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/bwcompat/unsupportedrepo-0.20.6.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-0.20.6.zip diff --git a/core/src/test/resources/org/elasticsearch/index/query/function-filter-score-query.json b/core/src/test/resources/org/elasticsearch/index/query/function-filter-score-query.json index e78c54973a6..a7b4790fb75 100644 --- a/core/src/test/resources/org/elasticsearch/index/query/function-filter-score-query.json +++ b/core/src/test/resources/org/elasticsearch/index/query/function-filter-score-query.json @@ -9,18 +9,12 @@ }, "functions": [ { - "boost_factor": 3, + "weight": 3, "filter": { - term:{ + "term":{ "name.last":"banon" } } - }, - { - "boost_factor": 3 - }, - { - "boost_factor": 3 } ], "boost" : 3, diff --git a/core/src/test/resources/org/elasticsearch/index/query/has-child-with-inner-hits.json b/core/src/test/resources/org/elasticsearch/index/query/has-child-with-inner-hits.json index 176131c4cd1..38d4483e9fc 100644 --- a/core/src/test/resources/org/elasticsearch/index/query/has-child-with-inner-hits.json +++ b/core/src/test/resources/org/elasticsearch/index/query/has-child-with-inner-hits.json @@ -12,7 +12,7 @@ } }, "child_type" : "child", - "score_type" : "avg", + "score_mode" : "avg", "min_children" : 883170873, "max_children" : 1217235442, "boost" : 2.0, diff --git a/dev-tools/Elasticsearch.launch b/dev-tools/Elasticsearch.launch index 2016518cd23..5995612c80b 100644 --- a/dev-tools/Elasticsearch.launch 
+++ b/dev-tools/Elasticsearch.launch @@ -14,5 +14,5 @@ - + diff --git a/dev-tools/create_bwc_index.py b/dev-tools/create_bwc_index.py index 22ba7f78ce5..d780b2af5e2 100644 --- a/dev-tools/create_bwc_index.py +++ b/dev-tools/create_bwc_index.py @@ -333,7 +333,7 @@ def parse_config(): help='Recreate all existing backwards compatibility indexes') parser.add_argument('--releases-dir', '-d', default='backwards', metavar='DIR', help='The directory containing elasticsearch releases') - parser.add_argument('--output-dir', '-o', default='core/src/test/resources/org/elasticsearch/bwcompat', + parser.add_argument('--output-dir', '-o', default='core/src/test/resources/indices/bwc', help='The directory to write the zipped index into') parser.add_argument('--tcp-port', default=DEFAULT_TRANSPORT_TCP_PORT, type=int, help='The port to use as the minimum port for TCP communication') diff --git a/dev-tools/create_bwc_repo_with_ancient_indices.py b/dev-tools/create_bwc_repo_with_ancient_indices.py index 27d166d2b6a..5c6b8222b02 100644 --- a/dev-tools/create_bwc_repo_with_ancient_indices.py +++ b/dev-tools/create_bwc_repo_with_ancient_indices.py @@ -63,7 +63,7 @@ def main(): create_bwc_index.shutdown_node(node) print('%s server output:\n%s' % (second_version, node.stdout.read().decode('utf-8'))) - create_bwc_index.compress(tmp_dir, "src/test/resources/org/elasticsearch/bwcompat", 'unsupportedrepo-%s.zip' % first_version, 'repo') + create_bwc_index.compress(tmp_dir, "src/test/resources/indices/bwc", 'unsupportedrepo-%s.zip' % first_version, 'repo') node = None finally: diff --git a/dev-tools/src/main/resources/ant/integration-tests.xml b/dev-tools/src/main/resources/ant/integration-tests.xml index 23df37338de..f64f4403157 100644 --- a/dev-tools/src/main/resources/ant/integration-tests.xml +++ b/dev-tools/src/main/resources/ant/integration-tests.xml @@ -157,7 +157,7 @@ Starting up external cluster... 
- @@ -172,6 +172,8 @@ + + diff --git a/dev-tools/src/main/resources/forbidden/core-signatures.txt b/dev-tools/src/main/resources/forbidden/core-signatures.txt index de3576f7619..92792ca3ca3 100644 --- a/dev-tools/src/main/resources/forbidden/core-signatures.txt +++ b/dev-tools/src/main/resources/forbidden/core-signatures.txt @@ -93,3 +93,9 @@ com.google.common.base.Predicates com.google.common.base.Strings com.google.common.base.Throwables com.google.common.collect.Maps +com.google.common.collect.Sets +com.google.common.base.Preconditions#checkNotNull(java.lang.Object) +com.google.common.base.Preconditions#checkNotNull(java.lang.Object, java.lang.Object) +com.google.common.base.Preconditions#checkNotNull(java.lang.Object, java.lang.String, java.lang.Object[]) +com.google.common.collect.ImmutableSortedSet +com.google.common.collect.Queues diff --git a/dev-tools/update_lucene.sh b/dev-tools/update_lucene.sh new file mode 100644 index 00000000000..4918c126222 --- /dev/null +++ b/dev-tools/update_lucene.sh @@ -0,0 +1,14 @@ +#!/bin/sh +mvn install -DskipTests +perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \ + --update distribution/licenses/ distribution/zip/target/releases/elasticsearch-3.0.0-SNAPSHOT.zip elasticsearch-3.0.0-SNAPSHOT +perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \ + --update plugins/analysis-icu/licenses/ plugins/analysis-icu/target/releases/analysis-icu-3.0.0-SNAPSHOT.zip analysis-icu-3.0.0-SNAPSHOT +perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \ + --update plugins/analysis-kuromoji/licenses/ plugins/analysis-kuromoji/target/releases/analysis-kuromoji-3.0.0-SNAPSHOT.zip analysis-kuromoji-3.0.0-SNAPSHOT +perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \ + --update plugins/analysis-phonetic/licenses/ plugins/analysis-phonetic/target/releases/analysis-phonetic-3.0.0-SNAPSHOT.zip analysis-phonetic-3.0.0-SNAPSHOT +perl 
dev-tools/src/main/resources/license-check/check_license_and_sha.pl \ + --update plugins/analysis-smartcn/licenses/ plugins/analysis-smartcn/target/releases/analysis-smartcn-3.0.0-SNAPSHOT.zip analysis-smartcn-3.0.0-SNAPSHOT +perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \ + --update plugins/analysis-stempel/licenses/ plugins/analysis-stempel/target/releases/analysis-stempel-3.0.0-SNAPSHOT.zip analysis-stempel-3.0.0-SNAPSHOT diff --git a/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 3eff5a77688..00000000000 --- a/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -35fca29c4597a15ce4d4eb7dc73a517038684a27 diff --git a/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..41febf78e08 --- /dev/null +++ b/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +8243b938b75818e86aa8d270d8d99529e1847578 diff --git a/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 6fe76092653..00000000000 --- a/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e4769b5c05fad8339f4eaf9cfa9e850cbeaa10ec diff --git a/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..17f6316e8dd --- /dev/null +++ b/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +ba85c6e5e77e1f76c52c31d34a59558afa135d47 diff --git 
a/distribution/licenses/lucene-core-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-core-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index fb5b2dd2e54..00000000000 --- a/distribution/licenses/lucene-core-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3bbab9d7a395bd0b6cc8b5bee26287105c8659e8 diff --git a/distribution/licenses/lucene-core-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-core-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..e6198d478b4 --- /dev/null +++ b/distribution/licenses/lucene-core-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +f8a38658b6393015c9b33c16b1b4122167b526b2 diff --git a/distribution/licenses/lucene-expressions-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-expressions-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 000759a2842..00000000000 --- a/distribution/licenses/lucene-expressions-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d60476428e7d3d8a68fe491d42dbda0d4024f589 diff --git a/distribution/licenses/lucene-expressions-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-expressions-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..ff8fd3666a0 --- /dev/null +++ b/distribution/licenses/lucene-expressions-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +fa5d27ecadbe346caaf5a571ba71944b51761acf diff --git a/distribution/licenses/lucene-grouping-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-grouping-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 1688910396e..00000000000 --- a/distribution/licenses/lucene-grouping-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8618da3f400f0a4b140f196bbbecb0686fe754db diff --git a/distribution/licenses/lucene-grouping-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-grouping-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..7acdc72f103 --- /dev/null +++ 
b/distribution/licenses/lucene-grouping-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +2c1464fcf6ede7819f8ba434b9bc7c79f5968407 diff --git a/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 5b6a48e527b..00000000000 --- a/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c7db4fe5587d08ab23b253c622566462aab6796a diff --git a/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..9f46054da43 --- /dev/null +++ b/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +a40f9a3ef224bc042ef2ad1b713e318911b6057a diff --git a/distribution/licenses/lucene-join-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-join-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 9dbe3284449..00000000000 --- a/distribution/licenses/lucene-join-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f9c8d435d3e1d553b0dca05c99b1fa377568eed0 diff --git a/distribution/licenses/lucene-join-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-join-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..01a2d77b40f --- /dev/null +++ b/distribution/licenses/lucene-join-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +0a7642c9b98cb3d9013fb33be5c0751baf9f0b31 diff --git a/distribution/licenses/lucene-memory-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-memory-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 1c0f2f57d56..00000000000 --- a/distribution/licenses/lucene-memory-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -571dd2e4363f0a0410de04b3f3f4bbf66e782c31 diff --git a/distribution/licenses/lucene-memory-5.4.0-snapshot-1702265.jar.sha1 
b/distribution/licenses/lucene-memory-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..6f9b8b4ecf1 --- /dev/null +++ b/distribution/licenses/lucene-memory-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +a0d6461ab9cda93ea530560b0c074a28fe0dd717 diff --git a/distribution/licenses/lucene-misc-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-misc-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 0d44482a658..00000000000 --- a/distribution/licenses/lucene-misc-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -423264f839aace3b9159a0dd54f56c250458fd46 diff --git a/distribution/licenses/lucene-misc-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-misc-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..f274d1264de --- /dev/null +++ b/distribution/licenses/lucene-misc-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +85c5c7b78715c50157700c90ffd101537446533d diff --git a/distribution/licenses/lucene-queries-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-queries-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index a4391c68e60..00000000000 --- a/distribution/licenses/lucene-queries-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -872530eeac156faa0989eb87145bbef74a72e66f diff --git a/distribution/licenses/lucene-queries-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-queries-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..295a311e3be --- /dev/null +++ b/distribution/licenses/lucene-queries-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +70ca782d6ed458b5f777141353e09600083ed4fe diff --git a/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 29c85e8917f..00000000000 --- a/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-6f6b6a024ca96017252efea6d2fc7dc97c69febd diff --git a/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..88c548a7690 --- /dev/null +++ b/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +b4832cdfe7a6cc7c586a3e28d7cd530acb182232 diff --git a/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index dbc3ec8c8fe..00000000000 --- a/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a6f5a5c84b165ebde104cdcde46fa9c5948650f0 diff --git a/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..54f5944099d --- /dev/null +++ b/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +bde73ae2b2324e1576c5789a7e6dd88b6543b939 diff --git a/distribution/licenses/lucene-spatial-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-spatial-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 1e2c1dc7176..00000000000 --- a/distribution/licenses/lucene-spatial-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a305601f93b6cb02444816c96276a74f91ac7d40 diff --git a/distribution/licenses/lucene-spatial-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-spatial-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..f3aa3630484 --- /dev/null +++ b/distribution/licenses/lucene-spatial-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +8d261ff1c2333ce1e040c3aefca9784d1ae71acc diff --git a/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 
ab2be14bc16..00000000000 --- a/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ef1fcaa5b6663dd9382719a1ad40d86fc962c690 diff --git a/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..d23a2710c0a --- /dev/null +++ b/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +ee041e52dfcdb33a1aa6fab112042b5f33fc0c0c diff --git a/distribution/licenses/lucene-suggest-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-suggest-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index dd69c53dbc1..00000000000 --- a/distribution/licenses/lucene-suggest-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3698e0623f45e181d2ceead46e48a6dd8c2867dd diff --git a/distribution/licenses/lucene-suggest-5.4.0-snapshot-1702265.jar.sha1 b/distribution/licenses/lucene-suggest-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..f2307a3bc86 --- /dev/null +++ b/distribution/licenses/lucene-suggest-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +a8ceb11b26e53612eee9a265ff454351f6dc99f2 diff --git a/docs/plugins/integrations.asciidoc b/docs/plugins/integrations.asciidoc index 0cb46483349..52e31507b4f 100644 --- a/docs/plugins/integrations.asciidoc +++ b/docs/plugins/integrations.asciidoc @@ -2,8 +2,7 @@ == Integrations -Integrations are not plugins, instead they are external tools or modules which -make it easier to work with Elasticsearch. +Integrations are not plugins, but are external tools or modules that make it easier to work with Elasticsearch. [float] [[cms-integrations]] @@ -29,13 +28,24 @@ make it easier to work with Elasticsearch. 
search (facets, etc), along with some Natural Language Processing features (ex.: More like this) - [float] [[data-integrations]] === Data import/export and validation -NOTE: Rivers were used to import data from external systems into -Elasticsearch, but they are no longer supported in Elasticsearch 2.0. +NOTE: Rivers were used to import data from external systems into Elasticsearch prior to the 2.0 release. Elasticsearch +releases 2.0 and later do not support rivers. + +[float] +==== Supported by the community: + +* https://www.elastic.co/guide/en/logstash/current/plugins-outputs-elasticsearch.html[Logstash output to Elasticsearch]: + The Logstash `elasticsearch` output plugin. +* https://www.elastic.co/guide/en/logstash/current/plugins-inputs-elasticsearch.html[Elasticsearch input to Logstash] + The Logstash `elasticsearch` input plugin. +* https://www.elastic.co/guide/en/logstash/current/plugins-filters-elasticsearch.html[Elasticsearch event filtering in Logstash] + The Logstash `elasticearch` filter plugin. +* https://www.elastic.co/guide/en/logstash/current/plugins-codecs-es_bulk.html[Elasticsearch bulk codec] + The Logstash `es_bulk` plugin decodes the Elasticsearch bulk format into individual events. [float] ==== Supported by the community: @@ -44,15 +54,14 @@ Elasticsearch, but they are no longer supported in Elasticsearch 2.0. 
The Java Database Connection (JDBC) importer allows to fetch data from JDBC sources for indexing into Elasticsearch (by Jörg Prante) * https://github.com/reachkrishnaraj/kafka-elasticsearch-standalone-consumer[Kafka Standalone Consumer]: - Easily Scaleable & Extendable, Kafka Standalone Consumer that will read the messages from Kafka, processes and index them in ElasticSearch + Easily Scalable & Extendable Kafka Standalone Consumer that reads messages from Kafka, then processes and indexes the messages in ElasticSearch * https://github.com/ozlerhakan/mongolastic[Mongolastic]: - A tool that clone data from ElasticSearch to MongoDB and vice versa + A tool that clones data from ElasticSearch to MongoDB and vice versa * https://github.com/Aconex/scrutineer[Scrutineer]: A high performance consistency checker to compare what you've indexed - with your source of truth content (e.g. DB) - + with your source of truth content [float] [[deployment]] @@ -207,7 +216,7 @@ These projects appear to have been abandoned: Protocol dissection for Zen discovery, HTTP and the binary protocol -These projects appears to have been abandoned: +These projects appear to have been abandoned: * http://www.github.com/neogenix/daikon[daikon]: Daikon Elasticsearch CLI @@ -216,5 +225,4 @@ These projects appears to have been abandoned: A set of AngularJS directives that provide common visualizations for elasticsearch based on D3. 
* https://github.com/OlegKunitsyn/eslogd[eslogd]: - Linux daemon that replicates events to a central Elasticsearch server in real-time - + Linux daemon that replicates events to a central Elasticsearch server in realtime diff --git a/docs/reference/aggregations/bucket/histogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/histogram-aggregation.asciidoc index bc8a6e13ffd..e01a067670b 100644 --- a/docs/reference/aggregations/bucket/histogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/histogram-aggregation.asciidoc @@ -148,7 +148,7 @@ Example: -------------------------------------------------- { "query" : { - "filtered" : { "filter": { "range" : { "price" : { "to" : "500" } } } } + "constant_score" : { "filter": { "range" : { "price" : { "to" : "500" } } } } }, "aggs" : { "prices" : { diff --git a/docs/reference/aggregations/metrics/sum-aggregation.asciidoc b/docs/reference/aggregations/metrics/sum-aggregation.asciidoc index 98286e9396f..d55fcd01018 100644 --- a/docs/reference/aggregations/metrics/sum-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/sum-aggregation.asciidoc @@ -9,8 +9,7 @@ Assuming the data consists of documents representing stock ticks, where each tic -------------------------------------------------- { "query" : { - "filtered" : { - "query" : { "match_all" : {}}, + "constant_score" : { "filter" : { "range" : { "timestamp" : { "from" : "now/1d+9.5h", "to" : "now/1d+16h" }} } diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index e69cfb43782..473287dbc61 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -862,17 +862,17 @@ In the previous section, we skipped over a little detail called the document sco But queries do not always need to produce scores, in particular when they are only used for "filtering" the document set. 
Elasticsearch detects these situations and automatically optimizes query execution in order not to compute useless scores. -To understand filters, let's first introduce the <>, which allows you to combine a query (like `match_all`, `match`, `bool`, etc.) together with another query which is only used for filtering. As an example, let's introduce the <>, which allows us to filter documents by a range of values. This is generally used for numeric or date filtering. +The <> that we introduced in the previous section also supports `filter` clauses which allow to use a query to restrict the documents that will be matched by other clauses, without changing how scores are computed. As an example, let's introduce the <>, which allows us to filter documents by a range of values. This is generally used for numeric or date filtering. -This example uses a filtered query to return all accounts with balances between 20000 and 30000, inclusive. In other words, we want to find accounts with a balance that is greater than or equal to 20000 and less than or equal to 30000. +This example uses a bool query to return all accounts with balances between 20000 and 30000, inclusive. In other words, we want to find accounts with a balance that is greater than or equal to 20000 and less than or equal to 30000. [source,sh] -------------------------------------------------- curl -XPOST 'localhost:9200/bank/_search?pretty' -d ' { "query": { - "filtered": { - "query": { "match_all": {} }, + "bool": { + "must": { "match_all": {} }, "filter": { "range": { "balance": { @@ -886,9 +886,9 @@ curl -XPOST 'localhost:9200/bank/_search?pretty' -d ' }' -------------------------------------------------- -Dissecting the above, the filtered query contains a `match_all` query (the query part) and a `range` query (the filter part). We can substitute any other queries into the query and the filter parts. 
In the above case, the range query makes perfect sense since documents falling into the range all match "equally", i.e., no document is more relevant than another. +Dissecting the above, the bool query contains a `match_all` query (the query part) and a `range` query (the filter part). We can substitute any other queries into the query and the filter parts. In the above case, the range query makes perfect sense since documents falling into the range all match "equally", i.e., no document is more relevant than another. -In addition to the `match_all`, `match`, `bool`, `filtered`, and `range` queries, there are a lot of other query types that are available and we won't go into them here. Since we already have a basic understanding of how they work, it shouldn't be too difficult to apply this knowledge in learning and experimenting with the other query types. +In addition to the `match_all`, `match`, `bool`, and `range` queries, there are a lot of other query types that are available and we won't go into them here. Since we already have a basic understanding of how they work, it shouldn't be too difficult to apply this knowledge in learning and experimenting with the other query types. === Executing Aggregations diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 3a1c2de1385..78ede99ac62 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -98,6 +98,14 @@ specific index module: index visible to search. Defaults to `1s`. Can be set to `-1` to disable refresh. +`index.max_result_window`:: + + The maximum value of `from + size` for searches to this index. Defaults to + `10000`. Search requests take heap memory and time proportional to + `from + size` and this limits that memory. See + {ref}/search-request-scroll.html[Scroll] for a more efficient alternative + to raising this. 
+ `index.blocks.read_only`:: Set to `true` to make the index and index metadata read only, `false` to @@ -184,5 +192,3 @@ include::index-modules/slowlog.asciidoc[] include::index-modules/store.asciidoc[] include::index-modules/translog.asciidoc[] - - diff --git a/docs/reference/migration/migrate_1_0.asciidoc b/docs/reference/migration/migrate_1_0.asciidoc index 66b1245855b..c8750d11b82 100644 --- a/docs/reference/migration/migrate_1_0.asciidoc +++ b/docs/reference/migration/migrate_1_0.asciidoc @@ -188,7 +188,7 @@ GET /_count Also, the top-level `filter` parameter in search has been renamed to <>, to indicate that it should not be used as the primary way to filter search results (use a -<> instead), but only to filter +<> instead), but only to filter results AFTER aggregations have been calculated. This example counts the top colors in all matching docs, but only returns docs diff --git a/docs/reference/migration/migrate_2_1.asciidoc b/docs/reference/migration/migrate_2_1.asciidoc index b51cf47c6aa..1dbef8bfafc 100644 --- a/docs/reference/migration/migrate_2_1.asciidoc +++ b/docs/reference/migration/migrate_2_1.asciidoc @@ -26,6 +26,16 @@ Scroll requests sorted by `_doc` have been optimized to more efficiently resume from where the previous request stopped, so this will have the same performance characteristics as the former `scan` search type. +==== from + size limits + +Elasticsearch will now return an error message if a query's `from` + `size` is +more than the `index.max_result_window` parameter. This parameter defaults to +10,000 which is safe for almost all clusters. Values higher than can consume +significant chunks of heap memory per search and per shard executing the +search. It's safest to leave this value as it is an use the scroll api for any +deep scrolling but this setting is dynamic so it can raised or lowered as +needed. 
+ === Update changes ==== Updates now `detect_noop` by default diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index a0c96daba05..1d61fcaea19 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -71,4 +71,20 @@ with `_parent` field mapping created before version `2.0.0`. The data of these i The format of the join between parent and child documents have changed with the `2.0.0` release. The old format can't read from version `3.0.0` and onwards. The new format allows for a much more efficient and scalable join between parent and child documents and the join data structures are stored on on disk -data structures as opposed as before the join data structures were stored in the jvm heap space. \ No newline at end of file +data structures as opposed as before the join data structures were stored in the jvm heap space. + +==== `score_type` has been removed + +The `score_type` option has been removed from the `has_child` and `has_parent` queries in favour of the `score_mode` option +which does the exact same thing. + +==== `sum` score mode removed + +The `sum` score mode has been removed in favour of the `total` mode which doesn the same and is already available in +previous versions. + +==== `max_children` option + +When `max_children` was set to `0` on the `has_child` query then there was no upper limit on how many children documents +are allowed to match. This has changed and `0` now really means to zero child documents are allowed. If no upper limit +is needed then the `max_children` option shouldn't be defined at all on the `has_child` query. 
\ No newline at end of file diff --git a/docs/reference/modules/cluster/allocation_awareness.asciidoc b/docs/reference/modules/cluster/allocation_awareness.asciidoc index 6b896aa4133..ee3cbc17f5f 100644 --- a/docs/reference/modules/cluster/allocation_awareness.asciidoc +++ b/docs/reference/modules/cluster/allocation_awareness.asciidoc @@ -69,6 +69,11 @@ cluster.routing.allocation.awareness.attributes: rack_id,zone NOTE: When using awareness attributes, shards will not be allocated to nodes that don't have values set for those attributes. +NOTE: Number of primary/replica of a shard allocated on a specific group +of nodes with the same awareness attribute value is determined by the number +of attribute values. When the number of nodes in groups is unbalanced and +there are many replicas, replica shards may be left unassigned. + [float] [[forced-awareness]] === Forced Awareness diff --git a/docs/reference/query-dsl/and-query.asciidoc b/docs/reference/query-dsl/and-query.asciidoc deleted file mode 100644 index 5ef23af2c1a..00000000000 --- a/docs/reference/query-dsl/and-query.asciidoc +++ /dev/null @@ -1,34 +0,0 @@ -[[query-dsl-and-query]] -=== And Query - -deprecated[2.0.0-beta1, Use the `bool` query instead] - -A query that matches documents using the `AND` boolean operator on other -queries. 
- -[source,js] --------------------------------------------------- -{ - "filtered" : { - "query" : { - "term" : { "name.first" : "shay" } - }, - "filter" : { - "and" : [ - { - "range" : { - "postDate" : { - "from" : "2010-03-01", - "to" : "2010-04-01" - } - } - }, - { - "prefix" : { "name.second" : "ba" } - } - ] - } - } -} --------------------------------------------------- - diff --git a/docs/reference/query-dsl/compound-queries.asciidoc b/docs/reference/query-dsl/compound-queries.asciidoc index 0228ddd90aa..c6a1f0f2c3a 100644 --- a/docs/reference/query-dsl/compound-queries.asciidoc +++ b/docs/reference/query-dsl/compound-queries.asciidoc @@ -41,18 +41,6 @@ documents which also match a `negative` query. Execute one query for the specified indices, and another for other indices. -<>, <>, <>:: - -Synonyms for the `bool` query. - -<>:: - -Combine a query clause in query context with another in filter context. deprecated[2.0.0-beta1,Use the `bool` query instead] - -<>:: - -Limits the number of documents examined per shard. 
- include::constant-score-query.asciidoc[] include::bool-query.asciidoc[] @@ -60,10 +48,5 @@ include::dis-max-query.asciidoc[] include::function-score-query.asciidoc[] include::boosting-query.asciidoc[] include::indices-query.asciidoc[] -include::and-query.asciidoc[] include::not-query.asciidoc[] -include::or-query.asciidoc[] -include::filtered-query.asciidoc[] -include::limit-query.asciidoc[] - diff --git a/docs/reference/query-dsl/filtered-query.asciidoc b/docs/reference/query-dsl/filtered-query.asciidoc deleted file mode 100644 index 5d399d07df4..00000000000 --- a/docs/reference/query-dsl/filtered-query.asciidoc +++ /dev/null @@ -1,96 +0,0 @@ -[[query-dsl-filtered-query]] -=== Filtered Query - -deprecated[2.0.0-beta1, Use the `bool` query instead with a `must` clause for the query and a `filter` clause for the filter] - -The `filtered` query is used to combine a query which will be used for -scoring with another query which will only be used for filtering the result -set. - -TIP: Exclude as many document as you can with a filter, then query just the -documents that remain. - -[source,js] --------------------------------------------------- -{ - "filtered": { - "query": { - "match": { "tweet": "full text search" } - }, - "filter": { - "range": { "created": { "gte": "now-1d/d" }} - } - } -} --------------------------------------------------- - -The `filtered` query can be used wherever a `query` is expected, for instance, -to use the above example in search request: - -[source,js] --------------------------------------------------- -curl -XGET localhost:9200/_search -d ' -{ - "query": { - "filtered": { <1> - "query": { - "match": { "tweet": "full text search" } - }, - "filter": { - "range": { "created": { "gte": "now-1d/d" }} - } - } - } -} -' --------------------------------------------------- -<1> The `filtered` query is passed as the value of the `query` - parameter in the search request. 
- -==== Filtering without a query - -If a `query` is not specified, it defaults to the -<>. This means that the -`filtered` query can be used to wrap just a filter, so that it can be used -wherever a query is expected. - -[source,js] --------------------------------------------------- -curl -XGET localhost:9200/_search -d ' -{ - "query": { - "filtered": { <1> - "filter": { - "range": { "created": { "gte": "now-1d/d" }} - } - } - } -} -' --------------------------------------------------- -<1> No `query` has been specified, so this request applies just the filter, - returning all documents created since yesterday. - -===== Multiple filters - -Multiple filters can be applied by wrapping them in a -<>, for example: - -[source,js] --------------------------------------------------- -{ - "filtered": { - "query": { "match": { "tweet": "full text search" }}, - "filter": { - "bool": { - "must": { "range": { "created": { "gte": "now-1d/d" }}}, - "should": [ - { "term": { "featured": true }}, - { "term": { "starred": true }} - ], - "must_not": { "term": { "deleted": false }} - } - } - } -} --------------------------------------------------- diff --git a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc index 60aab15b54a..6710461b5ba 100644 --- a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc +++ b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc @@ -22,8 +22,8 @@ Then the following simple query can be executed with a [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -75,8 +75,8 @@ representation of the geo point, the filter can accept it as well: [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -106,8 +106,8 @@ conform with http://geojson.org/[GeoJSON]. 
[source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -130,8 +130,8 @@ Format in `lat,lon`. [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -152,8 +152,8 @@ Format in `lat,lon`. [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -181,8 +181,8 @@ values separately. [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -227,8 +227,8 @@ are not supported. Here is an example: [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { diff --git a/docs/reference/query-dsl/geo-distance-query.asciidoc b/docs/reference/query-dsl/geo-distance-query.asciidoc index 130319d951f..c5b6029dc2f 100644 --- a/docs/reference/query-dsl/geo-distance-query.asciidoc +++ b/docs/reference/query-dsl/geo-distance-query.asciidoc @@ -22,8 +22,8 @@ filter: [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -51,8 +51,8 @@ representation of the geo point, the filter can accept it as well: [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -77,8 +77,8 @@ conform with http://geojson.org/[GeoJSON]. [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -99,8 +99,8 @@ Format in `lat,lon`. 
[source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -119,8 +119,8 @@ Format in `lat,lon`. [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { diff --git a/docs/reference/query-dsl/geo-distance-range-query.asciidoc b/docs/reference/query-dsl/geo-distance-range-query.asciidoc index cacf0a7a9cb..901cca09829 100644 --- a/docs/reference/query-dsl/geo-distance-range-query.asciidoc +++ b/docs/reference/query-dsl/geo-distance-range-query.asciidoc @@ -6,8 +6,8 @@ Filters documents that exists within a range from a specific point: [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { diff --git a/docs/reference/query-dsl/geo-polygon-query.asciidoc b/docs/reference/query-dsl/geo-polygon-query.asciidoc index ff53a351b67..306b2dd2d84 100644 --- a/docs/reference/query-dsl/geo-polygon-query.asciidoc +++ b/docs/reference/query-dsl/geo-polygon-query.asciidoc @@ -7,7 +7,7 @@ points. Here is an example: [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, @@ -53,8 +53,8 @@ conform with http://geojson.org/[GeoJSON]. [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -80,8 +80,8 @@ Format in `lat,lon`. [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { @@ -105,8 +105,8 @@ Format in `lat,lon`. 
[source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { diff --git a/docs/reference/query-dsl/geo-shape-query.asciidoc b/docs/reference/query-dsl/geo-shape-query.asciidoc index 8ab2f13a22c..77deabcad91 100644 --- a/docs/reference/query-dsl/geo-shape-query.asciidoc +++ b/docs/reference/query-dsl/geo-shape-query.asciidoc @@ -40,8 +40,8 @@ The following query will find the point using the Elasticsearch's -------------------------------------------------- { "query":{ - "filtered": { - "query": { + "bool": { + "must": { "match_all": {} }, "filter": { @@ -81,8 +81,8 @@ shape: [source,js] -------------------------------------------------- { - "filtered": { - "query": { + "bool": { + "must": { "match_all": {} }, "filter": { diff --git a/docs/reference/query-dsl/geohash-cell-query.asciidoc b/docs/reference/query-dsl/geohash-cell-query.asciidoc index c3c83de1866..807e8671572 100644 --- a/docs/reference/query-dsl/geohash-cell-query.asciidoc +++ b/docs/reference/query-dsl/geohash-cell-query.asciidoc @@ -43,8 +43,8 @@ next to the given cell. [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "match_all" : {} }, "filter" : { diff --git a/docs/reference/query-dsl/has-child-query.asciidoc b/docs/reference/query-dsl/has-child-query.asciidoc index 6f42e6f6df6..24951bbe930 100644 --- a/docs/reference/query-dsl/has-child-query.asciidoc +++ b/docs/reference/query-dsl/has-child-query.asciidoc @@ -23,9 +23,9 @@ an example: ==== Scoring capabilities The `has_child` also has scoring support. The -supported score types are `min`, `max`, `sum`, `avg` or `none`. The default is +supported score modes are `min`, `max`, `total`, `avg` or `none`. The default is `none` and yields the same behaviour as in previous versions. 
If the -score type is set to another value than `none`, the scores of all the +score mode is set to another value than `none`, the scores of all the matching child documents are aggregated into the associated parent documents. The score type can be specified with the `score_mode` field inside the `has_child` query: diff --git a/docs/reference/query-dsl/has-parent-query.asciidoc b/docs/reference/query-dsl/has-parent-query.asciidoc index 2b848118204..19958bf149b 100644 --- a/docs/reference/query-dsl/has-parent-query.asciidoc +++ b/docs/reference/query-dsl/has-parent-query.asciidoc @@ -29,8 +29,8 @@ ignores the score from the parent document. The score is in this case equal to the boost on the `has_parent` query (Defaults to 1). If the score is set to `true`, then the score of the matching parent document is aggregated into the child documents belonging to the -matching parent document. The score type can be specified with the -`score` field inside the `has_parent` query: +matching parent document. The score mode can be specified with the +`score_mode` field inside the `has_parent` query: [source,js] -------------------------------------------------- diff --git a/docs/reference/query-dsl/limit-query.asciidoc b/docs/reference/query-dsl/limit-query.asciidoc deleted file mode 100644 index 198ad7862ab..00000000000 --- a/docs/reference/query-dsl/limit-query.asciidoc +++ /dev/null @@ -1,19 +0,0 @@ -[[query-dsl-limit-query]] -=== Limit Query - -A limit query limits the number of documents (per shard) to execute on. 
-For example: - -[source,js] --------------------------------------------------- -{ - "filtered" : { - "filter" : { - "limit" : {"value" : 100} - }, - "query" : { - "term" : { "name.first" : "shay" } - } - } -} --------------------------------------------------- diff --git a/docs/reference/query-dsl/not-query.asciidoc b/docs/reference/query-dsl/not-query.asciidoc index a74a0f11734..7854ee90afa 100644 --- a/docs/reference/query-dsl/not-query.asciidoc +++ b/docs/reference/query-dsl/not-query.asciidoc @@ -6,8 +6,8 @@ A query that filters out matched documents using a query. For example: [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "term" : { "name.first" : "shay" } }, "filter" : { @@ -29,8 +29,8 @@ Or, in a longer form with a `filter` element: [source,js] -------------------------------------------------- { - "filtered" : { - "query" : { + "bool" : { + "must" : { "term" : { "name.first" : "shay" } }, "filter" : { diff --git a/docs/reference/query-dsl/or-query.asciidoc b/docs/reference/query-dsl/or-query.asciidoc deleted file mode 100644 index 46005dc58c8..00000000000 --- a/docs/reference/query-dsl/or-query.asciidoc +++ /dev/null @@ -1,29 +0,0 @@ -[[query-dsl-or-query]] -=== Or Query - -deprecated[2.0.0-beta1, Use the `bool` query instead] - -A query that matches documents using the `OR` boolean operator on other -queries. 
- -[source,js] --------------------------------------------------- -{ - "filtered" : { - "query" : { - "term" : { "name.first" : "shay" } - }, - "filter" : { - "or" : [ - { - "term" : { "name.second" : "banon" } - }, - { - "term" : { "name.nick" : "kimchy" } - } - ] - } - } -} --------------------------------------------------- - diff --git a/docs/reference/query-dsl/script-query.asciidoc b/docs/reference/query-dsl/script-query.asciidoc index c14e8142f7d..223460f723d 100644 --- a/docs/reference/query-dsl/script-query.asciidoc +++ b/docs/reference/query-dsl/script-query.asciidoc @@ -2,13 +2,13 @@ === Script Query A query allowing to define -<> as filters. For -example: +<> as queries. They are typically used in a filter +context, for example: [source,js] ---------------------------------------------- -"filtered" : { - "query" : { +"bool" : { + "must" : { ... }, "filter" : { @@ -28,8 +28,8 @@ to use the ability to pass parameters to the script itself, for example: [source,js] ---------------------------------------------- -"filtered" : { - "query" : { +"bool" : { + "must" : { ... 
}, "filter" : { diff --git a/docs/reference/query-dsl/terms-query.asciidoc b/docs/reference/query-dsl/terms-query.asciidoc index 58b0ba5d85b..16cfd8cd89a 100644 --- a/docs/reference/query-dsl/terms-query.asciidoc +++ b/docs/reference/query-dsl/terms-query.asciidoc @@ -79,16 +79,12 @@ curl -XPUT localhost:9200/tweets/tweet/1 -d '{ # search on all the tweets that match the followers of user 2 curl -XGET localhost:9200/tweets/_search -d '{ "query" : { - "filtered" : { - "filter" : { - "terms" : { - "user" : { - "index" : "users", - "type" : "user", - "id" : "2", - "path" : "followers" - } - } + "terms" : { + "user" : { + "index" : "users", + "type" : "user", + "id" : "2", + "path" : "followers" } } } diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 9c6a9b49aae..922216aeb93 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -77,20 +77,6 @@ in ``query context'' and as a filter in ``filter context'' (see <>). Queries and filters have been merged. Any query clause can now be used as a query in ``query context'' and as a filter in ``filter context'' (see <>). -[role="exclude",id="query-dsl-and-filter"] -=== And Filter - -The `and` filter has been replaced by the <>. It behaves -as a query in ``query context'' and as a filter in ``filter context'' (see -<>). - -[role="exclude",id="query-dsl-or-filter"] -=== Or Filter - -The `or` filter has been replaced by the <>. It behaves -as a query in ``query context'' and as a filter in ``filter context'' (see -<>). - [role="exclude",id="query-dsl-not-filter"] === Not Filter @@ -195,13 +181,6 @@ The `indices` filter has been replaced by the <>. It b as a query in ``query context'' and as a filter in ``filter context'' (see <>). -[role="exclude",id="query-dsl-limit-filter"] -=== Limit Filter - -The `limit` filter has been replaced by the <>. -It behaves as a query in ``query context'' and as a filter in ``filter -context'' (see <>). 
- [role="exclude",id="query-dsl-match-all-filter"] === Match All Filter @@ -381,3 +360,86 @@ The shard query cache has been renamed <>. === Query cache The filter cache has been renamed <>. + +[role="exclude",id="query-dsl-filtered-query"] +=== Filtered query + +The `filtered` query is replaced in favour of the <> query. Instead of +the following: + +[source,js] +------------------------- +GET _search +{ + "query": { + "filtered": { + "query": { + "match": { + "text": "quick brown fox" + } + }, + "filter": { + "term": { + "status": "published" + } + } + } + } +} +------------------------- + +move the query and filter to the `must` and `filter` parameters in the `bool` +query: + +[source,js] +------------------------- +GET _search +{ + "query": { + "bool": { + "must": { + "match": { + "text": "quick brown fox" + } + }, + "filter": { + "term": { + "status": "published" + } + } + } + } +} +------------------------- + +[role="exclude",id="query-dsl-or-query"] +=== Or query + +The `or` query is replaced in favour of the <> query. + +[role="exclude",id="query-dsl-or-filter"] +=== Or filter + +The `or` filter is replaced in favour of the <> query. + +[role="exclude",id="query-dsl-and-query"] +=== And query + +The `and` query is replaced in favour of the <> query. + +[role="exclude",id="query-dsl-and-filter"] +=== And filter + +The `and` filter is replaced in favour of the <> query. + +[role="exclude",id="query-dsl-limit-query"] +=== Limit query + +The `limit` query is replaced in favour of the <> +parameter of search requests. + +[role="exclude",id="query-dsl-limit-filter"] +=== Limit filter + +The `limit` filter is replaced in favour of the <> +parameter of search requests. 
diff --git a/docs/reference/search.asciidoc b/docs/reference/search.asciidoc index 9a0de108c5a..f59444d739e 100644 --- a/docs/reference/search.asciidoc +++ b/docs/reference/search.asciidoc @@ -34,8 +34,8 @@ only the relevant shard: -------------------------------------------------- $ curl -XGET 'http://localhost:9200/twitter/tweet/_search?routing=kimchy' -d '{ "query": { - "filtered" : { - "query" : { + "bool" : { + "must" : { "query_string" : { "query" : "some query string here" } diff --git a/docs/reference/search/request/from-size.asciidoc b/docs/reference/search/request/from-size.asciidoc index d8d80952554..d19b850ec4a 100644 --- a/docs/reference/search/request/from-size.asciidoc +++ b/docs/reference/search/request/from-size.asciidoc @@ -19,3 +19,8 @@ defaults to `10`. } } -------------------------------------------------- + +Note that `from` + `size` can not be more than the `index.max_result_window` +index setting which defaults to 10,000. See the +{ref}/search-request-scroll.html[Scroll] api for more efficient ways to do deep +scrolling. diff --git a/docs/reference/search/request/named-queries-and-filters.asciidoc b/docs/reference/search/request/named-queries-and-filters.asciidoc index 183f0e73f10..96d7c1357a9 100644 --- a/docs/reference/search/request/named-queries-and-filters.asciidoc +++ b/docs/reference/search/request/named-queries-and-filters.asciidoc @@ -1,20 +1,16 @@ [[search-request-named-queries-and-filters]] -=== Named Queries and Filters +=== Named Queries Each filter and query can accept a `_name` in its top level definition. 
[source,js] -------------------------------------------------- { - "filtered" : { - "query" : { - "bool" : { - "should" : [ - {"match" : { "name.first" : {"query" : "shay", "_name" : "first"} }}, - {"match" : { "name.last" : {"query" : "banon", "_name" : "last"} }} - ] - } - }, + "bool" : { + "should" : [ + {"match" : { "name.first" : {"query" : "shay", "_name" : "first"} }}, + {"match" : { "name.last" : {"query" : "banon", "_name" : "last"} }} + ], "filter" : { "terms" : { "name.last" : ["banon", "kimchy"], @@ -26,32 +22,5 @@ Each filter and query can accept a `_name` in its top level definition. -------------------------------------------------- The search response will include for each hit the `matched_queries` it matched on. The tagging of queries and filters -only make sense for compound queries and filters (such as `bool` query and filter, `or` and `and` filter, `filtered` query etc.). +only make sense for the `bool` query. -Note, the query filter had to be enhanced in order to support this. In -order to set a name, the `fquery` filter should be used, which wraps a -query (just so there will be a place to set a name for it), for example: - -[source,js] --------------------------------------------------- -{ - "filtered" : { - "query" : { - "term" : { "name.first" : "shay" } - }, - "filter" : { - "fquery" : { - "query" : { - "term" : { "name.last" : "banon" } - }, - "_name" : "test" - } - } - } -} --------------------------------------------------- - -==== Named queries - -The support for the `_name` option on queries is available from version `0.90.4` and the support on filters is available -also in versions before `0.90.4`. 
diff --git a/docs/reference/search/request/post-filter.asciidoc b/docs/reference/search/request/post-filter.asciidoc index 274d14bd698..7c352e9fd50 100644 --- a/docs/reference/search/request/post-filter.asciidoc +++ b/docs/reference/search/request/post-filter.asciidoc @@ -2,28 +2,24 @@ === Post filter The `post_filter` is applied to the search `hits` at the very end of a search -request, after aggregations have already been calculated. It's purpose is +request, after aggregations have already been calculated. Its purpose is best explained by example: Imagine that you are selling shirts, and the user has specified two filters: `color:red` and `brand:gucci`. You only want to show them red shirts made by Gucci in the search results. Normally you would do this with a -<>: +<>: [source,js] -------------------------------------------------- curl -XGET localhost:9200/shirts/_search -d ' { "query": { - "filtered": { - "filter": { - "bool": { - "must": [ - { "term": { "color": "red" }}, - { "term": { "brand": "gucci" }} - ] - } - } + "bool": { + "filter": [ + { "term": { "color": "red" }}, + { "term": { "brand": "gucci" }} + ] } } } @@ -43,15 +39,11 @@ This can be done with a curl -XGET localhost:9200/shirts/_search -d ' { "query": { - "filtered": { - "filter": { - "bool": { - "must": [ - { "term": { "color": "red" }}, - { "term": { "brand": "gucci" }} - ] - } - } + "bool": { + "filter": [ + { "term": { "color": "red" }}, + { "term": { "brand": "gucci" }} + ] } }, "aggs": { @@ -78,7 +70,7 @@ the `post_filter`: curl -XGET localhost:9200/shirts/_search -d ' { "query": { - "filtered": { + "bool": { "filter": { { "term": { "brand": "gucci" }} <1> } diff --git a/docs/reference/search/search-template.asciidoc b/docs/reference/search/search-template.asciidoc index bce95289e8e..77670acafb1 100644 --- a/docs/reference/search/search-template.asciidoc +++ b/docs/reference/search/search-template.asciidoc @@ -169,8 +169,8 @@ We could write the query as: 
------------------------------------------ { "query": { - "filtered": { - "query": { + "bool": { + "must": { "match": { "line": "{{text}}" <1> } @@ -212,7 +212,7 @@ via the REST API, should be written as a string: [source,js] -------------------- -"inline": "{\"query\":{\"filtered\":{\"query\":{\"match\":{\"line\":\"{{text}}\"}},\"filter\":{{{#line_no}}\"range\":{\"line_no\":{{{#start}}\"gte\":\"{{start}}\"{{#end}},{{/end}}{{/start}}{{#end}}\"lte\":\"{{end}}\"{{/end}}}}{{/line_no}}}}}}" +"inline": "{\"query\":{\"bool\":{\"must\":{\"match\":{\"line\":\"{{text}}\"}},\"filter\":{{{#line_no}}\"range\":{\"line_no\":{{{#start}}\"gte\":\"{{start}}\"{{#end}},{{/end}}{{/start}}{{#end}}\"lte\":\"{{end}}\"{{/end}}}}{{/line_no}}}}}}" -------------------- ================================== diff --git a/docs/reference/search/suggesters/completion-suggest.asciidoc b/docs/reference/search/suggesters/completion-suggest.asciidoc index ee8969b95e5..af93ea1598a 100644 --- a/docs/reference/search/suggesters/completion-suggest.asciidoc +++ b/docs/reference/search/suggesters/completion-suggest.asciidoc @@ -15,11 +15,12 @@ suggestion is, why you should use it at all, if you have prefix queries already. The answer is simple: Prefix suggestions are fast. The data structures are internally backed by Lucenes -`AnalyzingSuggester`, which uses FSTs to execute suggestions. Usually -these data structures are costly to create, stored in-memory and need to -be rebuilt every now and then to reflect changes in your indexed -documents. The `completion` suggester circumvents this by storing the -FST as part of your index during index time. This allows for really fast +`AnalyzingSuggester`, which uses FSTs (finite state transducers) to +execute suggestions. Usually these data structures are costly to +create, stored in-memory and need to be rebuilt every now and then to +reflect changes in your indexed documents. 
The `completion` suggester +circumvents this by storing the FST (finite state transducer) as part +of your index during index time. This allows for really fast loads and executions. [[completion-suggester-mapping]] diff --git a/docs/reference/search/validate.asciidoc b/docs/reference/search/validate.asciidoc index b47f63e6942..a08e183089c 100644 --- a/docs/reference/search/validate.asciidoc +++ b/docs/reference/search/validate.asciidoc @@ -55,8 +55,8 @@ Or, with a request body: -------------------------------------------------- curl -XGET 'http://localhost:9200/twitter/tweet/_validate/query' -d '{ "query" : { - "filtered" : { - "query" : { + "bool" : { + "must" : { "query_string" : { "query" : "*:*" } @@ -99,7 +99,7 @@ curl -XGET 'http://localhost:9200/twitter/tweet/_validate/query?q=post_date:foo& "explanations" : [ { "index" : "twitter", "valid" : false, - "error" : "org.elasticsearch.index.query.QueryParsingException: [twitter] Failed to parse; org.elasticsearch.ElasticsearchParseException: failed to parse date field [foo], tried both date format [dateOptionalTime], and timestamp number; java.lang.IllegalArgumentException: Invalid format: \"foo\"" + "error" : "[twitter] QueryParsingException[Failed to parse]; nested: IllegalArgumentException[Invalid format: \"foo\"];; java.lang.IllegalArgumentException: Invalid format: \"foo\"" } ] } -------------------------------------------------- @@ -112,14 +112,14 @@ For Fuzzy Queries: [source,js] -------------------------------------------------- -curl -XGET 'http://localhost:9200/imdb/movies/_validate/query?rewrite=true' +curl -XGET 'http://localhost:9200/imdb/movies/_validate/query?rewrite=true' -d ' { "query": { "fuzzy": { "actors": "kyle" } } -} +}' -------------------------------------------------- Response: @@ -137,7 +137,7 @@ Response: { "index": "imdb", "valid": true, - "explanation": "filtered(plot:kyle plot:kylie^0.75 plot:kyne^0.75 plot:lyle^0.75 plot:pyle^0.75)->cache(_type:movies)" + "explanation": "plot:kyle 
plot:kylie^0.75 plot:kyne^0.75 plot:lyle^0.75 plot:pyle^0.75 #_type:movies" } ] } @@ -175,7 +175,7 @@ Response: { "index": "imdb", "valid": true, - "explanation": "filtered(((title:terminator^3.71334 plot:future^2.763601 plot:human^2.8415773 plot:sarah^3.4193945 plot:kyle^3.8244398 plot:cyborg^3.9177752 plot:connor^4.040236 plot:reese^4.7133346 ... )~6) -ConstantScore(_uid:movies#88247))->cache(_type:movies)" + "explanation": "((title:terminator^3.71334 plot:future^2.763601 plot:human^2.8415773 plot:sarah^3.4193945 plot:kyle^3.8244398 plot:cyborg^3.9177752 plot:connor^4.040236 plot:reese^4.7133346 ... )~6) -ConstantScore(_uid:movies#88247) #_type:movies" } ] } diff --git a/docs/reference/testing/testing-framework.asciidoc b/docs/reference/testing/testing-framework.asciidoc index ecfd168c144..a2d3f39d5c1 100644 --- a/docs/reference/testing/testing-framework.asciidoc +++ b/docs/reference/testing/testing-framework.asciidoc @@ -18,17 +18,11 @@ All of the tests are run using a custom junit runner, the `RandomizedRunner` pro [[using-elasticsearch-test-classes]] === Using the elasticsearch test classes -First, you need to include the testing dependency in your project. If you use maven and its `pom.xml` file, it looks like this +First, you need to include the testing dependency in your project, along with the elasticsearch dependency you have already added. If you use maven and its `pom.xml` file, it looks like this [[source,xml]] -------------------------------------------------- - - com.carrotsearch.randomizedtesting - randomizedtesting-runner - ${randomizedtesting-runner.version} - test - org.apache.lucene lucene-test-framework @@ -42,25 +36,22 @@ First, you need to include the testing dependency in your project. 
If you use ma test test-jar - - org.elasticsearch - elasticsearch - ${elasticsearch.version} - test - -------------------------------------------------- -Replace the elasticsearch version and the lucene versions with the current elasticsearch version and its accompanying lucene release. -And replace the "randomizedtesting version" with the version that the current elasticsearch uses. +Replace the elasticsearch version and the lucene version with the corresponding elasticsearch version and its accompanying lucene release. -There are already have a couple of classes, you can inherit from in your own test classes. The advantages of doing so is having already defined loggers, the whole randomized infrastructure is set up already. +We provide a few classes that you can inherit from in your own test classes which provide: + +* pre-defined loggers +* randomized testing infrastructure +* a number of helper methods [[unit-tests]] === unit tests -In case you only need to execute a unit test, because your implementation can be isolated that well and does not require an up and running elasticsearch cluster, you can use the `ElasticsearchTestCase`. If you are testing lucene features, use `ElasticsearchLuceneTestCase` and if you are testing concrete token streams, use the `ElasticsearchTokenStreamTestCase` class. Those specific classes execute additional checks, which ensure that no resources leaks are happening, after the test has run. +If your test is a well isolated unit test which doesn't need a running elasticsearch cluster, you can use the `ESTestCase`. If you are testing lucene features, use `ESTestCase` and if you are testing concrete token streams, use the `ESTokenStreamTestCase` class. Those specific classes execute additional checks which ensure that no resources leaks are happening, after the test has run. 
[[integration-tests]] @@ -68,18 +59,20 @@ In case you only need to execute a unit test, because your implementation can be These kind of tests require firing up a whole cluster of nodes, before the tests can actually be run. Compared to unit tests they are obviously way more time consuming, but the test infrastructure tries to minimize the time cost by only restarting the whole cluster, if this is configured explicitly. -The class your tests have to inherit from is `ElasticsearchIntegrationTest`. As soon as you inherit, there is no need for you to start any elasticsearch nodes manually in your test anymore, though you might need to ensure that at least a certain number of nodes is up. +The class your tests have to inherit from is `ESIntegTestCase`. By inheriting from this class, you will no longer need to start elasticsearch nodes manually in your test, although you might need to ensure that at least a certain number of nodes are up. The integration test behaviour can be configured heavily by specifying different system properties on test runs. See the `TESTING.asciidoc` documentation in the https://github.com/elastic/elasticsearch/blob/master/TESTING.asciidoc[source repository] for more information. + [[number-of-shards]] ==== number of shards The number of shards used for indices created during integration tests is randomized between `1` and `10` unless overwritten upon index creation via index settings. -Rule of thumb is not to specify the number of shards unless needed, so that each test will use a different one all the time. +The rule of thumb is not to specify the number of shards unless needed, so that each test will use a different one all the time. Alternatively you can override the `numberOfShards()` method. The same applies to the `numberOfReplicas()` method. + [[helper-methods]] ==== generic helper methods -There are a couple of helper methods in `ElasticsearchIntegrationTest`, which will make your tests shorter and more concise. 
+There are a couple of helper methods in `ESIntegTestCase`, which will make your tests shorter and more concise. [horizontal] `refresh()`:: Refreshes all indices in a cluster @@ -98,7 +91,7 @@ There are a couple of helper methods in `ElasticsearchIntegrationTest`, which wi [[test-cluster-methods]] ==== test cluster methods -The `TestCluster` class is the heart of the cluster functionality in a randomized test and allows you to configure a specific setting or replay certain types of outages to check, how your custom code reacts. +The `InternalTestCluster` class is the heart of the cluster functionality in a randomized test and allows you to configure a specific setting or replay certain types of outages to check, how your custom code reacts. [horizontal] `ensureAtLeastNumNodes(n)`:: Ensure at least the specified number of nodes is running in the cluster @@ -112,10 +105,30 @@ The `TestCluster` class is the heart of the cluster functionality in a randomize `startNode(settings)`:: Create and start a new elasticsearch node +[[changing-node-settings]] +==== Changing node settings + +If you want to ensure a certain configuration for the nodes, which are started as part of the `EsIntegTestCase`, you can override the `nodeSettings()` method + +[source,java] +----------------------------------------- +public class Mytests extends ESIntegTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) + .put("node.mode", "network") + .build(); + } + +} +----------------------------------------- + + [[accessing-clients]] ==== Accessing clients -In order to execute any actions, you have to use a client. You can use the `ElasticsearchIntegrationTest.client()` method to get back a random client. This client can be a `TransportClient` or a `NodeClient` - and usually you do not need to care as long as the action gets executed. 
There are several more methods for client selection inside of the `TestCluster` class, which can be accessed using the `ElasticsearchIntegrationTest.cluster()` method. +In order to execute any actions, you have to use a client. You can use the `ESIntegTestCase.client()` method to get back a random client. This client can be a `TransportClient` or a `NodeClient` - and usually you do not need to care as long as the action gets executed. There are several more methods for client selection inside of the `InternalTestCluster` class, which can be accessed using the `ESIntegTestCase.internalCluster()` method. [horizontal] `iterator()`:: An iterator over all available clients @@ -136,7 +149,7 @@ You can use the `@ClusterScope` annotation at class level to configure this beha [source,java] ----------------------------------------- @ClusterScope(scope=TEST, numNodes=1) -public class CustomSuggesterSearchTests extends ElasticsearchIntegrationTest { +public class CustomSuggesterSearchTests extends ESIntegTestCase { // ... tests go here } ----------------------------------------- @@ -145,7 +158,7 @@ The above sample configures the test to use a new cluster for each test method. [[changing-node-configuration]] -==== Changing node configuration +==== Changing plugins via configuration As elasticsearch is using JUnit 4, using the `@Before` and `@After` annotations is not a problem. However you should keep in mind, that this does not have any effect in your cluster setup, as the cluster is already up and running when those methods are run. So in case you want to configure settings - like loading a plugin on node startup - before the node is actually running, you should overwrite the `nodePlugins()` method from the `ESIntegTestCase` class and return the plugin classes each node should load. 
@@ -157,13 +170,6 @@ protected Collection> nodePlugins() { } ----------------------------------------- -[[parametrized-tests]] -=== parameterized tests - -It is possible to write parameterized tests, that get run multiple times with different parameters. `RandomizedRunner` needs to be used rather than the `Parameterized` runner provided with junit (all the base test classes already use `RandomizedRunner` though). The method that provides the parameters -needs to be annotated with the `@ParametersFactory` annotation and must be `static`, `public`, without arguments and must have a return type assignable to `Iterable`. The iterable must return arrays conforming to the suite class's constructor with respect to the number and types of parameters. -The constructor's parameters can be annotated with the `@Name` annotation to provide more descriptive parameter names for test descriptions. Have a look at `ElasticsearchRestTests` for an example. - [[randomized-testing]] === Randomized testing @@ -200,6 +206,8 @@ So, how can you create random data. The most important thing to know is, that yo `randomLocale()`:: Returns a random locale `randomTimeZone()`:: Returns a random timezone +`randomFrom()`:: Returns a random element from a list/array + In addition, there are a couple of helper methods, allowing you to create random ASCII and Unicode strings, see methods beginning with `randomAscii`, `randomUnicode`, and `randomRealisticUnicode` in the random test class. The latter one tries to create more realistic unicode string by not being arbitrary random. If you want to debug a specific problem with a specific random seed, you can use the `@Seed` annotation to configure a specific seed for a test. If you want to run a test more than once, instead of starting the whole test suite over and over again, you can use the `@Repeat` annotation with an arbitrary value. Each iteration than gets run with a different seed. 
@@ -208,7 +216,7 @@ If you want to debug a specific problem with a specific random seed, you can use [[assertions]] === Assertions -As many elasticsearch tests are checking for a similar output, like the amount of hits or the first hit or special highlighting, a couple of predefined assertions have been created. Those have been put into the `ElasticsearchAssertions` class. +As many elasticsearch tests are checking for a similar output, like the amount of hits or the first hit or special highlighting, a couple of predefined assertions have been created. Those have been put into the `ElasticsearchAssertions` class. There is also a specific geo assertions in `ElasticsearchGeoAssertions`. [horizontal] `assertHitCount()`:: Checks hit count of a search or count request @@ -232,6 +240,8 @@ Common matchers `hasId()`:: Matcher to check for a search hit id `hasType()`:: Matcher to check for a search hit type `hasIndex()`:: Matcher to check for a search hit index +`hasScore()`:: Matcher to check for a certain score of a hit +`hasStatus()`:: Matcher to check for a certain `RestStatus` of a response Usually, you would combine assertions and matchers in your test like this diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1701068.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index c1a1ec208f5..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b7f57ef60f302b30e88196d4f0d11f789c5cfabd diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1702265.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..54c8e962a9d --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +1f92d0376ca9219b0bf96fe5bd9a913089608d6a diff --git 
a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1701068.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 60ea23d0f56..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5d1023fc3f28a42357d44d3a330ac0df1df4bf42 diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1702265.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..a3885bfaaa1 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +60ee5bc1ac8ec102434e7064141a1f40281918b5 diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1701068.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 92243aee3ee..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -654c3e345ffdd74605582d1320c51c1c550a5cca diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1702265.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..44ac92c5195 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +977aa506485d358b40602347c11238b0f912fe2c diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1701068.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index a9159ebb32a..00000000000 --- 
a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -80c09e367abf2ad936c86cf74a16ae2b4e805b81 diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1702265.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..6f7d174c750 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +61911b8400160bd206ea6ea46ba08fd9ba09e72b diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1701068.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1701068.jar.sha1 deleted file mode 100644 index 390511f227b..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1701068.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7c6ae4fc7e8e1d39c155068fea67b7fabb12c444 diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1702265.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1702265.jar.sha1 new file mode 100644 index 00000000000..cf50fb6450e --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1702265.jar.sha1 @@ -0,0 +1 @@ +5a9bdf48b63562bf1ac8a73c1c6bdb4cc450439e diff --git a/plugins/cloud-gce/src/main/java/org/elasticsearch/plugin/cloud/gce/CloudGcePlugin.java b/plugins/cloud-gce/src/main/java/org/elasticsearch/plugin/cloud/gce/CloudGcePlugin.java index 8b2a3d27da3..5384f2c2599 100644 --- a/plugins/cloud-gce/src/main/java/org/elasticsearch/plugin/cloud/gce/CloudGcePlugin.java +++ b/plugins/cloud-gce/src/main/java/org/elasticsearch/plugin/cloud/gce/CloudGcePlugin.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoveryModule; 
import org.elasticsearch.discovery.gce.GceDiscovery; +import org.elasticsearch.discovery.gce.GceUnicastHostsProvider; import org.elasticsearch.plugins.Plugin; import java.util.ArrayList; @@ -72,6 +73,7 @@ public class CloudGcePlugin extends Plugin { public void onModule(DiscoveryModule discoveryModule) { discoveryModule.addDiscoveryType("gce", GceDiscovery.class); + discoveryModule.addUnicastHostProvider(GceUnicastHostsProvider.class); } } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java index 180581b09ac..38c704249d5 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java @@ -24,6 +24,7 @@ import org.elasticsearch.cloud.aws.Ec2Module; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.discovery.ec2.AwsEc2UnicastHostsProvider; import org.elasticsearch.discovery.ec2.Ec2Discovery; import org.elasticsearch.plugins.Plugin; @@ -61,5 +62,6 @@ public class Ec2DiscoveryPlugin extends Plugin { public void onModule(DiscoveryModule discoveryModule) { discoveryModule.addDiscoveryType("ec2", Ec2Discovery.class); + discoveryModule.addUnicastHostProvider(AwsEc2UnicastHostsProvider.class); } } diff --git a/plugins/repository-s3/rest-api-spec/test/repository_s3/20_repository.yaml b/plugins/repository-s3/rest-api-spec/test/repository_s3/20_repository.yaml index 69b50b66530..9891c3e3adf 100644 --- a/plugins/repository-s3/rest-api-spec/test/repository_s3/20_repository.yaml +++ b/plugins/repository-s3/rest-api-spec/test/repository_s3/20_repository.yaml @@ -1,6 +1,9 @@ # Integration tests for Repository S3 component # "S3 repository 
can be registereed": + - skip: + version: "all" + reason: does not work on java9, see https://github.com/aws/aws-sdk-java/pull/432 - do: snapshot.create_repository: repository: test_repo_s3_1 diff --git a/pom.xml b/pom.xml index cb4de2ac22c..ceef389cd37 100644 --- a/pom.xml +++ b/pom.xml @@ -45,7 +45,7 @@ 5.4.0 - 1701068 + 1702265 5.4.0-snapshot-${lucene.snapshot.revision} 2.1.16 2.5.3 @@ -115,8 +115,8 @@ fail ${skipTests} ${skipTests} - ${project.build.directory}/integ-tests - ${project.build.directory}/integ-deps + ${project.build.directory}/integ tests + ${project.build.directory}/integ deps ${integ.scratch}/temp 9400 9500 @@ -582,7 +582,7 @@ -XDignore.symbol.file -Xlint:all ${xlint.options} - -Werror + diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index e18ca34afa2..327bd2c76c6 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -69,7 +69,7 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { */ public static final String TESTS_CLUSTER_DEFAULT = "localhost:9300"; - protected static ESLogger logger = ESLoggerFactory.getLogger(ESSmokeClientTestCase.class.getName()); + protected static final ESLogger logger = ESLoggerFactory.getLogger(ESSmokeClientTestCase.class.getName()); private static final AtomicInteger counter = new AtomicInteger(); private static Client client; diff --git a/qa/vagrant/pom.xml b/qa/vagrant/pom.xml index c2b6ca79dca..e31560d6c55 100644 --- a/qa/vagrant/pom.xml +++ b/qa/vagrant/pom.xml @@ -165,6 +165,18 @@ ${elasticsearch.version} zip + + org.elasticsearch.plugin + discovery-ec2 + ${elasticsearch.version} + zip + + + org.elasticsearch.plugin + discovery-multicast + ${elasticsearch.version} + zip + org.elasticsearch.plugin 
lang-javascript @@ -177,6 +189,12 @@ ${elasticsearch.version} zip + + org.elasticsearch.plugin + mapper-murmur3 + ${elasticsearch.version} + zip + org.elasticsearch.plugin mapper-size @@ -185,7 +203,7 @@ org.elasticsearch.plugin - mapper-murmur3 + repository-s3 ${elasticsearch.version} zip diff --git a/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats b/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats index 8f605216a61..2579e6a7359 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats @@ -67,6 +67,17 @@ setup() { verify_package_installation } +@test "[DEB] elasticsearch isn't started by package install" { + # Wait a second to give Elasticsearch a change to start if it is going to. + # This isn't perfect by any means but its something. + sleep 1 + ! ps aux | grep elasticsearch | grep java + # You might be tempted to use jps instead of the above but that'd have to + # look like: + # ! sudo -u elasticsearch jps | grep -i elasticsearch + # which isn't really easier to read than the above. +} + @test "[DEB] test elasticsearch" { start_elasticsearch_service diff --git a/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats b/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats index 588fe382699..09bb5b0b39a 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats @@ -66,6 +66,13 @@ setup() { verify_package_installation } +@test "[RPM] elasticsearch isn't started by package install" { + # Wait a second to give Elasticsearch a change to start if it is going to. + # This isn't perfect by any means but its something. + sleep 1 + ! 
ps aux | grep elasticsearch | grep java +} + @test "[RPM] test elasticsearch" { start_elasticsearch_service diff --git a/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats b/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats index 8df4f4a980b..77e8d807362 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats @@ -46,6 +46,14 @@ setup() { systemctl daemon-reload } +@test "[SYSTEMD] daemon isn't enabled on restart" { + # Rather than restart the VM we just ask systemd if it plans on starting + # elasticsearch on restart. Not as strong as a restart but much much + # faster. + run systemctl is-enabled elasticsearch.service + [ "$output" = "disabled" ] +} + @test "[SYSTEMD] enable" { systemctl enable elasticsearch.service diff --git a/qa/vagrant/src/test/resources/packaging/scripts/70_sysv_initd.bats b/qa/vagrant/src/test/resources/packaging/scripts/70_sysv_initd.bats index 5bf43163ab1..4f134ba68f7 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/70_sysv_initd.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/70_sysv_initd.bats @@ -37,11 +37,31 @@ setup() { skip_not_dpkg_or_rpm } +@test "[INIT.D] remove any leftover configuration to start elasticsearch on restart" { + # This configuration can be added with a command like: + # $ sudo update-rc.d elasticsearch defaults 95 10 + # but we want to test that the RPM _doesn't_ add it on its own. + # Note that it'd be incorrect to use: + # $ sudo update-rc.d elasticsearch disable + # here because that'd prevent elasticsearch from installing the symlinks + # that cause it to be started on restart. 
+ sudo update-rc.d -f elasticsearch remove +} + @test "[INIT.D] install elasticsearch" { clean_before_test install_package } +@test "[INIT.D] daemon isn't enabled on restart" { + # Rather than restart the VM which would be slow we check for the symlinks + # that init.d uses to restart the application on startup. + ! find /etc/rc[0123456].d | grep elasticsearch + # Note that we don't use -iname above because that'd have to look like: + # [ $(find /etc/rc[0123456].d -iname "elasticsearch*" | wc -l) -eq 0 ] + # Which isn't really clearer than what we do use. +} + @test "[INIT.D] start" { service elasticsearch start diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash index ebd23eeba2d..24900aa01ee 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash @@ -83,11 +83,6 @@ else } fi -@test "[$GROUP] install jvm-example plugin" { - install_jvm_example - remove_jvm_example -} - @test "[$GROUP] install jvm-example plugin with a custom path.plugins" { # Clean up after the last time this test was run rm -rf /tmp/plugins.* @@ -132,54 +127,181 @@ fi remove_jvm_example } +# Note that all of the tests from here to the end of the file expect to be run +# in sequence and don't take well to being run one at a time. 
+@test "[$GROUP] install jvm-example plugin" { + install_jvm_example +} + @test "[$GROUP] install icu plugin" { - install_and_remove_special_plugin analysis icu icu4j-*.jar + install_and_check_plugin analysis icu icu4j-*.jar } @test "[$GROUP] install kuromoji plugin" { - install_and_remove_special_plugin analysis kuromoji + install_and_check_plugin analysis kuromoji } @test "[$GROUP] install phonetic plugin" { - install_and_remove_special_plugin analysis phonetic commons-codec-*.jar + install_and_check_plugin analysis phonetic commons-codec-*.jar } @test "[$GROUP] install smartcn plugin" { - install_and_remove_special_plugin analysis smartcn + install_and_check_plugin analysis smartcn } @test "[$GROUP] install stempel plugin" { - install_and_remove_special_plugin analysis stempel -} - -@test "[$GROUP] install aws plugin" { - install_and_remove_special_plugin cloud aws aws-java-sdk-core-*.jar + install_and_check_plugin analysis stempel } @test "[$GROUP] install azure plugin" { - install_and_remove_special_plugin cloud azure azure-core-*.jar + install_and_check_plugin cloud azure azure-core-*.jar } @test "[$GROUP] install gce plugin" { - install_and_remove_special_plugin cloud gce google-api-client-*.jar + install_and_check_plugin cloud gce google-api-client-*.jar } -@test "[$GROUP] install delete by query" { - install_and_remove_special_plugin - delete-by-query +@test "[$GROUP] install delete by query plugin" { + install_and_check_plugin - delete-by-query +} + +@test "[$GROUP] install ec2 discovery plugin" { + install_and_check_plugin discovery ec2 aws-java-sdk-core-*.jar +} + +@test "[$GROUP] install multicast discovery plugin" { + install_and_check_plugin discovery multicast } @test "[$GROUP] install javascript plugin" { - install_and_remove_special_plugin lang javascript rhino-*.jar + install_and_check_plugin lang javascript rhino-*.jar } @test "[$GROUP] install python plugin" { - install_and_remove_special_plugin lang python jython-standalone-*.jar + 
install_and_check_plugin lang python jython-standalone-*.jar } -@test "[$GROUP] install murmur3 mapper" { - install_and_remove_special_plugin mapper murmur3 +@test "[$GROUP] install murmur3 mapper plugin" { + install_and_check_plugin mapper murmur3 } -@test "[$GROUP] install size mapper" { - install_and_remove_special_plugin mapper size +@test "[$GROUP] install size mapper plugin" { + install_and_check_plugin mapper size +} + +@test "[$GROUP] install s3 repository plugin" { + install_and_check_plugin repository s3 aws-java-sdk-core-*.jar +} + +@test "[$GROUP] install site example" { + # Doesn't use install_and_check_plugin because this is a site plugin + install_plugin site-example $(readlink -m site-example-*.zip) + assert_file_exist "$ESHOME/plugins/site-example/_site/index.html" +} + +@test "[$GROUP] start elasticsearch with all plugins installed" { + start_elasticsearch_service +} + +@test "[$GROUP] check the installed plugins matches the list of build plugins" { + curl -s localhost:9200/_cat/plugins?h=c | sed 's/ *$//' | + sort > /tmp/installed + ls /elasticsearch/plugins/*/pom.xml | cut -d '/' -f 4 | + sort > /tmp/expected + echo "Checking installed plugins (<) against the plugins directory (>):" + diff /tmp/installed /tmp/expected +} + +@test "[$GROUP] stop elasticsearch" { + stop_elasticsearch_service +} + +@test "[$GROUP] remove jvm-example plugin" { + remove_jvm_example +} + +@test "[$GROUP] remove icu plugin" { + remove_plugin analysis-icu +} + +@test "[$GROUP] remove kuromoji plugin" { + remove_plugin analysis-kuromoji +} + +@test "[$GROUP] remove phonetic plugin" { + remove_plugin analysis-phonetic +} + +@test "[$GROUP] remove smartcn plugin" { + remove_plugin analysis-smartcn +} + +@test "[$GROUP] remove stempel plugin" { + remove_plugin analysis-stempel +} + +@test "[$GROUP] remove aws plugin" { + remove_plugin cloud-aws +} + +@test "[$GROUP] remove azure plugin" { + remove_plugin cloud-azure +} + +@test "[$GROUP] remove gce plugin" { + remove_plugin 
cloud-gce +} + +@test "[$GROUP] remove delete by query plugin" { + remove_plugin delete-by-query +} + +@test "[$GROUP] remove ec2 discovery plugin" { + remove_plugin discovery-ec2 +} + +@test "[$GROUP] remove multicast discovery plugin" { + remove_plugin discovery-multicast +} + +@test "[$GROUP] remove javascript plugin" { + remove_plugin lang-javascript +} + +@test "[$GROUP] remove python plugin" { + remove_plugin lang-python +} + +@test "[$GROUP] remove murmur3 mapper plugin" { + remove_plugin mapper-murmur3 +} + +@test "[$GROUP] remove size mapper plugin" { + remove_plugin mapper-size +} + +@test "[$GROUP] remove s3 repository plugin" { + remove_plugin repository-s3 +} + +@test "[$GROUP] remove site example plugin" { + remove_plugin site-example +} + +@test "[$GROUP] start elasticsearch with all plugins removed" { + start_elasticsearch_service +} + +@test "[$GROUP] check that there are now no plugins installed" { + curl -s localhost:9200/_cat/plugins > /tmp/installed + local installedCount=$(cat /tmp/installed | wc -l) + [ "$installedCount" == "0" ] || { + echo "Expected all plugins to be removed but found $installedCount:" + cat /tmp/installed + false + } +} + +@test "[$GROUP] stop elasticsearch" { + stop_elasticsearch_service } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash index 2ca6e7501ab..787f774acbc 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash @@ -36,6 +36,12 @@ install_plugin() { assert_file_exist "$ESPLUGINS/$name" assert_file_exist "$ESPLUGINS/$name/plugin-descriptor.properties" +} + +install_jvm_plugin() { + local name=$1 + local path="$2" + install_plugin $name "$path" assert_file_exist "$ESPLUGINS/$name/$name"*".jar" } @@ -53,7 +59,7 @@ remove_plugin() { # placements for non-site plugins. 
install_jvm_example() { local relativePath=${1:-$(readlink -m jvm-example-*.zip)} - install_plugin jvm-example "$relativePath" + install_jvm_plugin jvm-example "$relativePath" assert_file_exist "$ESHOME/bin/jvm-example" assert_file_exist "$ESHOME/bin/jvm-example/test" @@ -74,14 +80,14 @@ remove_jvm_example() { assert_file_exist "$ESCONFIG/jvm-example/example.yaml" } -# Install and remove a plugin with a special prefix. For the most part prefixes -# are just useful for grouping but the "analysis" prefix is special because all +# Install a plugin with a special prefix. For the most part prefixes are just +# useful for grouping but the "analysis" prefix is special because all # analysis plugins come with a corresponding lucene-analyzers jar. # $1 - the prefix # $2 - the plugin name # $@ - all remaining arguments are jars that must exist in the plugin's # installation directory -install_and_remove_special_plugin() { +install_and_check_plugin() { local prefix=$1 shift local name=$1 @@ -93,12 +99,11 @@ install_and_remove_special_plugin() { local fullName="$prefix-$name" fi - install_plugin $fullName "$(readlink -m $fullName-*.zip)" + install_jvm_plugin $fullName "$(readlink -m $fullName-*.zip)" if [ $prefix == 'analysis' ]; then assert_file_exist "$(readlink -m $ESPLUGINS/$fullName/lucene-analyzers-$name-*.jar)" fi for file in "$@"; do assert_file_exist "$(readlink -m $ESPLUGINS/$fullName/$file)" done - remove_plugin $fullName } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml index f076a3b1859..5f6e3e9ae09 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml @@ -1,30 +1,64 @@ --- -"Test cat nodes attrs output": - - - skip: - version: "all" - reason: "Waiting for #12558" - +"Help": - do: - cat.nodeattrs: {} + cat.nodeattrs: 
+ help: true - match: $body: | - /((\S+)\s+(\S+)\s+(\d{1,3}\.){3}\d{1,3}\s+(\S+)\s+(\S+)\s*)+/ + /^ node .+ \n + id .+ \n + pid .+ \n + host .+ \n + ip .+ \n + port .+ \n + attr .+ \n + value .+ \n + $/ + +--- +"Test cat nodes attrs output": + - do: + cat.nodeattrs: {} + - match: # All attributes look good + $body: | + /^# node\s+ host\s+ ip\s+ attr\s+ value\s* \n + (((\S+\s?){1,10})\s+(\S+)\s+(\d{1,3}\.){3}\d{1,3}\s+(\S+)\s+ (\S+)\s* \n)+ + $/ + - match: # A specific planted attribute is present and looks good + $body: | + /# node\s+ host\s+ ip\s+ attr\s+ value\s* \n + ((\S+\s?){1,10})\s+ (\S+)\s+(\d{1,3}\.){3}\d{1,3}\s+testattr\s+ test \s* \n + / + # Note for future editors: its quite possible to construct a regex with an + # intense amount of backtracking if you use something like (\S\s?)+ to match + # node name. - do: cat.nodeattrs: v: true - - - match: + - match: # All attributes look good including the heading $body: | - /((\S+)\s+(\S+)\s+(\d{1,3}\.){3}\d{1,3}\s+(\S+)\s+(\S+)\s*)+/ + /^ node\s+ host\s+ ip\s+ attr\s+ value\s* \n + (((\S+\s?){1,10})\s+(\S+)\s+(\d{1,3}\.){3}\d{1,3}\s+(\S+)\s+ (\S+)\s* \n)+ + $/ + - match: # A specific planted attribute is present and looks good + $body: | + /# node\s+ host\s+ ip\s+ attr\s+ value\s* \n + ((\S+\s?){1,10})\s+ (\S+)\s+(\d{1,3}\.){3}\d{1,3}\s+testattr\s+ test \s* \n + / - do: cat.nodeattrs: h: attr,value v: true - - - match: + - match: # All attributes look good $body: | - /((\S+)\s+(\S+)\s*)+/ + /^ attr\s+ value\s*\n + ((\S+)\s+ (\S+)\s*)+ + $/ + - match: # A specific planted attribute is present and looks good + $body: | + /# attr\s+ value\s*\n + testattr\s+ test\s*\n + / diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml index 4bc3e996fca..c34437ce064 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml @@ -28,8 +28,8 @@ index1 \s+ \d \s+ # shard \d+ \s+ # time - (store|replica|snapshot|relocating) \s+ # type - (init|index|start|translog|finalize|done) \s+ # stage + (store|replica|snapshot|relocating) \s+ # type + (init|index|verify_index|translog|finalize|done) \s+ # stage [-\w./]+ \s+ # source_host [-\w./]+ \s+ # target_host [-\w./]+ \s+ # repository diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml index 40f8740db96..766d1782ff1 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml @@ -39,6 +39,7 @@ indexing.index_current .+ \n indexing.index_time .+ \n indexing.index_total .+ \n + indexing.index_failed .+ \n merges.current .+ \n merges.current_docs .+ \n merges.current_size .+ \n