jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
for (Path jar : jarStream) {
- if (map.put(jar.toUri().toURL().getFile(), permissions) != null) {
- // just be paranoid ok?
- throw new IllegalStateException("per-plugin permissions already granted for jar file: " + jar);
- }
+ codebases.add(jar.toRealPath().toUri().toURL());
+ }
+ }
+
+ // parse the plugin's policy file into a set of permissions
+ Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()]));
+
+ // consult this policy for each of the plugin's jars:
+ for (URL url : codebases) {
+ if (map.put(url.getFile(), policy) != null) {
+ // just be paranoid ok?
+ throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url);
}
}
}
@@ -194,6 +163,35 @@ final class Security {
return Collections.unmodifiableMap(map);
}
+ /**
+ * Reads and returns the specified {@code policyFile}.
+ *
+ * Resources (e.g. jar files and directories) listed in {@code codebases} locations
+ * will be provided to the policy file via a system property of the short name:
+ * e.g. ${codebase.joda-convert-1.2.jar} would map to the full URL.
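+ *
+ * For example (illustrative only; the grant and permission shown here are hypothetical),
+ * a plugin policy file could then reference one of its jars by short name:
+ *
+ * grant codeBase "${codebase.joda-convert-1.2.jar}" {
+ *   permission java.lang.RuntimePermission "getClassLoader";
+ * };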
+ */
+ @SuppressForbidden(reason = "accesses fully qualified URLs to configure security")
+ static Policy readPolicy(URL policyFile, URL codebases[]) {
+ try {
+ try {
+ // set codebase properties
+ for (URL url : codebases) {
+ String shortName = PathUtils.get(url.toURI()).getFileName().toString();
+ System.setProperty("codebase." + shortName, url.toString());
+ }
+ return Policy.getInstance("JavaPolicy", new URIParameter(policyFile.toURI()));
+ } finally {
+ // clear codebase properties
+ for (URL url : codebases) {
+ String shortName = PathUtils.get(url.toURI()).getFileName().toString();
+ System.clearProperty("codebase." + shortName);
+ }
+ }
+ } catch (NoSuchAlgorithmException | URISyntaxException e) {
+ throw new IllegalArgumentException("unable to parse policy file `" + policyFile + "`", e);
+ }
+ }
+
/** returns dynamic Permissions to configured paths */
static Permissions createPermissions(Environment environment) throws IOException {
Permissions policy = new Permissions();
diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
index 17551012edb..10ef7bcb13c 100644
--- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
+++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
@@ -143,7 +143,7 @@ public class TransportClient extends AbstractClient {
modules.add(new ClusterNameModule(this.settings));
modules.add(new ThreadPoolModule(threadPool));
modules.add(new TransportModule(this.settings));
- modules.add(new SearchModule(this.settings) {
+ modules.add(new SearchModule() {
@Override
protected void configure() {
// noop
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryFilterParser.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/NoOpShardStateActionListener.java
similarity index 50%
rename from core/src/main/java/org/elasticsearch/index/query/QueryFilterParser.java
rename to core/src/main/java/org/elasticsearch/cluster/action/shard/NoOpShardStateActionListener.java
index e13661c814c..ed0a7f56b9c 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryFilterParser.java
+++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/NoOpShardStateActionListener.java
@@ -17,30 +17,7 @@
* under the License.
*/
-package org.elasticsearch.index.query;
+package org.elasticsearch.cluster.action.shard;
-import java.io.IOException;
-
-/**
- * Parser for query filter
- * @deprecated use any query instead directly, possible since queries and filters are merged.
- */
-// TODO: remove when https://github.com/elastic/elasticsearch/issues/13326 is fixed
-@Deprecated
-public class QueryFilterParser implements QueryParser {
-
- @Override
- public String[] names() {
- return new String[]{QueryFilterBuilder.NAME};
- }
-
- @Override
- public QueryFilterBuilder fromXContent(QueryParseContext parseContext) throws IOException {
- return new QueryFilterBuilder(parseContext.parseInnerQueryBuilder());
- }
-
- @Override
- public QueryFilterBuilder getBuilderPrototype() {
- return QueryFilterBuilder.PROTOTYPE;
- }
-}
\ No newline at end of file
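+/**
+ * A {@link ShardStateAction.Listener} that relies on the interface's default no-op
+ * callbacks, for callers that do not care about the outcome of a shard-failed notification.
+ */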
+public class NoOpShardStateActionListener implements ShardStateAction.Listener {
+}
diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java
index 02867c930be..fc465ae283c 100644
--- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java
+++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java
@@ -77,27 +77,29 @@ public class ShardStateAction extends AbstractComponent {
transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler());
}
- public void shardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure) {
+ public void shardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) {
DiscoveryNode masterNode = clusterService.state().nodes().masterNode();
if (masterNode == null) {
logger.warn("can't send shard failed for {}, no master known.", shardRouting);
+ listener.onShardFailedNoMaster();
return;
}
- innerShardFailed(shardRouting, indexUUID, masterNode, message, failure);
+ innerShardFailed(shardRouting, indexUUID, masterNode, message, failure, listener);
}
- public void resendShardFailed(final ShardRouting shardRouting, final String indexUUID, final DiscoveryNode masterNode, final String message, @Nullable final Throwable failure) {
+ public void resendShardFailed(final ShardRouting shardRouting, final String indexUUID, final DiscoveryNode masterNode, final String message, @Nullable final Throwable failure, Listener listener) {
logger.trace("{} re-sending failed shard for {}, indexUUID [{}], reason [{}]", failure, shardRouting.shardId(), shardRouting, indexUUID, message);
- innerShardFailed(shardRouting, indexUUID, masterNode, message, failure);
+ innerShardFailed(shardRouting, indexUUID, masterNode, message, failure, listener);
}
- private void innerShardFailed(final ShardRouting shardRouting, final String indexUUID, final DiscoveryNode masterNode, final String message, final Throwable failure) {
+ private void innerShardFailed(final ShardRouting shardRouting, final String indexUUID, final DiscoveryNode masterNode, final String message, final Throwable failure, Listener listener) {
ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, failure);
transportService.sendRequest(masterNode,
SHARD_FAILED_ACTION_NAME, shardRoutingEntry, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
@Override
public void handleException(TransportException exp) {
logger.warn("failed to send failed shard to {}", exp, masterNode);
+ listener.onShardFailedFailure(masterNode, exp);
}
});
}
@@ -284,4 +286,9 @@ public class ShardStateAction extends AbstractComponent {
return "" + shardRouting + ", indexUUID [" + indexUUID + "], message [" + message + "], failure [" + ExceptionsHelper.detailedMessage(failure) + "]";
}
}
+
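+ /**
+ * Callbacks for the outcome of notifying the master about a failed shard.
+ * Both methods default to no-ops so implementations only override what they need.
+ */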
+ public interface Listener {
+ default void onShardFailedNoMaster() {}
+ default void onShardFailedFailure(final DiscoveryNode master, final TransportException e) {}
+ }
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
index 251f45e2592..53309e77400 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
@@ -76,7 +76,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
}
/**
- * Translates the provided index expression into actual concrete indices.
+ * Translates the provided index expression into actual concrete indices, properly deduplicated.
*
* @param state the cluster state containing all the data to resolve to expressions to concrete indices
* @param options defines how the aliases or indices need to be resolved to concrete indices
@@ -94,7 +94,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
}
/**
- * Translates the provided index expression into actual concrete indices.
+ * Translates the provided index expression into actual concrete indices, properly deduplicated.
*
* @param state the cluster state containing all the data to resolve to expressions to concrete indices
* @param options defines how the aliases or indices need to be resolved to concrete indices
@@ -141,7 +141,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
}
}
- List<String> concreteIndices = new ArrayList<>(expressions.size());
+ final Set<String> concreteIndices = new HashSet<>(expressions.size());
for (String expression : expressions) {
AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(expression);
if (aliasOrIndex == null) {
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java
index cab86b60d44..f4c5ba513f0 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java
@@ -37,9 +37,9 @@ import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.threadpool.ThreadPool;
+import java.util.Arrays;
+import java.util.Collection;
import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.Semaphore;
-import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
@@ -66,9 +66,11 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
this.nodeIndexDeletedAction = nodeIndexDeletedAction;
}
- public void deleteIndex(final Request request, final Listener userListener) {
+ public void deleteIndices(final Request request, final Listener userListener) {
+ Collection<String> indices = Arrays.asList(request.indices);
final DeleteIndexListener listener = new DeleteIndexListener(userListener);
- clusterService.submitStateUpdateTask("delete-index [" + request.index + "]", Priority.URGENT, new ClusterStateUpdateTask() {
+
+ clusterService.submitStateUpdateTask("delete-index " + indices, Priority.URGENT, new ClusterStateUpdateTask() {
@Override
public TimeValue timeout() {
@@ -82,34 +84,32 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
@Override
public ClusterState execute(final ClusterState currentState) {
- if (!currentState.metaData().hasConcreteIndex(request.index)) {
- throw new IndexNotFoundException(request.index);
- }
-
- logger.info("[{}] deleting index", request.index);
-
RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable());
- routingTableBuilder.remove(request.index);
+ MetaData.Builder metaDataBuilder = MetaData.builder(currentState.metaData());
+ ClusterBlocks.Builder clusterBlocksBuilder = ClusterBlocks.builder().blocks(currentState.blocks());
- MetaData newMetaData = MetaData.builder(currentState.metaData())
- .remove(request.index)
- .build();
+ for (final String index: indices) {
+ if (!currentState.metaData().hasConcreteIndex(index)) {
+ throw new IndexNotFoundException(index);
+ }
- RoutingAllocation.Result routingResult = allocationService.reroute(
- ClusterState.builder(currentState).routingTable(routingTableBuilder.build()).metaData(newMetaData).build());
-
- ClusterBlocks blocks = ClusterBlocks.builder().blocks(currentState.blocks()).removeIndexBlocks(request.index).build();
+ logger.debug("[{}] deleting index", index);
+ routingTableBuilder.remove(index);
+ clusterBlocksBuilder.removeIndexBlocks(index);
+ metaDataBuilder.remove(index);
+ }
// wait for events from all nodes that it has been removed from their respective metadata...
int count = currentState.nodes().size();
// add the notifications that the store was deleted from *data* nodes
count += currentState.nodes().dataNodes().size();
- final AtomicInteger counter = new AtomicInteger(count);
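+ // each index is acknowledged separately by every node, so scale the expected event count by the number of indices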
+ final AtomicInteger counter = new AtomicInteger(count * indices.size());
+
// this listener will be notified once we get back a notification based on the cluster state change below.
final NodeIndexDeletedAction.Listener nodeIndexDeleteListener = new NodeIndexDeletedAction.Listener() {
@Override
- public void onNodeIndexDeleted(String index, String nodeId) {
- if (index.equals(request.index)) {
+ public void onNodeIndexDeleted(String deleted, String nodeId) {
+ if (indices.contains(deleted)) {
if (counter.decrementAndGet() == 0) {
listener.onResponse(new Response(true));
nodeIndexDeletedAction.remove(this);
@@ -118,8 +118,8 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
}
@Override
- public void onNodeIndexStoreDeleted(String index, String nodeId) {
- if (index.equals(request.index)) {
+ public void onNodeIndexStoreDeleted(String deleted, String nodeId) {
+ if (indices.contains(deleted)) {
if (counter.decrementAndGet() == 0) {
listener.onResponse(new Response(true));
nodeIndexDeletedAction.remove(this);
@@ -128,15 +128,15 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
}
};
nodeIndexDeletedAction.add(nodeIndexDeleteListener);
-
- listener.future = threadPool.schedule(request.timeout, ThreadPool.Names.SAME, new Runnable() {
- @Override
- public void run() {
- listener.onResponse(new Response(false));
- nodeIndexDeletedAction.remove(nodeIndexDeleteListener);
- }
+ listener.future = threadPool.schedule(request.timeout, ThreadPool.Names.SAME, () -> {
+ listener.onResponse(new Response(false));
+ nodeIndexDeletedAction.remove(nodeIndexDeleteListener);
});
+ MetaData newMetaData = metaDataBuilder.build();
+ ClusterBlocks blocks = clusterBlocksBuilder.build();
+ RoutingAllocation.Result routingResult = allocationService.reroute(
+ ClusterState.builder(currentState).routingTable(routingTableBuilder.build()).metaData(newMetaData).build());
return ClusterState.builder(currentState).routingResult(routingResult).metaData(newMetaData).blocks(blocks).build();
}
@@ -173,7 +173,6 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
}
}
-
public interface Listener {
void onResponse(Response response);
@@ -183,13 +182,13 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
public static class Request {
- final String index;
+ final String[] indices;
TimeValue timeout = TimeValue.timeValueSeconds(10);
TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT;
- public Request(String index) {
- this.index = index;
+ public Request(String[] indices) {
+ this.indices = indices;
}
public Request timeout(TimeValue timeout) {
diff --git a/core/src/main/java/org/elasticsearch/common/cache/Cache.java b/core/src/main/java/org/elasticsearch/common/cache/Cache.java
index a686ecc9645..595ac088140 100644
--- a/core/src/main/java/org/elasticsearch/common/cache/Cache.java
+++ b/core/src/main/java/org/elasticsearch/common/cache/Cache.java
@@ -25,12 +25,11 @@ import org.elasticsearch.common.util.concurrent.ReleasableLock;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-import java.util.concurrent.FutureTask;
import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.function.BiFunction;
import java.util.function.ToLongBiFunction;
/**
@@ -175,7 +174,7 @@ public class Cache<K, V> {
ReleasableLock readLock = new ReleasableLock(segmentLock.readLock());
ReleasableLock writeLock = new ReleasableLock(segmentLock.writeLock());
- Map<K, Future<Entry<K, V>>> map = new HashMap<>();
+ Map<K, CompletableFuture<Entry<K, V>>> map = new HashMap<>();
SegmentStats segmentStats = new SegmentStats();
@@ -187,20 +186,28 @@ public class Cache {
* @return the entry if there was one, otherwise null
*/
Entry<K, V> get(K key, long now) {
- Future<Entry<K, V>> future;
+ CompletableFuture<Entry<K, V>> future;
Entry<K, V> entry = null;
try (ReleasableLock ignored = readLock.acquire()) {
future = map.get(key);
}
if (future != null) {
- segmentStats.hit();
- try {
- entry = future.get();
- entry.accessTime = now;
- } catch (ExecutionException | InterruptedException e) {
- throw new IllegalStateException("future should be a completedFuture for which get should not throw", e);
- }
- } else {
+ try {
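+ // unwrap the completed future: record a hit and bump the access time if it held an entry, otherwise record a miss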
+ entry = future.handle((ok, ex) -> {
+ if (ok != null) {
+ segmentStats.hit();
+ ok.accessTime = now;
+ return ok;
+ } else {
+ segmentStats.miss();
+ return null;
+ }
+ }).get();
+ } catch (ExecutionException | InterruptedException e) {
+ throw new IllegalStateException(e);
+ }
+ } else {
segmentStats.miss();
}
return entry;
@@ -216,11 +223,19 @@ public class Cache {
*/
Tuple<Entry<K, V>, Entry<K, V>> put(K key, V value, long now) {
Entry<K, V> entry = new Entry<>(key, value, now);
- Entry<K, V> existing;
+ Entry<K, V> existing = null;
try (ReleasableLock ignored = writeLock.acquire()) {
try {
- Future<Entry<K, V>> future = map.put(key, CompletableFuture.completedFuture(entry));
- existing = future != null ? future.get() : null;
+ CompletableFuture<Entry<K, V>> future = map.put(key, CompletableFuture.completedFuture(entry));
+ if (future != null) {
+ existing = future.handle((ok, ex) -> {
+ if (ok != null) {
+ return ok;
+ } else {
+ return null;
+ }
+ }).get();
+ }
} catch (ExecutionException | InterruptedException e) {
throw new IllegalStateException("future should be a completedFuture for which get should not throw", e);
}
@@ -235,17 +250,23 @@ public class Cache {
* @return the removed entry if there was one, otherwise null
*/
Entry<K, V> remove(K key) {
- Future<Entry<K, V>> future;
+ CompletableFuture<Entry<K, V>> future;
Entry<K, V> entry = null;
try (ReleasableLock ignored = writeLock.acquire()) {
future = map.remove(key);
}
if (future != null) {
- segmentStats.eviction();
try {
- entry = future.get();
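+ // only count an eviction if the removed future actually completed with an entry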
+ entry = future.handle((ok, ex) -> {
+ if (ok != null) {
+ segmentStats.eviction();
+ return ok;
+ } else {
+ return null;
+ }
+ }).get();
} catch (ExecutionException | InterruptedException e) {
- throw new IllegalStateException("future should be a completedFuture for which get should not throw", e);
+ throw new IllegalStateException(e);
}
}
return entry;
@@ -327,39 +348,57 @@ public class Cache {
// the segment lock; to do this, we atomically put a future in the map that can load the value, and then
// get the value from this future on the thread that won the race to place the future into the segment map
CacheSegment segment = getCacheSegment(key);
- Future<Entry<K, V>> future;
- FutureTask<Entry<K, V>> task = new FutureTask<>(() -> new Entry<>(key, loader.load(key), now));
+ CompletableFuture<Entry<K, V>> future;
+ CompletableFuture<Entry<K, V>> completableFuture = new CompletableFuture<>();
+
try (ReleasableLock ignored = segment.writeLock.acquire()) {
- future = segment.map.putIfAbsent(key, task);
- }
- if (future == null) {
- future = task;
- task.run();
+ future = segment.map.putIfAbsent(key, completableFuture);
}
- Entry<K, V> entry;
- try {
- entry = future.get();
- } catch (ExecutionException | InterruptedException e) {
- // if the future ended exceptionally, we do not want to pollute the cache
- // however, we have to take care to ensure that the polluted entry has not already been replaced
- try (ReleasableLock ignored = segment.writeLock.acquire()) {
- Future<Entry<K, V>> sanity = segment.map.get(key);
- try {
- sanity.get();
- } catch (ExecutionException | InterruptedException gotcha) {
- segment.map.remove(key);
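+ // on success the handler promotes the entry in the LRU list and unwraps its value;
+ // on failure it evicts a future that completed exceptionally so the cache is not polluted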
+ BiFunction<? super Entry<K, V>, Throwable, ? extends V> handler = (ok, ex) -> {
+ if (ok != null) {
+ try (ReleasableLock ignored = lruLock.acquire()) {
+ promote(ok, now);
}
+ return ok.value;
+ } else {
+ try (ReleasableLock ignored = segment.writeLock.acquire()) {
+ CompletableFuture<Entry<K, V>> sanity = segment.map.get(key);
+ if (sanity != null && sanity.isCompletedExceptionally()) {
+ segment.map.remove(key);
+ }
+ }
+ return null;
}
- throw (e instanceof ExecutionException) ? (ExecutionException)e : new ExecutionException(e);
+ };
+
+ CompletableFuture<V> completableValue;
+ if (future == null) {
+ future = completableFuture;
+ completableValue = future.handle(handler);
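+ // this thread won the race to install the future: run the loader and complete it,
+ // completing exceptionally on a loader failure or a null value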
+ V loaded;
+ try {
+ loaded = loader.load(key);
+ } catch (Exception e) {
+ future.completeExceptionally(e);
+ throw new ExecutionException(e);
+ }
+ if (loaded == null) {
+ NullPointerException npe = new NullPointerException("loader returned a null value");
+ future.completeExceptionally(npe);
+ throw new ExecutionException(npe);
+ } else {
+ future.complete(new Entry<>(key, loaded, now));
+ }
+ } else {
+ completableValue = future.handle(handler);
}
- if (entry.value == null) {
- throw new ExecutionException(new NullPointerException("loader returned a null value"));
+
+ try {
+ value = completableValue.get();
+ } catch (InterruptedException e) {
+ throw new IllegalStateException(e);
}
- try (ReleasableLock ignored = lruLock.acquire()) {
- promote(entry, now);
- }
- value = entry.value;
}
return value;
}
diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java
index 1a3b50cc339..7130537fceb 100644
--- a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java
+++ b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java
@@ -20,8 +20,8 @@
package org.elasticsearch.common.geo;
import org.apache.lucene.util.BitUtil;
-import org.apache.lucene.util.XGeoHashUtils;
-import org.apache.lucene.util.XGeoUtils;
+import org.apache.lucene.util.GeoHashUtils;
+import org.apache.lucene.util.GeoUtils;
/**
*
@@ -81,14 +81,14 @@ public final class GeoPoint {
}
public GeoPoint resetFromIndexHash(long hash) {
- lon = XGeoUtils.mortonUnhashLon(hash);
- lat = XGeoUtils.mortonUnhashLat(hash);
+ lon = GeoUtils.mortonUnhashLon(hash);
+ lat = GeoUtils.mortonUnhashLat(hash);
return this;
}
public GeoPoint resetFromGeoHash(String geohash) {
- final long hash = XGeoHashUtils.mortonEncode(geohash);
- return this.reset(XGeoUtils.mortonUnhashLat(hash), XGeoUtils.mortonUnhashLon(hash));
+ final long hash = GeoHashUtils.mortonEncode(geohash);
+ return this.reset(GeoUtils.mortonUnhashLat(hash), GeoUtils.mortonUnhashLon(hash));
}
public GeoPoint resetFromGeoHash(long geohashLong) {
@@ -113,11 +113,11 @@ public final class GeoPoint {
}
public final String geohash() {
- return XGeoHashUtils.stringEncode(lon, lat);
+ return GeoHashUtils.stringEncode(lon, lat);
}
public final String getGeohash() {
- return XGeoHashUtils.stringEncode(lon, lat);
+ return GeoHashUtils.stringEncode(lon, lat);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java b/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java
index 695db015eda..64c657c8b6f 100644
--- a/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java
+++ b/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java
@@ -28,11 +28,7 @@ import java.util.Collection;
import java.util.List;
/**
- * Overrides bounding box logic in ShapeCollection base class to comply with
- * OGC OpenGIS Abstract Specification: An Object Model for Interoperable Geoprocessing.
- *
- * NOTE: This algorithm is O(N) and can possibly be improved O(log n) using an internal R*-Tree
- * data structure for a collection of bounding boxes
+ * Extends spatial4j ShapeCollection for points_only shape indexing support
*/
public class XShapeCollection<S extends Shape> extends ShapeCollection<S> {
@@ -49,42 +45,4 @@ public class XShapeCollection<S extends Shape> extends ShapeCollection<S> {
public void setPointsOnly(boolean pointsOnly) {
this.pointsOnly = pointsOnly;
}
-
- @Override
- protected Rectangle computeBoundingBox(Collection<? extends Shape> shapes, SpatialContext ctx) {
- Rectangle retBox = shapes.iterator().next().getBoundingBox();
- for (Shape geom : shapes) {
- retBox = expandBBox(retBox, geom.getBoundingBox());
- }
- return retBox;
- }
-
- /**
- * Spatial4J shapes have no knowledge of directed edges. For this reason, a bounding box
- * that wraps the dateline can have a min longitude that is mathematically > than the
- * Rectangles' minX value. This is an issue for geometric collections (e.g., MultiPolygon
- * and ShapeCollection) Until geometry logic can be cleaned up in Spatial4J, ES provides
- * the following expansion algorithm for GeometryCollections
- */
- private Rectangle expandBBox(Rectangle bbox, Rectangle expand) {
- if (bbox.equals(expand) || bbox.equals(SpatialContext.GEO.getWorldBounds())) {
- return bbox;
- }
-
- double minX = bbox.getMinX();
- double eMinX = expand.getMinX();
- double maxX = bbox.getMaxX();
- double eMaxX = expand.getMaxX();
- double minY = bbox.getMinY();
- double eMinY = expand.getMinY();
- double maxY = bbox.getMaxY();
- double eMaxY = expand.getMaxY();
-
- bbox.reset(Math.min(Math.min(minX, maxX), Math.min(eMinX, eMaxX)),
- Math.max(Math.max(minX, maxX), Math.max(eMinX, eMaxX)),
- Math.min(Math.min(minY, maxY), Math.min(eMinY, eMaxY)),
- Math.max(Math.max(minY, maxY), Math.max(eMinY, eMaxY)));
-
- return bbox;
- }
}
diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java
index 25c0743b938..16a9796d8b6 100644
--- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java
+++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java
@@ -60,9 +60,9 @@ public class Lucene {
public static final Version VERSION = Version.LATEST;
public static final Version ANALYZER_VERSION = VERSION;
public static final Version QUERYPARSER_VERSION = VERSION;
- public static final String LATEST_DOC_VALUES_FORMAT = "Lucene50";
+ public static final String LATEST_DOC_VALUES_FORMAT = "Lucene54";
public static final String LATEST_POSTINGS_FORMAT = "Lucene50";
- public static final String LATEST_CODEC = "Lucene53";
+ public static final String LATEST_CODEC = "Lucene54";
static {
Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class);
diff --git a/core/src/main/java/org/elasticsearch/discovery/Discovery.java b/core/src/main/java/org/elasticsearch/discovery/Discovery.java
index 13eb86f1ce4..980543d45e6 100644
--- a/core/src/main/java/org/elasticsearch/discovery/Discovery.java
+++ b/core/src/main/java/org/elasticsearch/discovery/Discovery.java
@@ -87,4 +87,10 @@ public interface Discovery extends LifecycleComponent {
super(msg, cause, args);
}
}
+
+ /**
+ * @return stats about the discovery
+ */
+ DiscoveryStats stats();
+
}
diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryStats.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryStats.java
new file mode 100644
index 00000000000..dcd75b07651
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryStats.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery;
+
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilderString;
+import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats;
+
+import java.io.IOException;
+
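+/**
+ * Stats about the discovery subsystem; currently only the pending cluster state queue.
+ */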
+public class DiscoveryStats implements Streamable, ToXContent {
+
+ @Nullable
+ private PendingClusterStateStats queueStats;
+
+ public DiscoveryStats(PendingClusterStateStats queueStats) {
+ this.queueStats = queueStats;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject(Fields.DISCOVERY);
+
+ if (queueStats != null) {
+ queueStats.toXContent(builder, params);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ if (in.readBoolean()) {
+ queueStats = new PendingClusterStateStats();
+ queueStats.readFrom(in);
+ }
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ if (queueStats != null) {
+ out.writeBoolean(true);
+ queueStats.writeTo(out);
+ } else {
+ out.writeBoolean(false);
+ }
+ }
+
+ static final class Fields {
+ static final XContentBuilderString DISCOVERY = new XContentBuilderString("discovery");
+ }
+
+ public PendingClusterStateStats getQueueStats() {
+ return queueStats;
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
index 0979554b0c4..dd001294b97 100644
--- a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
+++ b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
@@ -316,6 +316,11 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implements Discovery {
}
}
+ @Override
+ public DiscoveryStats stats() {
+ return new DiscoveryStats(null);
+ }
+
private LocalDiscovery[] members() {
ClusterGroup clusterGroup = clusterGroups.get(clusterName);
if (clusterGroup == null) {
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
index 9952f65a1b8..2b126a98ce2 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
@@ -43,6 +43,8 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoverySettings;
+import org.elasticsearch.discovery.DiscoveryStats;
+import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats;
import org.elasticsearch.discovery.InitialStateDiscoveryListener;
import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.discovery.zen.fd.MasterFaultDetection;
@@ -337,6 +339,12 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implements Discovery, PingContextProvider {
}
}
+ @Override
+ public DiscoveryStats stats() {
+ PendingClusterStateStats queueStats = publishClusterState.pendingStatesQueue().stats();
+ return new DiscoveryStats(queueStats);
+ }
+
/**
* returns true if zen discovery is started and there is a currently a background thread active for (re)joining
* the cluster used for testing.
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStateStats.java b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStateStats.java
new file mode 100644
index 00000000000..44265b0e481
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStateStats.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen.publish;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilderString;
+
+import java.io.IOException;
+
+/**
+ * Class encapsulating stats about the PendingClusterStatesQueue
+ */
+public class PendingClusterStateStats implements Streamable, ToXContent {
+
+ private int total;
+ private int pending;
+ private int committed;
+
+ public PendingClusterStateStats() {
+
+ }
+
+ public PendingClusterStateStats(int total, int pending, int committed) {
+ this.total = total;
+ this.pending = pending;
+ this.committed = committed;
+ }
+
+ public int getCommitted() {
+ return committed;
+ }
+
+ public int getPending() {
+ return pending;
+ }
+
+ public int getTotal() {
+ return total;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject(Fields.QUEUE);
+ builder.field(Fields.TOTAL, total);
+ builder.field(Fields.PENDING, pending);
+ builder.field(Fields.COMMITTED, committed);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ total = in.readVInt();
+ pending = in.readVInt();
+ committed = in.readVInt();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVInt(total);
+ out.writeVInt(pending);
+ out.writeVInt(committed);
+ }
+
+ static final class Fields {
+ static final XContentBuilderString QUEUE = new XContentBuilderString("cluster_state_queue");
+ static final XContentBuilderString TOTAL = new XContentBuilderString("total");
+ static final XContentBuilderString PENDING = new XContentBuilderString("pending");
+ static final XContentBuilderString COMMITTED = new XContentBuilderString("committed");
+ }
+
+ @Override
+ public String toString() {
+ return "PendingClusterStateStats(total=" + total + ", pending=" + pending + ", committed=" + committed + ")";
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueue.java b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueue.java
index e3550e657fc..2f444f50288 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueue.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueue.java
@@ -283,4 +283,17 @@ public class PendingClusterStatesQueue {
}
}
+ public synchronized PendingClusterStateStats stats() {
+
+ // calculate committed cluster state
+ int committed = 0;
+ for (ClusterStateContext clusterStatsContext : pendingStates) {
+ if (clusterStatsContext.committed()) {
+ committed += 1;
+ }
+ }
+
+ return new PendingClusterStateStats(pendingStates.size(), pendingStates.size() - committed, committed);
+ }
+
}
diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java
index 3d07ca632b5..cb0f3113ff5 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexModule.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java
@@ -19,9 +19,14 @@
package org.elasticsearch.index;
+import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.cache.IndexCache;
+import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
+import org.elasticsearch.index.cache.query.QueryCache;
+import org.elasticsearch.index.cache.query.index.IndexQueryCache;
+import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.engine.InternalEngineFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
@@ -33,6 +38,8 @@ import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.IndexStoreConfig;
+import org.elasticsearch.indices.IndicesWarmer;
+import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import java.util.*;
import java.util.function.BiFunction;
@@ -50,25 +57,37 @@ import java.util.function.Consumer;
* Settings update listener - Custom settings update listener can be registered via {@link #addIndexSettingsListener(Consumer)}
*
*/
-public class IndexModule extends AbstractModule {
+public final class IndexModule extends AbstractModule {
public static final String STORE_TYPE = "index.store.type";
public static final String SIMILARITY_SETTINGS_PREFIX = "index.similarity";
+ public static final String INDEX_QUERY_CACHE = "index";
+ public static final String NONE_QUERY_CACHE = "none";
+ public static final String QUERY_CACHE_TYPE = "index.queries.cache.type";
+ // for test purposes only
+ public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything";
private final IndexSettings indexSettings;
private final IndexStoreConfig indexStoreConfig;
+ private final IndicesQueryCache indicesQueryCache;
// pkg private so tests can mock
Class<? extends EngineFactory> engineFactoryImpl = InternalEngineFactory.class;
- Class<? extends IndexSearcherWrapper> indexSearcherWrapper = null;
+ private SetOnce<IndexSearcherWrapperFactory> indexSearcherWrapper = new SetOnce<>();
private final Set<Consumer<Settings>> settingsConsumers = new HashSet<>();
private final Set<IndexEventListener> indexEventListeners = new HashSet<>();
private IndexEventListener listener;
private final Map<String, BiFunction<String, Settings, SimilarityProvider>> similarities = new HashMap<>();
private final Map<String, BiFunction<IndexSettings, IndexStoreConfig, IndexStore>> storeTypes = new HashMap<>();
+ private final Map<String, BiFunction<IndexSettings, IndicesQueryCache, QueryCache>> queryCaches = new HashMap<>();
+ private IndicesWarmer indicesWarmer;
- public IndexModule(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig) {
+ public IndexModule(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig, IndicesQueryCache indicesQueryCache, IndicesWarmer warmer) {
this.indexStoreConfig = indexStoreConfig;
this.indexSettings = indexSettings;
+ this.indicesQueryCache = indicesQueryCache;
+ this.indicesWarmer = warmer;
+ registerQueryCache(INDEX_QUERY_CACHE, IndexQueryCache::new);
+ registerQueryCache(NONE_QUERY_CACHE, (a, b) -> new NoneQueryCache(a));
}
/**
@@ -155,6 +174,28 @@ public class IndexModule extends AbstractModule {
similarities.put(name, similarity);
}
+ /**
+ * Registers a {@link QueryCache} provider for a given name
+ * @param name the providers / caches name
+ * @param provider the provider instance
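+ *
+ * For example, the built-in "none" cache is registered as
+ * {@code registerQueryCache(NONE_QUERY_CACHE, (a, b) -> new NoneQueryCache(a))}.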
+ */
+ public void registerQueryCache(String name, BiFunction<IndexSettings, IndicesQueryCache, QueryCache> provider) {
+ if (provider == null) {
+ throw new IllegalArgumentException("provider must not be null");
+ }
+ if (queryCaches.containsKey(name)) {
+ throw new IllegalArgumentException("Can't register the same [query_cache] more than once for [" + name + "]");
+ }
+ queryCaches.put(name, provider);
+ }
+
+ /**
+ * Sets a {@link org.elasticsearch.index.IndexModule.IndexSearcherWrapperFactory} that is called once the IndexService is fully constructed.
+ * Note: this method can only be called once per index. Multiple wrappers are not supported.
+ */
+ public void setSearcherWrapper(IndexSearcherWrapperFactory indexSearcherWrapperFactory) {
+ this.indexSearcherWrapper.set(indexSearcherWrapperFactory);
+ }
public IndexEventListener freeze() {
// TODO somehow we need to make this pkg private...
@@ -176,11 +217,7 @@ public class IndexModule extends AbstractModule {
@Override
protected void configure() {
bind(EngineFactory.class).to(engineFactoryImpl).asEagerSingleton();
- if (indexSearcherWrapper == null) {
- bind(IndexSearcherWrapper.class).toProvider(Providers.of(null));
- } else {
- bind(IndexSearcherWrapper.class).to(indexSearcherWrapper).asEagerSingleton();
- }
+ bind(IndexSearcherWrapperFactory.class).toInstance(indexSearcherWrapper.get() == null ? (shard) -> null : indexSearcherWrapper.get());
bind(IndexEventListener.class).toInstance(freeze());
bind(IndexService.class).asEagerSingleton();
bind(IndexServicesProvider.class).asEagerSingleton();
@@ -203,6 +240,15 @@ public class IndexModule extends AbstractModule {
throw new IllegalStateException("store must not be null");
}
}
+
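+ // resolve the configured query cache type and bind concrete cache instances directly,
+ // instead of letting Guice construct them (the old IndexCacheModule approach removed in this change)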
+ final String queryCacheType = settings.getSettings().get(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE);
+ BiFunction<IndexSettings, IndicesQueryCache, QueryCache> queryCacheProvider = queryCaches.get(queryCacheType);
+ BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, indicesWarmer);
+ QueryCache queryCache = queryCacheProvider.apply(settings, indicesQueryCache);
+ IndexCache indexCache = new IndexCache(settings, queryCache, bitsetFilterCache);
+ bind(QueryCache.class).toInstance(queryCache);
+ bind(IndexCache.class).toInstance(indexCache);
+ bind(BitsetFilterCache.class).toInstance(bitsetFilterCache);
bind(IndexStore.class).toInstance(store);
bind(SimilarityService.class).toInstance(new SimilarityService(settings, similarities));
}
@@ -224,4 +270,14 @@ public class IndexModule extends AbstractModule {
return getSettingsKey().equals(setting);
}
}
+
+ /**
+ * Factory for creating new {@link IndexSearcherWrapper} instances
+ */
+ public interface IndexSearcherWrapperFactory {
+ /**
+ * Returns a new IndexSearcherWrapper. This method is called once per index per node
+ */
+ IndexSearcherWrapper newWrapper(final IndexService indexService);
+ }
}
diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java
index 70ff320ebbe..02cad6212df 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexService.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexService.java
@@ -75,6 +75,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
private final IndicesService indicesServices;
private final IndexServicesProvider indexServicesProvider;
private final IndexStore indexStore;
+ private final IndexSearcherWrapper searcherWrapper;
private volatile Map shards = emptyMap();
private final AtomicBoolean closed = new AtomicBoolean(false);
private final AtomicBoolean deleted = new AtomicBoolean(false);
@@ -88,7 +89,8 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
IndicesService indicesServices,
IndexServicesProvider indexServicesProvider,
IndexStore indexStore,
- IndexEventListener eventListener) {
+ IndexEventListener eventListener,
+ IndexModule.IndexSearcherWrapperFactory wrapperFactory) {
super(indexSettings);
this.indexSettings = indexSettings;
this.analysisService = analysisService;
@@ -101,6 +103,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
this.indexStore = indexStore;
indexFieldData.setListener(new FieldDataCacheListener(this));
bitSetFilterCache.setListener(new BitsetCacheListener(this));
+ this.searcherWrapper = wrapperFactory.newWrapper(this);
}
public int numberOfShards() {
@@ -265,9 +268,9 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
(primary && IndexMetaData.isOnSharedFilesystem(indexSettings));
store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> indexServicesProvider.getIndicesQueryCache().onClose(shardId)));
if (useShadowEngine(primary, indexSettings)) {
- indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, indexServicesProvider);
+ indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, searcherWrapper, indexServicesProvider);
} else {
- indexShard = new IndexShard(shardId, this.indexSettings, path, store, indexServicesProvider);
+ indexShard = new IndexShard(shardId, this.indexSettings, path, store, searcherWrapper, indexServicesProvider);
}
eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created");
diff --git a/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java b/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java
index d61c911ab7d..1043d581af3 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java
@@ -56,12 +56,11 @@ public final class IndexServicesProvider {
private final SimilarityService similarityService;
private final EngineFactory factory;
private final BigArrays bigArrays;
- private final IndexSearcherWrapper indexSearcherWrapper;
private final IndexingMemoryController indexingMemoryController;
private final IndexEventListener listener;
@Inject
- public IndexServicesProvider(IndexEventListener listener, ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, @Nullable IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays, @Nullable IndexSearcherWrapper indexSearcherWrapper, IndexingMemoryController indexingMemoryController) {
+ public IndexServicesProvider(IndexEventListener listener, ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, @Nullable IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays, IndexingMemoryController indexingMemoryController) {
this.listener = listener;
this.threadPool = threadPool;
this.mapperService = mapperService;
@@ -75,7 +74,6 @@ public final class IndexServicesProvider {
this.similarityService = similarityService;
this.factory = factory;
this.bigArrays = bigArrays;
- this.indexSearcherWrapper = indexSearcherWrapper;
this.indexingMemoryController = indexingMemoryController;
}
@@ -126,13 +124,7 @@ public final class IndexServicesProvider {
return factory;
}
- public BigArrays getBigArrays() {
- return bigArrays;
- }
-
- public IndexSearcherWrapper getIndexSearcherWrapper() {
- return indexSearcherWrapper;
- }
+ public BigArrays getBigArrays() { return bigArrays; }
public IndexingMemoryController getIndexingMemoryController() {
return indexingMemoryController;
diff --git a/core/src/main/java/org/elasticsearch/index/cache/IndexCache.java b/core/src/main/java/org/elasticsearch/index/cache/IndexCache.java
index 67a7e717021..61733f24695 100644
--- a/core/src/main/java/org/elasticsearch/index/cache/IndexCache.java
+++ b/core/src/main/java/org/elasticsearch/index/cache/IndexCache.java
@@ -20,7 +20,6 @@
package org.elasticsearch.index.cache;
import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
@@ -37,7 +36,6 @@ public class IndexCache extends AbstractIndexComponent implements Closeable {
private final QueryCache queryCache;
private final BitsetFilterCache bitsetFilterCache;
- @Inject
public IndexCache(IndexSettings indexSettings, QueryCache queryCache, BitsetFilterCache bitsetFilterCache) {
super(indexSettings);
this.queryCache = queryCache;
diff --git a/core/src/main/java/org/elasticsearch/index/cache/IndexCacheModule.java b/core/src/main/java/org/elasticsearch/index/cache/IndexCacheModule.java
deleted file mode 100644
index 86e20490fa1..00000000000
--- a/core/src/main/java/org/elasticsearch/index/cache/IndexCacheModule.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.cache;
-
-import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.ExtensionPoint;
-import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
-import org.elasticsearch.index.cache.query.QueryCache;
-import org.elasticsearch.index.cache.query.index.IndexQueryCache;
-import org.elasticsearch.index.cache.query.none.NoneQueryCache;
-
-public class IndexCacheModule extends AbstractModule {
-
- public static final String INDEX_QUERY_CACHE = "index";
- public static final String NONE_QUERY_CACHE = "none";
- public static final String QUERY_CACHE_TYPE = "index.queries.cache.type";
- // for test purposes only
- public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything";
-
- private final Settings indexSettings;
- private final ExtensionPoint.SelectedType<QueryCache> queryCaches;
-
- public IndexCacheModule(Settings settings) {
- this.indexSettings = settings;
- this.queryCaches = new ExtensionPoint.SelectedType<>("query_cache", QueryCache.class);
-
- registerQueryCache(INDEX_QUERY_CACHE, IndexQueryCache.class);
- registerQueryCache(NONE_QUERY_CACHE, NoneQueryCache.class);
- }
-
- public void registerQueryCache(String name, Class<? extends QueryCache> clazz) {
- queryCaches.registerExtension(name, clazz);
- }
-
- @Override
- protected void configure() {
- queryCaches.bindType(binder(), indexSettings, QUERY_CACHE_TYPE, INDEX_QUERY_CACHE);
- bind(BitsetFilterCache.class).asEagerSingleton();
- bind(IndexCache.class).asEagerSingleton();
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
index 994033273a3..7ef4c5b3da1 100644
--- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
+++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
@@ -32,16 +32,15 @@ import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.ExceptionsHelper;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.cache.RemovalListener;
import org.elasticsearch.common.cache.RemovalNotification;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
@@ -50,7 +49,6 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.IndicesWarmer.TerminationHandle;
-import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
import java.io.IOException;
@@ -68,7 +66,7 @@ import java.util.concurrent.Executor;
* and require that it should always be around should use this cache, otherwise the
* {@link org.elasticsearch.index.cache.query.QueryCache} should be used instead.
*/
-public class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Query, BitsetFilterCache.Value>>, Closeable {
+public final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Query, BitsetFilterCache.Value>>, Closeable {
public static final String LOAD_RANDOM_ACCESS_FILTERS_EAGERLY = "index.load_fixed_bitset_filters_eagerly";
private static final Listener DEFAULT_NOOP_LISTENER = new Listener() {
@@ -85,24 +83,17 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
private final Cache<Object, Cache<Query, Value>> loadedFilters;
private volatile Listener listener = DEFAULT_NOOP_LISTENER;
private final BitSetProducerWarmer warmer;
+ private final IndicesWarmer indicesWarmer;
- private IndicesWarmer indicesWarmer;
-
- @Inject
- public BitsetFilterCache(IndexSettings indexSettings) {
+ public BitsetFilterCache(IndexSettings indexSettings, IndicesWarmer indicesWarmer) {
super(indexSettings);
this.loadRandomAccessFiltersEagerly = this.indexSettings.getSettings().getAsBoolean(LOAD_RANDOM_ACCESS_FILTERS_EAGERLY, true);
this.loadedFilters = CacheBuilder.<Object, Cache<Query, Value>>builder().removalListener(this).build();
this.warmer = new BitSetProducerWarmer();
- }
-
-
- @Inject(optional = true)
- public void setIndicesWarmer(IndicesWarmer indicesWarmer) {
this.indicesWarmer = indicesWarmer;
indicesWarmer.addListener(warmer);
}
-
+
/**
* Sets a listener that is invoked for all subsequent cache and removal events.
* @throws IllegalStateException if the listener is set more than once
@@ -129,10 +120,11 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
@Override
public void close() {
- if (indicesWarmer != null) {
+ try {
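+ // make sure the cache is cleared even if removing the warmer listener throws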
indicesWarmer.removeListener(warmer);
+ } finally {
+ clear("close");
}
- clear("close");
}
public void clear(String reason) {
@@ -229,10 +221,10 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
}
}
- final class BitSetProducerWarmer extends IndicesWarmer.Listener {
+ final class BitSetProducerWarmer implements IndicesWarmer.Listener {
@Override
- public IndicesWarmer.TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, IndicesWarmer.WarmerContext context, ThreadPool threadPool) {
+ public IndicesWarmer.TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) {
if (!loadRandomAccessFiltersEagerly) {
return TerminationHandle.NO_WAIT;
}
@@ -258,9 +250,9 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
warmUp.add(Queries.newNonNestedFilter());
}
- final Executor executor = threadPool.executor(executor());
- final CountDownLatch latch = new CountDownLatch(context.searcher().reader().leaves().size() * warmUp.size());
- for (final LeafReaderContext ctx : context.searcher().reader().leaves()) {
+ final Executor executor = indicesWarmer.getExecutor();
+ final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size() * warmUp.size());
+ for (final LeafReaderContext ctx : searcher.reader().leaves()) {
for (final Query filterToWarm : warmUp) {
executor.execute(() -> {
try {
@@ -281,7 +273,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
}
@Override
- public TerminationHandle warmTopReader(IndexShard indexShard, IndexMetaData indexMetaData, IndicesWarmer.WarmerContext context, ThreadPool threadPool) {
+ public TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher) {
return TerminationHandle.NO_WAIT;
}
diff --git a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java
index 9a15d853eda..f7c53cf20f0 100644
--- a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java
+++ b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java
@@ -21,7 +21,7 @@ package org.elasticsearch.index.codec;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
-import org.apache.lucene.codecs.lucene53.Lucene53Codec;
+import org.apache.lucene.codecs.lucene54.Lucene54Codec;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@@ -56,8 +56,8 @@ public class CodecService extends AbstractIndexComponent {
this.mapperService = mapperService;
MapBuilder<String, Codec> codecs = MapBuilder.newMapBuilder();
if (mapperService == null) {
- codecs.put(DEFAULT_CODEC, new Lucene53Codec());
- codecs.put(BEST_COMPRESSION_CODEC, new Lucene53Codec(Mode.BEST_COMPRESSION));
+ codecs.put(DEFAULT_CODEC, new Lucene54Codec());
+ codecs.put(BEST_COMPRESSION_CODEC, new Lucene54Codec(Mode.BEST_COMPRESSION));
} else {
codecs.put(DEFAULT_CODEC,
new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger));
diff --git a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java
index b8e44bdadb6..b504c4c21c5 100644
--- a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java
+++ b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java
@@ -22,7 +22,7 @@ package org.elasticsearch.index.codec;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
-import org.apache.lucene.codecs.lucene53.Lucene53Codec;
+import org.apache.lucene.codecs.lucene54.Lucene54Codec;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.mapper.MappedFieldType;
@@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
* configured for a specific field the default postings format is used.
*/
// LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version
-public class PerFieldMappingPostingFormatCodec extends Lucene53Codec {
+public class PerFieldMappingPostingFormatCodec extends Lucene54Codec {
private final ESLogger logger;
private final MapperService mapperService;
diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java
index 81f27097c81..46f144ea5f2 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java
@@ -1056,4 +1056,18 @@ public abstract class Engine implements Closeable {
public long getLastWriteNanos() {
return this.lastWriteNanos;
}
+
+ /**
+ * Called for each newly opened engine searcher to warm new segments
+ * @see EngineConfig#getWarmer()
+ */
+ public interface Warmer {
+ /**
+ * Called once a new Searcher is opened.
+ * @param searcher the searcher to warm
+ * @param isTopLevelReader true iff the searcher is built from a top-level reader.
+ * Otherwise the searcher might be built from a leaf reader to warm in isolation
+ */
+ void warm(Engine.Searcher searcher, boolean isTopLevelReader);
+ }
}
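
For illustration, a minimal sketch of what an implementation of the new Engine.Warmer contract can look like (hypothetical warmer, not part of this patch; fully qualified to stay self-contained):

    Engine.Warmer exampleWarmer = (searcher, isTopLevelReader) -> {
        // touch every leaf of the new searcher so the first real search
        // does not pay the full segment-open cost
        for (org.apache.lucene.index.LeafReaderContext ctx : searcher.reader().leaves()) {
            // e.g. pre-load norms, doc values or cached bitsets for ctx.reader()
        }
    };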
diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
index c3015c6e560..f4337de266e 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
@@ -61,8 +61,7 @@ public final class EngineConfig {
private final String codecName;
private final ThreadPool threadPool;
private final ShardIndexingService indexingService;
- @Nullable
- private final IndicesWarmer warmer;
+ private final Engine.Warmer warmer;
private final Store store;
private final SnapshotDeletionPolicy deletionPolicy;
private final MergePolicy mergePolicy;
@@ -116,7 +115,7 @@ public final class EngineConfig {
* Creates a new {@link org.elasticsearch.index.engine.EngineConfig}
*/
public EngineConfig(ShardId shardId, ThreadPool threadPool, ShardIndexingService indexingService,
- Settings indexSettings, IndicesWarmer warmer, Store store, SnapshotDeletionPolicy deletionPolicy,
+ Settings indexSettings, Engine.Warmer warmer, Store store, SnapshotDeletionPolicy deletionPolicy,
MergePolicy mergePolicy, MergeSchedulerConfig mergeSchedulerConfig, Analyzer analyzer,
Similarity similarity, CodecService codecService, Engine.EventListener eventListener,
TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, TranslogConfig translogConfig, TimeValue flushMergesAfter) {
@@ -124,7 +123,7 @@ public final class EngineConfig {
this.indexSettings = indexSettings;
this.threadPool = threadPool;
this.indexingService = indexingService;
- this.warmer = warmer;
+ this.warmer = warmer == null ? (a,b) -> {} : warmer;
this.store = store;
this.deletionPolicy = deletionPolicy;
this.mergePolicy = mergePolicy;
@@ -267,11 +266,9 @@ public final class EngineConfig {
}
/**
- * Returns an {@link org.elasticsearch.indices.IndicesWarmer} used to warm new searchers before they are used for searching.
- * Note: This method might retrun null
+ * Returns an {@link org.elasticsearch.index.engine.Engine.Warmer} used to warm new searchers before they are used for searching.
*/
- @Nullable
- public IndicesWarmer getWarmer() {
+ public Engine.Warmer getWarmer() {
return warmer;
}
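
Swapping the @Nullable IndicesWarmer for an Engine.Warmer defaulted to a no-op lambda is a small null-object refactoring: getWarmer() callers no longer branch on null. The idiom in isolation (illustrative names):

    // Null-object via lambda: an absent collaborator becomes a harmless default.
    Engine.Warmer warmer = maybeNull == null ? (searcher, isTopLevel) -> {} : maybeNull;
    warmer.warm(searcher, true); // safe, never a NullPointerException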
diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 5d3b9388582..9ab09a245ea 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -79,8 +79,7 @@ public class InternalEngine extends Engine {
private volatile long lastDeleteVersionPruneTimeMSec;
private final ShardIndexingService indexingService;
- @Nullable
- private final IndicesWarmer warmer;
+ private final Engine.Warmer warmer;
private final Translog translog;
private final ElasticsearchConcurrentMergeScheduler mergeScheduler;
@@ -930,8 +929,7 @@ public class InternalEngine extends Engine {
assert isMergedSegment(esLeafReader);
if (warmer != null) {
final Engine.Searcher searcher = new Searcher("warmer", searcherFactory.newSearcher(esLeafReader, null));
- final IndicesWarmer.WarmerContext context = new IndicesWarmer.WarmerContext(shardId, searcher);
- warmer.warmNewReaders(context);
+ warmer.warm(searcher, false);
}
} catch (Throwable t) {
// Don't fail a merge if the warm-up failed
@@ -955,7 +953,7 @@ public class InternalEngine extends Engine {
/** Extended SearcherFactory that warms the segments if needed when acquiring a new searcher */
final static class SearchFactory extends EngineSearcherFactory {
- private final IndicesWarmer warmer;
+ private final Engine.Warmer warmer;
private final ShardId shardId;
private final ESLogger logger;
private final AtomicBoolean isEngineClosed;
@@ -1014,11 +1012,10 @@ public class InternalEngine extends Engine {
}
if (newSearcher != null) {
- IndicesWarmer.WarmerContext context = new IndicesWarmer.WarmerContext(shardId, new Searcher("new_reader_warming", newSearcher));
- warmer.warmNewReaders(context);
+ warmer.warm(new Searcher("new_reader_warming", newSearcher), false);
}
assert searcher.getIndexReader() instanceof ElasticsearchDirectoryReader : "this class needs an ElasticsearchDirectoryReader but got: " + searcher.getIndexReader().getClass();
- warmer.warmTopReader(new IndicesWarmer.WarmerContext(shardId, new Searcher("top_reader_warming", searcher)));
+ warmer.warm(new Searcher("top_reader_warming", searcher), true);
} catch (Throwable e) {
if (isEngineClosed.get() == false) {
logger.warn("failed to prepare/warm", e);
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
index b264bfa4bc3..eb0939d9339 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
@@ -25,7 +25,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
@@ -82,7 +82,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
public static final boolean ENABLE_LATLON = false;
public static final boolean ENABLE_GEOHASH = false;
public static final boolean ENABLE_GEOHASH_PREFIX = false;
- public static final int GEO_HASH_PRECISION = XGeoHashUtils.PRECISION;
+ public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false);
public static final Explicit<Boolean> COERCE = new Explicit<>(false, false);
@@ -705,7 +705,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
}
if (fieldType().isGeohashEnabled()) {
if (geohash == null) {
- geohash = XGeoHashUtils.stringEncode(point.lon(), point.lat());
+ geohash = GeoHashUtils.stringEncode(point.lon(), point.lat());
}
addGeohashField(context, geohash);
}
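
One detail worth flagging with the XGeoHashUtils to GeoHashUtils rename: stringEncode takes longitude before latitude, as the call sites above show. A sketch (coordinates are illustrative):

    // longitude is the first argument, latitude the second
    double lat = 40.7128, lon = -74.0060;
    String geohash = org.apache.lucene.util.GeoHashUtils.stringEncode(lon, lat);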
diff --git a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java
index 340094af7ce..d0094344b0b 100644
--- a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java
+++ b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java
@@ -19,16 +19,14 @@
package org.elasticsearch.index.query;
-import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentParser;
+
import java.io.IOException;
public class FuzzyQueryParser implements QueryParser {
- private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("min_similarity");
-
@Override
public String[] names() {
return new String[]{ FuzzyQueryBuilder.NAME };
@@ -68,7 +66,7 @@ public class FuzzyQueryParser implements QueryParser {
value = parser.objectBytes();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
- } else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZINESS)) {
+ } else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
fuzziness = Fuzziness.parse(parser);
} else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) {
prefixLength = parser.intValue();
diff --git a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java
index a385e978487..97d403ff1c9 100644
--- a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java
+++ b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java
@@ -20,7 +20,7 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
@@ -131,7 +131,7 @@ public class GeohashCellQuery {
}
public Builder point(double lat, double lon) {
- this.geohash = XGeoHashUtils.stringEncode(lon, lat);
+ this.geohash = GeoHashUtils.stringEncode(lon, lat);
return this;
}
@@ -205,7 +205,7 @@ public class GeohashCellQuery {
Query query;
if (neighbors) {
- query = create(context, geoFieldType, geohash, XGeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
+ query = create(context, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
} else {
query = create(context, geoFieldType, geohash, null);
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java b/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java
index ede4fa98a76..8793de10367 100644
--- a/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java
+++ b/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java
@@ -34,7 +34,6 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
@@ -213,29 +212,20 @@ public class IndexQueryParserService extends AbstractIndexComponent {
/**
* Selectively parses a query from a top level query or query_binary json field from the specified source.
*/
- public ParsedQuery parseQuery(BytesReference source) {
+ public ParsedQuery parseTopLevelQuery(BytesReference source) {
XContentParser parser = null;
try {
- parser = XContentHelper.createParser(source);
- ParsedQuery parsedQuery = null;
- for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
- if (token == XContentParser.Token.FIELD_NAME) {
- String fieldName = parser.currentName();
- if ("query".equals(fieldName)) {
- parsedQuery = parse(parser);
- } else if ("query_binary".equals(fieldName) || "queryBinary".equals(fieldName)) {
- byte[] querySource = parser.binaryValue();
- XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
- parsedQuery = parse(qSourceParser);
- } else {
- throw new ParsingException(parser.getTokenLocation(), "request does not support [" + fieldName + "]");
- }
- }
+ parser = XContentFactory.xContent(source).createParser(source);
+ QueryShardContext queryShardContext = cache.get();
+ queryShardContext.reset(parser);
+ queryShardContext.parseFieldMatcher(parseFieldMatcher);
+ try {
+ QueryBuilder<?> queryBuilder = queryShardContext.parseContext().parseTopLevelQueryBuilder();
+ Query query = toQuery(queryBuilder, queryShardContext);
+ return new ParsedQuery(query, queryShardContext.copyNamedQueries());
+ } finally {
+ queryShardContext.reset(null);
}
- if (parsedQuery == null) {
- throw new ParsingException(parser.getTokenLocation(), "Required query is missing");
- }
- return parsedQuery;
} catch (ParsingException | QueryShardException e) {
throw e;
} catch (Throwable e) {
diff --git a/core/src/main/java/org/elasticsearch/index/query/IndicesQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/IndicesQueryParser.java
index 674cad70872..416da97942e 100644
--- a/core/src/main/java/org/elasticsearch/index/query/IndicesQueryParser.java
+++ b/core/src/main/java/org/elasticsearch/index/query/IndicesQueryParser.java
@@ -32,8 +32,8 @@ import java.util.Collection;
*/
public class IndicesQueryParser implements QueryParser {
- private static final ParseField QUERY_FIELD = new ParseField("query", "filter");
- private static final ParseField NO_MATCH_QUERY = new ParseField("no_match_query", "no_match_filter");
+ private static final ParseField QUERY_FIELD = new ParseField("query");
+ private static final ParseField NO_MATCH_QUERY = new ParseField("no_match_query");
@Override
public String[] names() {
diff --git a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java
index 1fabfede29d..044a49d23d7 100644
--- a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java
+++ b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java
@@ -20,16 +20,15 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.join.ScoreMode;
-import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.QueryInnerHits;
+
import java.io.IOException;
public class NestedQueryParser implements QueryParser {
- private static final ParseField FILTER_FIELD = new ParseField("filter").withAllDeprecated("query");
private static final NestedQueryBuilder PROTOTYPE = new NestedQueryBuilder("", EmptyQueryBuilder.PROTOTYPE);
@Override
@@ -54,8 +53,6 @@ public class NestedQueryParser implements QueryParser {
} else if (token == XContentParser.Token.START_OBJECT) {
if ("query".equals(currentFieldName)) {
query = parseContext.parseInnerQueryBuilder();
- } else if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) {
- query = parseContext.parseInnerQueryBuilder();
} else if ("inner_hits".equals(currentFieldName)) {
queryInnerHits = new QueryInnerHits(parser);
} else {
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryFilterBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryFilterBuilder.java
deleted file mode 100644
index 4ca9e1598e2..00000000000
--- a/core/src/main/java/org/elasticsearch/index/query/QueryFilterBuilder.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.query;
-
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.Query;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * A filter that simply wraps a query.
- * @deprecated Useless now that queries and filters are merged: pass the
- * query as a filter directly.
- */
-//TODO: remove when https://github.com/elastic/elasticsearch/issues/13326 is fixed
-@Deprecated
-public class QueryFilterBuilder extends AbstractQueryBuilder<QueryFilterBuilder> {
-
- public static final String NAME = "query";
-
- private final QueryBuilder queryBuilder;
-
- static final QueryFilterBuilder PROTOTYPE = new QueryFilterBuilder(EmptyQueryBuilder.PROTOTYPE);
-
- /**
- * A filter that simply wraps a query.
- *
- * @param queryBuilder The query to wrap as a filter
- */
- public QueryFilterBuilder(QueryBuilder queryBuilder) {
- if (queryBuilder == null) {
- throw new IllegalArgumentException("inner query cannot be null");
- }
- this.queryBuilder = queryBuilder;
- }
-
- /**
- * @return the query builder that is wrapped by this {@link QueryFilterBuilder}
- */
- public QueryBuilder innerQuery() {
- return this.queryBuilder;
- }
-
- @Override
- protected void doXContent(XContentBuilder builder, Params params) throws IOException {
- builder.field(NAME);
- queryBuilder.toXContent(builder, params);
- }
-
- @Override
- protected Query doToQuery(QueryShardContext context) throws IOException {
- // inner query builder can potentially be `null`, in that case we ignore it
- Query innerQuery = this.queryBuilder.toQuery(context);
- if (innerQuery == null) {
- return null;
- }
- return new ConstantScoreQuery(innerQuery);
- }
-
- @Override
- protected void setFinalBoost(Query query) {
- //no-op this query doesn't support boost
- }
-
- @Override
- protected int doHashCode() {
- return Objects.hash(queryBuilder);
- }
-
- @Override
- protected boolean doEquals(QueryFilterBuilder other) {
- return Objects.equals(queryBuilder, other.queryBuilder);
- }
-
- @Override
- protected QueryFilterBuilder doReadFrom(StreamInput in) throws IOException {
- QueryBuilder innerQueryBuilder = in.readQuery();
- return new QueryFilterBuilder(innerQueryBuilder);
- }
-
- @Override
- protected void doWriteTo(StreamOutput out) throws IOException {
- out.writeQuery(queryBuilder);
- }
-
- @Override
- public String getWriteableName() {
- return NAME;
- }
-}
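
As the deleted class's own javadoc says, queries and filters are merged, so the wrapper is redundant. A migration sketch (field and value are hypothetical):

    // Before: new QueryFilterBuilder(QueryBuilders.termQuery("status", "active"))
    // After: pass the query directly wherever a filter is expected, e.g.
    BoolQueryBuilder query = QueryBuilders.boolQuery()
            .filter(QueryBuilders.termQuery("status", "active"));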
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java
index 0885d040179..70a6a18aab2 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java
+++ b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java
@@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
@@ -65,9 +66,47 @@ public class QueryParseContext {
}
/**
- * @return a new QueryBuilder based on the current state of the parser
+ * Parses a top level query including the query element that wraps it
*/
- public QueryBuilder parseInnerQueryBuilder() throws IOException {
+ public QueryBuilder<?> parseTopLevelQueryBuilder() {
+ try {
+ QueryBuilder<?> queryBuilder = null;
+ for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ String fieldName = parser.currentName();
+ if ("query".equals(fieldName)) {
+ queryBuilder = parseInnerQueryBuilder();
+ } else if ("query_binary".equals(fieldName) || "queryBinary".equals(fieldName)) {
+ byte[] querySource = parser.binaryValue();
+ XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
+ QueryParseContext queryParseContext = new QueryParseContext(indicesQueriesRegistry);
+ queryParseContext.reset(qSourceParser);
+ try {
+ queryParseContext.parseFieldMatcher(parseFieldMatcher);
+ queryBuilder = queryParseContext.parseInnerQueryBuilder();
+ } finally {
+ queryParseContext.reset(null);
+ }
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "request does not support [" + parser.currentName() + "]");
+ }
+ }
+ }
+ if (queryBuilder == null) {
+ throw new ParsingException(parser.getTokenLocation(), "Required query is missing");
+ }
+ return queryBuilder;
+ } catch (ParsingException e) {
+ throw e;
+ } catch (Throwable e) {
+ throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse", e);
+ }
+ }
+
+ /**
+ * Parses a query excluding the query element that wraps it
+ */
+ public QueryBuilder<?> parseInnerQueryBuilder() throws IOException {
// move to START object
XContentParser.Token token;
if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
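
For reference, the two request shapes parseTopLevelQueryBuilder() accepts, sketched as Java string literals (bodies are illustrative; the base64 value encodes the same match_all query):

    // 1) a plain "query" element:
    String json = "{ \"query\": { \"match_all\": {} } }";
    // 2) the same query base64-encoded under "query_binary" (alias "queryBinary"):
    String binary = "{ \"query_binary\": \"eyAibWF0Y2hfYWxsIjoge30gfQ==\" }";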
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java
index f5dbb250805..9dfee553b55 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java
+++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java
@@ -19,7 +19,6 @@
package org.elasticsearch.index.query;
-import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.Fuzziness;
@@ -35,8 +34,6 @@ import java.util.Map;
*/
public class QueryStringQueryParser implements QueryParser {
- private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("fuzzy_min_sim");
-
@Override
public String[] names() {
return new String[]{QueryStringQueryBuilder.NAME, Strings.toCamelCase(QueryStringQueryBuilder.NAME)};
@@ -134,7 +131,7 @@ public class QueryStringQueryParser implements QueryParser {
fuzzyRewrite = parser.textOrNull();
} else if ("phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) {
phraseSlop = parser.intValue();
- } else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZINESS)) {
+ } else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
fuzziness = Fuzziness.parse(parser);
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java
index f8b0deaf9be..b38552663d1 100644
--- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java
+++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java
@@ -70,7 +70,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
rethrowUnlessLenient(e);
}
}
- return super.simplify(bq.build());
+ return simplify(bq.build());
}
/**
@@ -93,7 +93,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
rethrowUnlessLenient(e);
}
}
- return super.simplify(bq.build());
+ return simplify(bq.build());
}
@Override
@@ -111,7 +111,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
rethrowUnlessLenient(e);
}
}
- return super.simplify(bq.build());
+ return simplify(bq.build());
}
/**
@@ -140,7 +140,19 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
return rethrowUnlessLenient(e);
}
}
- return super.simplify(bq.build());
+ return simplify(bq.build());
+ }
+
+ /**
+ * Override of Lucene's SimpleQueryParser that doesn't simplify for the 1-clause case.
+ */
+ @Override
+ protected Query simplify(BooleanQuery bq) {
+ if (bq.clauses().isEmpty()) {
+ return null;
+ } else {
+ return bq;
+ }
}
/**
@@ -295,7 +307,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
// For further reasoning see
// https://issues.apache.org/jira/browse/LUCENE-4021
return (Objects.equals(locale.toLanguageTag(), other.locale.toLanguageTag())
- && Objects.equals(lowercaseExpandedTerms, other.lowercaseExpandedTerms)
+ && Objects.equals(lowercaseExpandedTerms, other.lowercaseExpandedTerms)
&& Objects.equals(lenient, other.lenient)
&& Objects.equals(analyzeWildcard, other.analyzeWildcard));
}
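
The simplify override above is the crux of this file's change: Lucene's default simplify collapses a one-clause BooleanQuery into its inner query, which would leave the minimum_should_match handling in SimpleQueryStringBuilder (next hunk) nothing to attach to. A sketch of the difference (illustrative term):

    BooleanQuery.Builder bq = new BooleanQuery.Builder();
    bq.add(new TermQuery(new Term("body", "foo")), BooleanClause.Occur.SHOULD);
    // Lucene's SimpleQueryParser.simplify(...) would unwrap this to the bare
    // TermQuery; the override returns the one-clause BooleanQuery unchanged.
    Query kept = bq.build();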
diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java
index a93c60ec147..2b221ed9ab0 100644
--- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java
@@ -20,6 +20,8 @@
package org.elasticsearch.index.query;
import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Strings;
@@ -285,8 +287,20 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQueryStringBuilder> {
+ if (query instanceof BooleanQuery) {
+ BooleanQuery booleanQuery = (BooleanQuery) query;
+ if (booleanQuery.clauses().size() > 1
+ && ((booleanQuery.clauses().iterator().next().getQuery() instanceof BooleanQuery) == false)) {
+ // special case for one term query and more than one field: (f1:t1 f2:t1 f3:t1)
+ // we need to wrap this in additional BooleanQuery so minimum_should_match is applied correctly
+ BooleanQuery.Builder builder = new BooleanQuery.Builder();
+ builder.add(new BooleanClause(booleanQuery, Occur.SHOULD));
+ booleanQuery = builder.build();
+ }
+ if (minimumShouldMatch != null) {
+ booleanQuery = Queries.applyMinimumShouldMatch(booleanQuery, minimumShouldMatch);
+ }
+ query = booleanQuery;
}
return query;
}
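
The wrapping logic above in isolation: a single term expanded over several fields comes back as one flat boolean, and applying minimum_should_match at that level would count fields rather than terms. A sketch (field and term names are illustrative):

    // (f1:t1 f2:t1 f3:t1) -- one term expanded over three fields
    BooleanQuery.Builder inner = new BooleanQuery.Builder();
    inner.add(new TermQuery(new Term("f1", "t1")), Occur.SHOULD);
    inner.add(new TermQuery(new Term("f2", "t1")), Occur.SHOULD);
    inner.add(new TermQuery(new Term("f3", "t1")), Occur.SHOULD);
    // nest it once, so minimum_should_match counts the term, not the three fields
    BooleanQuery.Builder wrapper = new BooleanQuery.Builder();
    wrapper.add(new BooleanClause(inner.build(), Occur.SHOULD));
    BooleanQuery wrapped = wrapper.build();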
diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java
index 97508a8a16f..c90034cb04d 100644
--- a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java
@@ -57,24 +57,18 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
static final TermsQueryBuilder PROTOTYPE = new TermsQueryBuilder("field", "value");
- public static final boolean DEFAULT_DISABLE_COORD = false;
-
private final String fieldName;
private final List<Object> values;
- @Deprecated
- private String minimumShouldMatch;
- @Deprecated
- private boolean disableCoord = DEFAULT_DISABLE_COORD;
private final TermsLookup termsLookup;
public TermsQueryBuilder(String fieldName, TermsLookup termsLookup) {
- this(fieldName, null, null, DEFAULT_DISABLE_COORD, termsLookup);
+ this(fieldName, null, termsLookup);
}
/**
* constructor used internally for serialization of both value / termslookup variants
*/
- TermsQueryBuilder(String fieldName, List<Object> values, String minimumShouldMatch, boolean disableCoord, TermsLookup termsLookup) {
+ TermsQueryBuilder(String fieldName, List<Object> values, TermsLookup termsLookup) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("field name cannot be null.");
}
@@ -86,8 +80,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
}
this.fieldName = fieldName;
this.values = values;
- this.disableCoord = disableCoord;
- this.minimumShouldMatch = minimumShouldMatch;
this.termsLookup = termsLookup;
}
@@ -178,34 +170,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
return convertToStringListIfBytesRefList(this.values);
}
- /**
- * Sets the minimum number of matches across the provided terms. Defaults to 1 .
- * @deprecated use [bool] query instead
- */
- @Deprecated
- public TermsQueryBuilder minimumShouldMatch(String minimumShouldMatch) {
- this.minimumShouldMatch = minimumShouldMatch;
- return this;
- }
-
- public String minimumShouldMatch() {
- return this.minimumShouldMatch;
- }
-
- /**
- * Disables Similarity#coord(int,int) in scoring. Defaults to false .
- * @deprecated use [bool] query instead
- */
- @Deprecated
- public TermsQueryBuilder disableCoord(boolean disableCoord) {
- this.disableCoord = disableCoord;
- return this;
- }
-
- boolean disableCoord() {
- return this.disableCoord;
- }
-
public TermsLookup termsLookup() {
return this.termsLookup;
}
@@ -252,12 +216,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
} else {
builder.field(fieldName, convertToStringListIfBytesRefList(values));
}
- if (minimumShouldMatch != null) {
- builder.field("minimum_should_match", minimumShouldMatch);
- }
- if (disableCoord != DEFAULT_DISABLE_COORD) {
- builder.field("disable_coord", disableCoord);
- }
printBoostAndQueryName(builder);
builder.endObject();
}
@@ -284,7 +242,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
if (terms == null || terms.isEmpty()) {
return Queries.newMatchNoDocsQuery();
}
- return handleTermsQuery(terms, fieldName, context, minimumShouldMatch, disableCoord);
+ return handleTermsQuery(terms, fieldName, context);
}
private List<Object> fetch(TermsLookup termsLookup, Client client) {
@@ -300,7 +258,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
return terms;
}
- private static Query handleTermsQuery(List<Object> terms, String fieldName, QueryShardContext context, String minimumShouldMatch, boolean disableCoord) {
+ private static Query handleTermsQuery(List<Object> terms, String fieldName, QueryShardContext context) {
MappedFieldType fieldType = context.fieldMapper(fieldName);
String indexFieldName;
if (fieldType != null) {
@@ -322,7 +280,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
}
} else {
BooleanQuery.Builder bq = new BooleanQuery.Builder();
- bq.setDisableCoord(disableCoord);
for (Object term : terms) {
if (fieldType != null) {
bq.add(fieldType.termQuery(term, context), BooleanClause.Occur.SHOULD);
@@ -330,7 +287,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
bq.add(new TermQuery(new Term(indexFieldName, BytesRefs.toBytesRef(term))), BooleanClause.Occur.SHOULD);
}
}
- query = Queries.applyMinimumShouldMatch(bq.build(), minimumShouldMatch);
+ query = bq.build();
}
return query;
}
@@ -344,9 +301,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
lookup = TermsLookup.readTermsLookupFrom(in);
}
List<Object> values = (List<Object>) in.readGenericValue();
- String minimumShouldMatch = in.readOptionalString();
- boolean disableCoord = in.readBoolean();
- return new TermsQueryBuilder(field, values, minimumShouldMatch, disableCoord, lookup);
+ return new TermsQueryBuilder(field, values, lookup);
}
@Override
@@ -357,21 +312,17 @@ public class TermsQueryBuilder extends AbstractQueryBuilder {
termsLookup.writeTo(out);
}
out.writeGenericValue(values);
- out.writeOptionalString(minimumShouldMatch);
- out.writeBoolean(disableCoord);
}
@Override
protected int doHashCode() {
- return Objects.hash(fieldName, values, minimumShouldMatch, disableCoord, termsLookup);
+ return Objects.hash(fieldName, values, termsLookup);
}
@Override
protected boolean doEquals(TermsQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName) &&
Objects.equals(values, other.values) &&
- Objects.equals(minimumShouldMatch, other.minimumShouldMatch) &&
- Objects.equals(disableCoord, other.disableCoord) &&
Objects.equals(termsLookup, other.termsLookup);
}
}
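
The deprecation messages removed here already pointed users at the bool query; the equivalent of the dropped minimum_should_match option looks roughly like this (hypothetical field and values):

    // Before (removed): terms query with minimum_should_match / disable_coord
    // After: an explicit bool query with one should clause per term
    BoolQueryBuilder replacement = QueryBuilders.boolQuery()
            .should(QueryBuilders.termQuery("tags", "a"))
            .should(QueryBuilders.termQuery("tags", "b"))
            .should(QueryBuilders.termQuery("tags", "c"))
            .minimumNumberShouldMatch(2);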
diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java
index c76369195a3..69fe62901e7 100644
--- a/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java
+++ b/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java
@@ -19,7 +19,6 @@
package org.elasticsearch.index.query;
-import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.indices.cache.query.terms.TermsLookup;
@@ -38,11 +37,6 @@ import java.util.List;
*/
public class TermsQueryParser implements QueryParser {
- private static final ParseField MIN_SHOULD_MATCH_FIELD = new ParseField("min_match", "min_should_match", "minimum_should_match")
- .withAllDeprecated("Use [bool] query instead");
- private static final ParseField DISABLE_COORD_FIELD = new ParseField("disable_coord").withAllDeprecated("Use [bool] query instead");
- private static final ParseField EXECUTION_FIELD = new ParseField("execution").withAllDeprecated("execution is deprecated and has no effect");
-
@Override
public String[] names() {
return new String[]{TermsQueryBuilder.NAME, "in"};
@@ -54,8 +48,6 @@ public class TermsQueryParser implements QueryParser {
String fieldName = null;
List values = null;
- String minShouldMatch = null;
- boolean disableCoord = TermsQueryBuilder.DEFAULT_DISABLE_COORD;
TermsLookup termsLookup = null;
String queryName = null;
@@ -78,17 +70,8 @@ public class TermsQueryParser implements QueryParser {
fieldName = currentFieldName;
termsLookup = TermsLookup.parseTermsLookup(parser);
} else if (token.isValue()) {
- if (parseContext.parseFieldMatcher().match(currentFieldName, EXECUTION_FIELD)) {
- // ignore
- } else if (parseContext.parseFieldMatcher().match(currentFieldName, MIN_SHOULD_MATCH_FIELD)) {
- if (minShouldMatch != null) {
- throw new IllegalArgumentException("[" + currentFieldName + "] is not allowed in a filter context for the [" + TermsQueryBuilder.NAME + "] query");
- }
- minShouldMatch = parser.textOrNull();
- } else if ("boost".equals(currentFieldName)) {
+ if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
- } else if (parseContext.parseFieldMatcher().match(currentFieldName, DISABLE_COORD_FIELD)) {
- disableCoord = parser.booleanValue();
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
@@ -100,7 +83,7 @@ public class TermsQueryParser implements QueryParser {
if (fieldName == null) {
throw new ParsingException(parser.getTokenLocation(), "terms query requires a field name, followed by array of terms or a document lookup specification");
}
- return new TermsQueryBuilder(fieldName, values, minShouldMatch, disableCoord, termsLookup)
+ return new TermsQueryBuilder(fieldName, values, termsLookup)
.boost(boost)
.queryName(queryName);
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java
index 7adde617009..d738f3a259d 100644
--- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java
+++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java
@@ -46,7 +46,6 @@ public class FunctionScoreQueryParser implements QueryParser {
diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
+ final Engine.Warmer engineWarmer = (searcher, toLevel) -> warmer.warm(searcher, this, idxSettings, toLevel);
return new EngineConfig(shardId,
- threadPool, indexingService, indexSettings, warmer, store, deletionPolicy, mergePolicyConfig.getMergePolicy(), mergeSchedulerConfig,
+ threadPool, indexingService, indexSettings, engineWarmer, store, deletionPolicy, mergePolicyConfig.getMergePolicy(), mergeSchedulerConfig,
mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, translogRecoveryPerformer, indexCache.query(), cachingPolicy, translogConfig, indexingMemoryController.getInactiveTime());
}
diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java
index 26e8e55b64f..ae746ab5ee7 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java
@@ -37,8 +37,8 @@ import org.elasticsearch.index.translog.TranslogStats;
*/
public final class ShadowIndexShard extends IndexShard {
- public ShadowIndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexServicesProvider provider) throws IOException {
- super(shardId, indexSettings, path, store, provider);
+ public ShadowIndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexSearcherWrapper wrapper, IndexServicesProvider provider) throws IOException {
+ super(shardId, indexSettings, path, store, wrapper, provider);
}
/**
diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java
index 89a3ae21fb4..8246ed9e608 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java
@@ -24,11 +24,9 @@ import org.elasticsearch.action.update.UpdateHelper;
import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.index.query.*;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser;
-import org.elasticsearch.index.query.MoreLikeThisQueryParser;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.indices.analysis.HunspellService;
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
@@ -105,7 +103,6 @@ public class IndicesModule extends AbstractModule {
registerQueryParser(GeoBoundingBoxQueryParser.class);
registerQueryParser(GeohashCellQuery.Parser.class);
registerQueryParser(GeoPolygonQueryParser.class);
- registerQueryParser(QueryFilterParser.class);
registerQueryParser(ExistsQueryParser.class);
registerQueryParser(MissingQueryParser.class);
registerQueryParser(MatchNoneQueryParser.class);
diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
index 5e846420bd0..883add00665 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
@@ -44,7 +44,6 @@ import org.elasticsearch.index.*;
import org.elasticsearch.index.analysis.AnalysisModule;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
-import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.flush.FlushStats;
import org.elasticsearch.index.get.GetStats;
@@ -61,6 +60,7 @@ import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
+import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.plugins.PluginsService;
@@ -94,6 +94,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> implements Iterable<IndexService> {
private final PluginsService pluginsService;
private final NodeEnvironment nodeEnv;
private final TimeValue shardsClosedTimeout;
+ private final IndicesWarmer indicesWarmer;
+ private final IndicesQueryCache indicesQueryCache;
private volatile Map indices = emptyMap();
@@ -121,12 +123,14 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> implements Iterable<IndexService> {
private final IndexStoreConfig indexStoreConfig;
@Inject
- public IndicesService(Settings settings, IndicesAnalysisService indicesAnalysisService, Injector injector, PluginsService pluginsService, NodeEnvironment nodeEnv, NodeSettingsService nodeSettingsService) {
+ public IndicesService(Settings settings, IndicesAnalysisService indicesAnalysisService, Injector injector, PluginsService pluginsService, NodeEnvironment nodeEnv, NodeSettingsService nodeSettingsService, IndicesQueryCache indicesQueryCache, IndicesWarmer indicesWarmer) {
super(settings);
this.indicesAnalysisService = indicesAnalysisService;
this.injector = injector;
this.pluginsService = pluginsService;
this.nodeEnv = nodeEnv;
+ this.indicesWarmer = indicesWarmer;
+ this.indicesQueryCache = indicesQueryCache;
this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS));
this.indexStoreConfig = new IndexStoreConfig(settings);
nodeSettingsService.addListener(indexStoreConfig);
@@ -306,13 +310,12 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> implements Iterable<IndexService> {
for (Module pluginModule : pluginsService.indexModules(idxSettings.getSettings())) {
modules.add(pluginModule);
}
- final IndexModule indexModule = new IndexModule(idxSettings, indexStoreConfig);
+ final IndexModule indexModule = new IndexModule(idxSettings, indexStoreConfig, indicesQueryCache, indicesWarmer);
for (IndexEventListener listener : builtInListeners) {
indexModule.addIndexEventListener(listener);
}
indexModule.addIndexEventListener(oldShardsStats);
modules.add(new AnalysisModule(idxSettings.getSettings(), indicesAnalysisService));
- modules.add(new IndexCacheModule(idxSettings.getSettings()));
modules.add(indexModule);
pluginsService.processModules(modules);
final IndexEventListener listener = indexModule.freeze();
diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java
index e6b8d3a8387..eea4fb1753f 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java
@@ -19,23 +19,21 @@
package org.elasticsearch.indices;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.IndexShard;
-import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
/**
@@ -46,68 +44,46 @@ public final class IndicesWarmer extends AbstractComponent {
private final ThreadPool threadPool;
- private final ClusterService clusterService;
-
- private final IndicesService indicesService;
-
private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<>();
@Inject
- public IndicesWarmer(Settings settings, ThreadPool threadPool, ClusterService clusterService, IndicesService indicesService) {
+ public IndicesWarmer(Settings settings, ThreadPool threadPool) {
super(settings);
this.threadPool = threadPool;
- this.clusterService = clusterService;
- this.indicesService = indicesService;
}
public void addListener(Listener listener) {
listeners.add(listener);
}
-
public void removeListener(Listener listener) {
listeners.remove(listener);
}
- public void warmNewReaders(final WarmerContext context) {
- warmInternal(context, false);
- }
-
- public void warmTopReader(WarmerContext context) {
- warmInternal(context, true);
- }
-
- private void warmInternal(final WarmerContext context, boolean topReader) {
- final IndexMetaData indexMetaData = clusterService.state().metaData().index(context.shardId().index().name());
- if (indexMetaData == null) {
+ public void warm(Engine.Searcher searcher, IndexShard shard, IndexSettings settings, boolean isTopReader) {
+ if (shard.state() == IndexShardState.CLOSED) {
return;
}
- if (!indexMetaData.getSettings().getAsBoolean(INDEX_WARMER_ENABLED, settings.getAsBoolean(INDEX_WARMER_ENABLED, true))) {
- return;
- }
- IndexService indexService = indicesService.indexService(context.shardId().index().name());
- if (indexService == null) {
- return;
- }
- final IndexShard indexShard = indexService.getShardOrNull(context.shardId().id());
- if (indexShard == null) {
+ final IndexMetaData indexMetaData = settings.getIndexMetaData();
+ final Settings indexSettings = settings.getSettings();
+ if (!indexSettings.getAsBoolean(INDEX_WARMER_ENABLED, settings.getNodeSettings().getAsBoolean(INDEX_WARMER_ENABLED, true))) {
return;
}
if (logger.isTraceEnabled()) {
- if (topReader) {
- logger.trace("[{}][{}] top warming [{}]", context.shardId().index().name(), context.shardId().id(), context);
+ if (isTopReader) {
+ logger.trace("{} top warming [{}]", shard.shardId(), searcher.reader());
} else {
- logger.trace("[{}][{}] warming [{}]", context.shardId().index().name(), context.shardId().id(), context);
+ logger.trace("{} warming [{}]", shard.shardId(), searcher.reader());
}
}
- indexShard.warmerService().onPreWarm();
+ shard.warmerService().onPreWarm();
long time = System.nanoTime();
final List<TerminationHandle> terminationHandles = new ArrayList<>();
// get a handle on pending tasks
for (final Listener listener : listeners) {
- if (topReader) {
- terminationHandles.add(listener.warmTopReader(indexShard, indexMetaData, context, threadPool));
+ if (isTopReader) {
+ terminationHandles.add(listener.warmTopReader(shard, searcher));
} else {
- terminationHandles.add(listener.warmNewReaders(indexShard, indexMetaData, context, threadPool));
+ terminationHandles.add(listener.warmNewReaders(shard, searcher));
}
}
// wait for termination
@@ -116,7 +92,7 @@ public final class IndicesWarmer extends AbstractComponent {
terminationHandle.awaitTermination();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
- if (topReader) {
+ if (isTopReader) {
logger.warn("top warming has been interrupted", e);
} else {
logger.warn("warming has been interrupted", e);
@@ -125,69 +101,36 @@ public final class IndicesWarmer extends AbstractComponent {
}
}
long took = System.nanoTime() - time;
- indexShard.warmerService().onPostWarm(took);
- if (indexShard.warmerService().logger().isTraceEnabled()) {
- if (topReader) {
- indexShard.warmerService().logger().trace("top warming took [{}]", new TimeValue(took, TimeUnit.NANOSECONDS));
+ shard.warmerService().onPostWarm(took);
+ if (shard.warmerService().logger().isTraceEnabled()) {
+ if (isTopReader) {
+ shard.warmerService().logger().trace("top warming took [{}]", new TimeValue(took, TimeUnit.NANOSECONDS));
} else {
- indexShard.warmerService().logger().trace("warming took [{}]", new TimeValue(took, TimeUnit.NANOSECONDS));
+ shard.warmerService().logger().trace("warming took [{}]", new TimeValue(took, TimeUnit.NANOSECONDS));
}
}
}
+ /**
+ * Returns an executor for async warmer tasks
+ */
+ public Executor getExecutor() {
+ return threadPool.executor(ThreadPool.Names.WARMER);
+ }
+
/** A handle on the execution of warm-up action. */
public interface TerminationHandle {
- public static TerminationHandle NO_WAIT = new TerminationHandle() {
- @Override
- public void awaitTermination() {}
- };
+ TerminationHandle NO_WAIT = () -> {};
/** Wait until execution of the warm-up action completes. */
void awaitTermination() throws InterruptedException;
}
- public static abstract class Listener {
-
- public String executor() {
- return ThreadPool.Names.WARMER;
- }
-
+ public interface Listener {
/** Queue tasks to warm-up the given segments and return handles that allow to wait for termination of the execution of those tasks. */
- public abstract TerminationHandle warmNewReaders(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool);
+ TerminationHandle warmNewReaders(IndexShard indexShard, Engine.Searcher searcher);
- public abstract TerminationHandle warmTopReader(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool);
+ TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher);
}
- public static final class WarmerContext {
-
- private final ShardId shardId;
- private final Engine.Searcher searcher;
-
- public WarmerContext(ShardId shardId, Engine.Searcher searcher) {
- this.shardId = shardId;
- this.searcher = searcher;
- }
-
- public ShardId shardId() {
- return shardId;
- }
-
- /** Return a searcher instance that only wraps the segments to warm. */
- public Engine.Searcher searcher() {
- return searcher;
- }
-
- public IndexReader reader() {
- return searcher.reader();
- }
-
- public DirectoryReader getDirectoryReader() {
- return searcher.getDirectoryReader();
- }
-
- @Override
- public String toString() {
- return "WarmerContext: " + searcher.reader();
- }
- }
}
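
Under the slimmed-down Listener contract a warmer receives the shard and searcher directly, with no WarmerContext indirection. A minimal, hypothetical implementation for illustration:

    IndicesWarmer.Listener noopListener = new IndicesWarmer.Listener() {
        @Override
        public IndicesWarmer.TerminationHandle warmNewReaders(IndexShard indexShard, Engine.Searcher searcher) {
            return IndicesWarmer.TerminationHandle.NO_WAIT;
        }
        @Override
        public IndicesWarmer.TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher) {
            return IndicesWarmer.TerminationHandle.NO_WAIT;
        }
    };
    indicesWarmer.addListener(noopListener); // registered until removeListener(...)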
diff --git a/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java b/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java
index 30cd6de1233..148f7ba8bdb 100644
--- a/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java
+++ b/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java
@@ -21,6 +21,7 @@ package org.elasticsearch.indices.cache.query;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.LRUQueryCache;
import org.apache.lucene.search.Query;
@@ -256,6 +257,12 @@ public class IndicesQueryCache extends AbstractComponent implements QueryCache,
shardKeyMap.add(context.reader());
return in.scorer(context);
}
+
+ @Override
+ public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
+ shardKeyMap.add(context.reader());
+ return in.bulkScorer(context);
+ }
}
/** Clear all entries that belong to the given index. */
diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
index 42004c91dc1..554bcff4bf2 100644
--- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
+++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
@@ -27,6 +27,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction;
import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction;
+import org.elasticsearch.cluster.action.shard.NoOpShardStateActionListener;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
@@ -76,6 +77,8 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<IndicesClusterStateService> implements ClusterStateListener {
 private final ConcurrentMap<Tuple<String, String>, Boolean> seenMappings = ConcurrentCollections.newConcurrentMap();
@@ -473,7 +476,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<IndicesClusterStateService> implements ClusterStateListener {
 QueryBuilder<?> queryBuilder = RestActions.urlParamsToQueryBuilder(request);
if (queryBuilder != null) {
diff --git a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java
index e32e1954c9f..1ce78e33e3f 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java
@@ -29,7 +29,6 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.*;
import org.elasticsearch.rest.action.support.RestActions;
@@ -68,9 +67,7 @@ public class RestCountAction extends BaseRestHandler {
countRequest.source(searchSourceBuilder);
if (RestActions.hasBodyContent(request)) {
BytesReference restContent = RestActions.getRestContent(request);
- QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
- context.parseFieldMatcher(parseFieldMatcher);
- searchSourceBuilder.query(RestActions.getQueryContent(restContent, context));
+ searchSourceBuilder.query(RestActions.getQueryContent(restContent, indicesQueriesRegistry, parseFieldMatcher));
} else {
QueryBuilder<?> queryBuilder = RestActions.urlParamsToQueryBuilder(request);
if (queryBuilder != null) {
diff --git a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java
index 7c01fddf3cc..086446fc53f 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java
@@ -22,17 +22,16 @@ package org.elasticsearch.rest.action.explain;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainResponse;
-import org.elasticsearch.action.support.QuerySourceBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.get.GetResult;
-import org.elasticsearch.index.query.Operator;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.query.QueryStringQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.*;
import org.elasticsearch.rest.action.support.RestActions;
import org.elasticsearch.rest.action.support.RestBuilderListener;
@@ -50,9 +49,12 @@ import static org.elasticsearch.rest.RestStatus.OK;
*/
public class RestExplainAction extends BaseRestHandler {
+ private final IndicesQueriesRegistry indicesQueriesRegistry;
+
@Inject
- public RestExplainAction(Settings settings, RestController controller, Client client) {
+ public RestExplainAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) {
super(settings, controller, client);
+ this.indicesQueriesRegistry = indicesQueriesRegistry;
controller.registerHandler(GET, "/{index}/{type}/{id}/_explain", this);
controller.registerHandler(POST, "/{index}/{type}/{id}/_explain", this);
}
@@ -65,22 +67,11 @@ public class RestExplainAction extends BaseRestHandler {
explainRequest.preference(request.param("preference"));
String queryString = request.param("q");
if (RestActions.hasBodyContent(request)) {
- explainRequest.source(RestActions.getRestContent(request));
+ BytesReference restContent = RestActions.getRestContent(request);
+ explainRequest.query(RestActions.getQueryContent(restContent, indicesQueriesRegistry, parseFieldMatcher));
} else if (queryString != null) {
- QueryStringQueryBuilder queryStringBuilder = QueryBuilders.queryStringQuery(queryString);
- queryStringBuilder.defaultField(request.param("df"));
- queryStringBuilder.analyzer(request.param("analyzer"));
- queryStringBuilder.analyzeWildcard(request.paramAsBoolean("analyze_wildcard", false));
- queryStringBuilder.lowercaseExpandedTerms(request.paramAsBoolean("lowercase_expanded_terms", true));
- queryStringBuilder.lenient(request.paramAsBoolean("lenient", null));
- String defaultOperator = request.param("default_operator");
- if (defaultOperator != null) {
- queryStringBuilder.defaultOperator(Operator.fromString(defaultOperator));
- }
-
- QuerySourceBuilder querySourceBuilder = new QuerySourceBuilder();
- querySourceBuilder.setQuery(queryStringBuilder);
- explainRequest.source(querySourceBuilder);
+ QueryBuilder<?> query = RestActions.urlParamsToQueryBuilder(request);
+ explainRequest.query(query);
}
String sField = request.param("fields");
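
With this change the explain endpoint parses the request body into a QueryBuilder up front instead of forwarding raw source bytes. A minimal sketch of building the request programmatically under the new API; the index/type/id coordinates and the term query field/value below are invented for illustration:

    import org.elasticsearch.action.explain.ExplainRequest;
    import org.elasticsearch.index.query.QueryBuilders;

    final class ExplainRequestSketch {
        static ExplainRequest explainExample() {
            // coordinates and field/value here are made up for illustration
            ExplainRequest request = new ExplainRequest("index", "type", "1");
            // the request now carries a parsed QueryBuilder, not raw source bytes
            request.query(QueryBuilders.termQuery("user", "kimchy"));
            return request;
        }
    }
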
diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java
index e788f044237..14935f5f9a5 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java
@@ -27,17 +27,8 @@ import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.uid.Versions;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.query.Operator;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.index.query.QueryStringQueryBuilder;
+import org.elasticsearch.common.xcontent.*;
+import org.elasticsearch.index.query.*;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.search.builder.SearchSourceBuilder;
@@ -142,14 +133,12 @@ public class RestActions {
return content;
}
- public static QueryBuilder<?> getQueryContent(BytesReference source, QueryParseContext context) {
+ public static QueryBuilder<?> getQueryContent(BytesReference source, IndicesQueriesRegistry indicesQueriesRegistry, ParseFieldMatcher parseFieldMatcher) {
+ QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
try (XContentParser requestParser = XContentFactory.xContent(source).createParser(source)) {
- // Save the parseFieldMatcher because its about to be trashed in the
- // QueryParseContext
- ParseFieldMatcher parseFieldMatcher = context.parseFieldMatcher();
context.reset(requestParser);
context.parseFieldMatcher(parseFieldMatcher);
- return context.parseInnerQueryBuilder();
+ return context.parseTopLevelQueryBuilder();
} catch (IOException e) {
throw new ElasticsearchException("failed to parse source", e);
} finally {
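
The helper now owns QueryParseContext construction, so REST handlers just hand over the raw bytes plus the registry and matcher. A minimal sketch of the new call shape; the wrapper class and method names are illustrative, not part of the change:

    import org.elasticsearch.common.ParseFieldMatcher;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.indices.query.IndicesQueriesRegistry;
    import org.elasticsearch.rest.action.support.RestActions;

    final class QueryContentSketch {
        static QueryBuilder<?> parse(BytesReference body, IndicesQueriesRegistry registry, ParseFieldMatcher matcher) {
            // the context is created, reset and configured inside the helper;
            // the body is parsed as a top level query, i.e. {"query": {...}}
            return RestActions.getQueryContent(body, registry, matcher);
        }
    }
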
diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java
index 054cc5b9500..b84a5804c05 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchModule.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java
@@ -156,7 +156,6 @@ import java.util.Set;
*/
public class SearchModule extends AbstractModule {
- private final Settings settings;
private final Set<Class<? extends Aggregator.Parser>> aggParsers = new HashSet<>();
private final Set<Class<? extends PipelineAggregator.Parser>> pipelineAggParsers = new HashSet<>();
private final Highlighters highlighters = new Highlighters();
@@ -169,19 +168,6 @@ public class SearchModule extends AbstractModule {
// pkg private so tests can mock
Class<? extends SearchService> searchServiceImpl = SearchService.class;
- public SearchModule(Settings settings) {
- this.settings = settings;
- }
-
- // TODO document public API
- public void registerStream(SignificanceHeuristicStreams.Stream stream) {
- SignificanceHeuristicStreams.registerStream(stream);
- }
-
- public void registerStream(MovAvgModelStreams.Stream stream) {
- MovAvgModelStreams.registerStream(stream);
- }
-
public void registerHighlighter(String key, Class<? extends Highlighter> clazz) {
highlighters.registerExtension(key, clazz);
}
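
After removing the Settings constructor and the stream-registration hooks, highlighter registration is the extension surface that remains visible here. A hedged sketch of how a module consumer would use it; the key and the Highlighter subclass are hypothetical:

    import org.elasticsearch.search.SearchModule;
    import org.elasticsearch.search.highlight.Highlighter;

    final class HighlighterRegistrationSketch {
        // "my-highlighter" and the Highlighter subclass are made-up examples
        static void register(SearchModule module, Class<? extends Highlighter> highlighterClass) {
            module.registerHighlighter("my-highlighter", highlighterClass);
        }
    }
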
diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java
index 4deb13ca4a5..eb993f45e21 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchService.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchService.java
@@ -68,7 +68,6 @@ import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.IndicesWarmer.TerminationHandle;
-import org.elasticsearch.indices.IndicesWarmer.WarmerContext;
import org.elasticsearch.indices.cache.request.IndicesRequestCache;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.script.ExecutableScript;
@@ -180,8 +179,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
this.keepAliveReaper = threadPool.scheduleWithFixedDelay(new Reaper(), keepAliveInterval);
- this.indicesWarmer.addListener(new NormsWarmer());
- this.indicesWarmer.addListener(new FieldDataWarmer());
+ this.indicesWarmer.addListener(new NormsWarmer(indicesWarmer));
+ this.indicesWarmer.addListener(new FieldDataWarmer(indicesWarmer));
this.indicesWarmer.addListener(new SearchWarmer());
defaultSearchTimeout = settings.getAsTime(DEFAULT_SEARCH_TIMEOUT, NO_TIMEOUT);
@@ -949,11 +948,15 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
return this.activeContexts.size();
}
- static class NormsWarmer extends IndicesWarmer.Listener {
+ static class NormsWarmer implements IndicesWarmer.Listener {
+ private final IndicesWarmer indicesWarmer;
+ public NormsWarmer(IndicesWarmer indicesWarmer) {
+ this.indicesWarmer = indicesWarmer;
+ }
@Override
- public TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
- final Loading defaultLoading = Loading.parse(indexMetaData.getSettings().get(NORMS_LOADING_KEY), Loading.LAZY);
+ public TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) {
+ final Loading defaultLoading = Loading.parse(indexShard.getIndexSettings().getSettings().get(NORMS_LOADING_KEY), Loading.LAZY);
final MapperService mapperService = indexShard.mapperService();
final ObjectSet<String> warmUp = new ObjectHashSet<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
@@ -971,14 +974,14 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
final CountDownLatch latch = new CountDownLatch(1);
// Norms loading may be I/O intensive but is not CPU intensive, so we execute it in a single task
- threadPool.executor(executor()).execute(new Runnable() {
+ indicesWarmer.getExecutor().execute(new Runnable() {
@Override
public void run() {
try {
for (ObjectCursor<String> stringObjectCursor : warmUp) {
final String indexName = stringObjectCursor.value;
final long start = System.nanoTime();
- for (final LeafReaderContext ctx : context.searcher().reader().leaves()) {
+ for (final LeafReaderContext ctx : searcher.reader().leaves()) {
final NumericDocValues values = ctx.reader().getNormValues(indexName);
if (values != null) {
values.get(0);
@@ -1005,15 +1008,21 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
@Override
- public TerminationHandle warmTopReader(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool) {
+ public TerminationHandle warmTopReader(IndexShard indexShard, final Engine.Searcher searcher) {
return TerminationHandle.NO_WAIT;
}
}
- static class FieldDataWarmer extends IndicesWarmer.Listener {
+ static class FieldDataWarmer implements IndicesWarmer.Listener {
+
+ private final IndicesWarmer indicesWarmer;
+
+ public FieldDataWarmer(IndicesWarmer indicesWarmer) {
+ this.indicesWarmer = indicesWarmer;
+ }
@Override
- public TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
+ public TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) {
final MapperService mapperService = indexShard.mapperService();
final Map<String, MappedFieldType> warmUp = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
@@ -1048,9 +1057,9 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
}
final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
- final Executor executor = threadPool.executor(executor());
- final CountDownLatch latch = new CountDownLatch(context.searcher().reader().leaves().size() * warmUp.size());
- for (final LeafReaderContext ctx : context.searcher().reader().leaves()) {
+ final Executor executor = indicesWarmer.getExecutor();
+ final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size() * warmUp.size());
+ for (final LeafReaderContext ctx : searcher.reader().leaves()) {
for (final MappedFieldType fieldType : warmUp.values()) {
executor.execute(new Runnable() {
@@ -1081,7 +1090,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
@Override
- public TerminationHandle warmTopReader(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
+ public TerminationHandle warmTopReader(final IndexShard indexShard, final Engine.Searcher searcher) {
final MapperService mapperService = indexShard.mapperService();
final Map<String, MappedFieldType> warmUpGlobalOrdinals = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
@@ -1114,7 +1123,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
}
final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
- final Executor executor = threadPool.executor(executor());
+ final Executor executor = indicesWarmer.getExecutor();
final CountDownLatch latch = new CountDownLatch(warmUpGlobalOrdinals.size());
for (final MappedFieldType fieldType : warmUpGlobalOrdinals.values()) {
executor.execute(new Runnable() {
@@ -1123,7 +1132,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
try {
final long start = System.nanoTime();
IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldType);
- ifd.loadGlobal(context.getDirectoryReader());
+ ifd.loadGlobal(searcher.getDirectoryReader());
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldType.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
@@ -1144,83 +1153,73 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
}
- class SearchWarmer extends IndicesWarmer.Listener {
+ class SearchWarmer implements IndicesWarmer.Listener {
@Override
- public TerminationHandle warmNewReaders(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool) {
- return internalWarm(indexShard, indexMetaData, context, threadPool, false);
+ public TerminationHandle warmNewReaders(IndexShard indexShard, final Engine.Searcher searcher) {
+ return internalWarm(indexShard, searcher, false);
}
@Override
- public TerminationHandle warmTopReader(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool) {
- return internalWarm(indexShard, indexMetaData, context, threadPool, true);
+ public TerminationHandle warmTopReader(IndexShard indexShard, final Engine.Searcher searcher) {
+ return internalWarm(indexShard, searcher, true);
}
- public TerminationHandle internalWarm(final IndexShard indexShard, final IndexMetaData indexMetaData, final IndicesWarmer.WarmerContext warmerContext, ThreadPool threadPool, final boolean top) {
- IndexWarmersMetaData custom = indexMetaData.custom(IndexWarmersMetaData.TYPE);
+ public TerminationHandle internalWarm(final IndexShard indexShard, final Engine.Searcher searcher, final boolean top) {
+ IndexWarmersMetaData custom = indexShard.getIndexSettings().getIndexMetaData().custom(IndexWarmersMetaData.TYPE);
if (custom == null) {
return TerminationHandle.NO_WAIT;
}
- final Executor executor = threadPool.executor(executor());
+ final Executor executor = indicesWarmer.getExecutor();
final CountDownLatch latch = new CountDownLatch(custom.entries().size());
for (final IndexWarmersMetaData.Entry entry : custom.entries()) {
- executor.execute(new Runnable() {
-
- @Override
- public void run() {
- SearchContext context = null;
+ executor.execute(() -> {
+ SearchContext context = null;
+ try {
+ long now = System.nanoTime();
+ final IndexService indexService = indicesService.indexServiceSafe(indexShard.shardId().index().name());
+ QueryParseContext queryParseContext = new QueryParseContext(indexService.queryParserService().indicesQueriesRegistry());
+ queryParseContext.parseFieldMatcher(indexService.queryParserService().parseFieldMatcher());
+ ShardSearchRequest request = new ShardSearchLocalRequest(indexShard.shardId(), indexShard.getIndexSettings()
+ .getNumberOfShards(),
+ SearchType.QUERY_THEN_FETCH, entry.source().build(queryParseContext), entry.types(), entry.requestCache());
+ context = createContext(request, searcher);
+ // if we use sort, we need to run the query to sort on
+ // it and load relevant field data
+ // if not, we might as well set size=0 (and cache
+ // if needed)
+ if (context.sort() == null) {
+ context.size(0);
+ }
+ boolean canCache = indicesQueryCache.canCache(request, context);
+ // early terminate when we can cache, since we
+ // can only do proper caching on top level searcher
+ // also, if we can't cache, and it's top, we don't
+ // need to execute it, since we already did when it's
+ // not top
+ if (canCache != top) {
+ return;
+ }
+ loadOrExecuteQueryPhase(request, context, queryPhase);
+ long took = System.nanoTime() - now;
+ if (indexShard.warmerService().logger().isTraceEnabled()) {
+ indexShard.warmerService().logger().trace("warmed [{}], took [{}]", entry.name(), TimeValue.timeValueNanos(took));
+ }
+ } catch (Throwable t) {
+ indexShard.warmerService().logger().warn("warmer [{}] failed", t, entry.name());
+ } finally {
try {
- long now = System.nanoTime();
- final IndexService indexService = indicesService.indexServiceSafe(indexShard.shardId().index().name());
- QueryParseContext queryParseContext = new QueryParseContext(indexService.queryParserService().indicesQueriesRegistry());
- queryParseContext.parseFieldMatcher(indexService.queryParserService().parseFieldMatcher());
- ShardSearchRequest request = new ShardSearchLocalRequest(indexShard.shardId(), indexMetaData
- .getNumberOfShards(),
- SearchType.QUERY_THEN_FETCH, entry.source().build(queryParseContext), entry.types(), entry.requestCache());
- context = createContext(request, warmerContext.searcher());
- // if we use sort, we need to do query to sort on
- // it and load relevant field data
- // if not, we might as well set size=0 (and cache
- // if needed)
- if (context.sort() == null) {
- context.size(0);
+ if (context != null) {
+ freeContext(context.id());
+ cleanContext(context);
}
- boolean canCache = indicesQueryCache.canCache(request, context);
- // early terminate when we can cache, since we
- // can only do proper caching on top level searcher
- // also, if we can't cache, and its top, we don't
- // need to execute it, since we already did when its
- // not top
- if (canCache != top) {
- return;
- }
- loadOrExecuteQueryPhase(request, context, queryPhase);
- long took = System.nanoTime() - now;
- if (indexShard.warmerService().logger().isTraceEnabled()) {
- indexShard.warmerService().logger().trace("warmed [{}], took [{}]", entry.name(), TimeValue.timeValueNanos(took));
- }
- } catch (Throwable t) {
- indexShard.warmerService().logger().warn("warmer [{}] failed", t, entry.name());
} finally {
- try {
- if (context != null) {
- freeContext(context.id());
- cleanContext(context);
- }
- } finally {
- latch.countDown();
- }
+ latch.countDown();
}
}
-
});
}
- return new TerminationHandle() {
- @Override
- public void awaitTermination() throws InterruptedException {
- latch.await();
- }
- };
+ return latch::await;
}
}
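
The warmers above illustrate the new contract: IndicesWarmer.Listener became an interface whose methods receive the Engine.Searcher directly, and implementations pull the executor from the IndicesWarmer they were constructed with instead of receiving IndexMetaData, WarmerContext and ThreadPool arguments. A do-nothing sketch of the post-change shape:

    import org.elasticsearch.index.engine.Engine;
    import org.elasticsearch.index.shard.IndexShard;
    import org.elasticsearch.indices.IndicesWarmer;
    import org.elasticsearch.indices.IndicesWarmer.TerminationHandle;

    // a no-op listener, just to show the post-change signatures
    final class NoopWarmerListener implements IndicesWarmer.Listener {
        @Override
        public TerminationHandle warmNewReaders(IndexShard indexShard, Engine.Searcher searcher) {
            return TerminationHandle.NO_WAIT;
        }

        @Override
        public TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher) {
            return TerminationHandle.NO_WAIT;
        }
    }
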
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java
index 0ee9b028067..343d335cfa2 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java
@@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.LongArray;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java
index 385c328873e..97c68be3cb4 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java
@@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
@@ -163,7 +163,7 @@ public class GeoHashGridParser implements Aggregator.Parser {
resize(geoValues.count());
for (int i = 0; i < count(); ++i) {
GeoPoint target = geoValues.valueAt(i);
- values[i] = XGeoHashUtils.longEncode(target.getLon(), target.getLat(), precision);
+ values[i] = GeoHashUtils.longEncode(target.getLon(), target.getLat(), precision);
}
sort();
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java
index 161fb2dd2ad..75d089ebbc8 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java
@@ -19,7 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.apache.lucene.util.PriorityQueue;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -101,7 +101,7 @@ public class InternalGeoHashGrid extends InternalMultiBucketAggregation<InternalGeoHashGrid, InternalGeoHashGrid.Bucket> {
List<String> fieldNames = new ArrayList<>();
fieldNames.add(parser.text());
@@ -742,8 +741,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (context.parseFieldMatcher().match(currentFieldName, POST_FILTER_FIELD)) {
builder.postQueryBuilder = context.parseInnerQueryBuilder();
} else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
- FetchSourceContext fetchSourceContext = FetchSourceContext.parse(parser, context);
- builder.fetchSourceContext = fetchSourceContext;
+ builder.fetchSourceContext = FetchSourceContext.parse(parser, context);
} else if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELDS_FIELD)) {
List<ScriptField> scriptFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -886,8 +884,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
}
builder.stats = stats;
} else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
- FetchSourceContext fetchSourceContext = FetchSourceContext.parse(parser, context);
- builder.fetchSourceContext = fetchSourceContext;
+ builder.fetchSourceContext = FetchSourceContext.parse(parser, context);
} else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
parser.getTokenLocation());
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java
index 7f1e19b3dba..b321b574d6a 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java
@@ -164,7 +164,7 @@ public class HighlightBuilder implements ToXContent {
}
/**
- * Set this to true when using the highlighterType fast-vector-highlighter
+ * Set this to true when using the highlighterType fvh
* and you want to provide highlighting on filter clauses in your
* query. Default is false.
*/
@@ -237,7 +237,7 @@ public class HighlightBuilder implements ToXContent {
}
/**
- * When using the highlighterType fast-vector-highlighter this setting
+ * When using the highlighterType fvh this setting
* controls how far to look for boundary characters, and defaults to 20.
*/
public HighlightBuilder boundaryMaxScan(Integer boundaryMaxScan) {
@@ -246,7 +246,7 @@ public class HighlightBuilder implements ToXContent {
}
/**
- * When using the highlighterType fast-vector-highlighter this setting
+ * When using the highlighterType fvh this setting
* defines what constitutes a boundary for highlighting. It’s a single string with
* each boundary character defined in it. It defaults to .,!? \t\n
*/
@@ -256,8 +256,8 @@ public class HighlightBuilder implements ToXContent {
}
/**
- * Set type of highlighter to use. Supported types
- * are highlighter , fast-vector-highlighter and postings-highlighter .
+ * Set type of highlighter to use. Out of the box supported types
+ * are plain, fvh and postings.
* The default option selected is dependent on the mappings defined for your index.
* Details of the different highlighter types are covered in the reference guide.
*/
@@ -568,8 +568,8 @@ public class HighlightBuilder implements ToXContent {
}
/**
- * Set type of highlighter to use. Supported types
- * are highlighter , fast-vector-highlighter nad postings-highlighter .
+ * Set type of highlighter to use. Out of the box supported types
+ * are plain, fvh and postings.
* This overrides global settings set by {@link HighlightBuilder#highlighterType(String)}.
*/
public Field highlighterType(String highlighterType) {
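
Since only the short names survive this cleanup, client code selects a highlighter type like the sketch below; the field name is hypothetical:

    import org.elasticsearch.search.highlight.HighlightBuilder;

    final class HighlightTypeSketch {
        static HighlightBuilder fvh() {
            // "content" is an example field; valid built-in types are plain, fvh, postings
            return new HighlightBuilder().field("content").highlighterType("fvh");
        }
    }
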
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java b/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java
index 1e519957aac..54366bee8c9 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/Highlighters.java
@@ -19,8 +19,6 @@
package org.elasticsearch.search.highlight;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint;
@@ -31,26 +29,18 @@ import java.util.*;
*/
public class Highlighters extends ExtensionPoint.ClassMap<Highlighter> {
- @Deprecated // remove in 3.0
- private static final String FAST_VECTOR_HIGHLIGHTER = "fast-vector-highlighter";
private static final String FVH = "fvh";
- @Deprecated // remove in 3.0
- private static final String HIGHLIGHTER = "highlighter";
private static final String PLAIN = "plain";
- @Deprecated // remove in 3.0
- private static final String POSTINGS_HIGHLIGHTER = "postings-highlighter";
private static final String POSTINGS = "postings";
-
private final Map<String, Highlighter> parsers;
- private final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Highlighters.class.getName()));
public Highlighters(){
- this(Collections.EMPTY_MAP);
+ this(Collections.emptyMap());
}
private Highlighters(Map<String, Highlighter> parsers) {
- super("highlighter", Highlighter.class, new HashSet<>(Arrays.asList(FVH, FAST_VECTOR_HIGHLIGHTER, PLAIN, HIGHLIGHTER, POSTINGS, POSTINGS_HIGHLIGHTER)),
+ super("highlighter", Highlighter.class, new HashSet<>(Arrays.asList(FVH, PLAIN, POSTINGS)),
Highlighters.class);
this.parsers = Collections.unmodifiableMap(parsers);
}
@@ -61,31 +51,15 @@ public class Highlighters extends ExtensionPoint.ClassMap<Highlighter> {
}
private static Map<String, Highlighter> addBuiltIns(Settings settings, Map<String, Highlighter> parsers) {
- // build in highlighers
Map<String, Highlighter> map = new HashMap<>();
map.put(FVH, new FastVectorHighlighter(settings));
- map.put(FAST_VECTOR_HIGHLIGHTER, map.get(FVH));
map.put(PLAIN, new PlainHighlighter());
- map.put(HIGHLIGHTER, map.get(PLAIN));
map.put(POSTINGS, new PostingsHighlighter());
- map.put(POSTINGS_HIGHLIGHTER, map.get(POSTINGS));
map.putAll(parsers);
return map;
}
public Highlighter get(String type) {
- switch (type) {
- case FAST_VECTOR_HIGHLIGHTER:
- deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", FAST_VECTOR_HIGHLIGHTER, FVH);
- break;
- case HIGHLIGHTER:
- deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", HIGHLIGHTER, PLAIN);
- break;
- case POSTINGS_HIGHLIGHTER:
- deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", POSTINGS_HIGHLIGHTER, POSTINGS);
- break;
- }
return parsers.get(type);
}
-
}
diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java
index c4aa23fc2a6..44fae80a020 100644
--- a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java
@@ -92,7 +92,6 @@ public class QueryPhase implements SearchPhase {
parseElements.put("query", new QueryParseElement());
parseElements.put("queryBinary", new QueryBinaryParseElement());
parseElements.put("query_binary", new QueryBinaryParseElement());
- parseElements.put("filter", new PostFilterParseElement()); // For bw comp reason, should be removed in version 1.1
parseElements.put("post_filter", new PostFilterParseElement());
parseElements.put("postFilter", new PostFilterParseElement());
parseElements.put("filterBinary", new FilterBinaryParseElement());
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java
index 8470633fdc5..d5e4f7cf90d 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java
@@ -19,7 +19,6 @@
package org.elasticsearch.search.suggest.completion;
import org.elasticsearch.common.HasContextAndHeaders;
-import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -43,7 +42,6 @@ import static org.elasticsearch.search.suggest.SuggestUtils.parseSuggestContext;
public class CompletionSuggestParser implements SuggestContextParser {
private CompletionSuggester completionSuggester;
- private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("edit_distance");
public CompletionSuggestParser(CompletionSuggester completionSuggester) {
this.completionSuggester = completionSuggester;
@@ -75,7 +73,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) {
fuzzyConfigName = parser.currentName();
} else if (token.isValue()) {
- if (queryParserService.parseFieldMatcher().match(fuzzyConfigName, FUZZINESS)) {
+ if (queryParserService.parseFieldMatcher().match(fuzzyConfigName, Fuzziness.FIELD)) {
suggestion.setFuzzyEditDistance(Fuzziness.parse(parser).asDistance());
} else if ("transpositions".equals(fuzzyConfigName)) {
suggestion.setFuzzyTranspositions(parser.booleanValue());
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/context/GeolocationContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/context/GeolocationContextMapping.java
index f2d168f5532..a3a4a3f6696 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/context/GeolocationContextMapping.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/context/GeolocationContextMapping.java
@@ -24,7 +24,7 @@ import org.apache.lucene.analysis.PrefixAnalyzer.PrefixTokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Operations;
@@ -227,7 +227,7 @@ public class GeolocationContextMapping extends ContextMapping {
if(parser.nextToken() == Token.VALUE_NUMBER) {
double lat = parser.doubleValue();
if(parser.nextToken() == Token.END_ARRAY) {
- return Collections.singleton(XGeoHashUtils.stringEncode(lon, lat));
+ return Collections.singleton(GeoHashUtils.stringEncode(lon, lat));
} else {
throw new ElasticsearchParseException("only two values expected");
}
@@ -294,7 +294,7 @@ public class GeolocationContextMapping extends ContextMapping {
* @return new geolocation query
*/
public static GeoQuery query(String name, double lat, double lon, int ... precisions) {
- return query(name, XGeoHashUtils.stringEncode(lon, lat), precisions);
+ return query(name, GeoHashUtils.stringEncode(lon, lat), precisions);
}
public static GeoQuery query(String name, double lat, double lon, String ... precisions) {
@@ -302,7 +302,7 @@ public class GeolocationContextMapping extends ContextMapping {
for (int i = 0 ; i < precisions.length; i++) {
precisionInts[i] = GeoUtils.geoHashLevelsForPrecision(precisions[i]);
}
- return query(name, XGeoHashUtils.stringEncode(lon, lat), precisionInts);
+ return query(name, GeoHashUtils.stringEncode(lon, lat), precisionInts);
}
/**
@@ -574,7 +574,7 @@ public class GeolocationContextMapping extends ContextMapping {
* @return this
*/
public Builder addDefaultLocation(double lat, double lon) {
- this.defaultLocations.add(XGeoHashUtils.stringEncode(lon, lat));
+ this.defaultLocations.add(GeoHashUtils.stringEncode(lon, lat));
return this;
}
@@ -604,7 +604,7 @@ public class GeolocationContextMapping extends ContextMapping {
@Override
public GeolocationContextMapping build() {
if(precisions.isEmpty()) {
- precisions.add(XGeoHashUtils.PRECISION);
+ precisions.add(GeoHashUtils.PRECISION);
}
int[] precisionArray = precisions.toArray();
Arrays.sort(precisionArray);
@@ -670,7 +670,7 @@ public class GeolocationContextMapping extends ContextMapping {
int precision = Math.min(p, geohash.length());
String truncatedGeohash = geohash.substring(0, precision);
if(mapping.neighbors) {
- XGeoHashUtils.addNeighbors(truncatedGeohash, precision, locations);
+ GeoHashUtils.addNeighbors(truncatedGeohash, precision, locations);
}
locations.add(truncatedGeohash);
}
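
The rename from the forked XGeoHashUtils to Lucene's GeoHashUtils keeps the (lon, lat) argument order used at every call site above. A small usage sketch; the wrapper methods are ours, only the two GeoHashUtils calls come from the patch:

    import org.apache.lucene.util.GeoHashUtils;

    final class GeoHashSketch {
        static String cell(double lat, double lon) {
            // note: longitude first, matching the call sites in this patch
            return GeoHashUtils.stringEncode(lon, lat);
        }

        static long cellAtPrecision(double lat, double lon, int precision) {
            return GeoHashUtils.longEncode(lon, lat, precision);
        }
    }
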
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
index 7e7f347ce1b..a04117bff11 100644
--- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
@@ -31,46 +31,12 @@ grant codeBase "file:${{java.ext.dirs}}/*" {
//// Very special jar permissions:
//// These are dangerous permissions that we don't want to grant to everything.
-grant codeBase "${es.security.jar.lucene.core}" {
+grant codeBase "${codebase.lucene-core-5.4.0-snapshot-1710880.jar}" {
// needed to allow MMapDirectory's "unmap hack"
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
-//// test framework permissions.
-//// These are mock objects and test management that we allow test framework libs
-//// to provide on our behalf. But tests themselves cannot do this stuff!
-
-grant codeBase "${es.security.jar.elasticsearch.securemock}" {
- // needed to access ReflectionFactory (see below)
- permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect";
- // needed to support creation of mocks
- permission java.lang.RuntimePermission "reflectionFactoryAccess";
- // needed for spy interception, etc
- permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
-};
-
-grant codeBase "${es.security.jar.lucene.testframework}" {
- // needed by RamUsageTester
- permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
-};
-
-grant codeBase "${es.security.jar.randomizedtesting.runner}" {
- // optionally needed for access to private test methods (e.g. beforeClass)
- permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
-
- // needed for top threads handling
- permission java.lang.RuntimePermission "modifyThreadGroup";
-};
-
-grant codeBase "${es.security.jar.randomizedtesting.junit4}" {
- // needed for gson serialization
- permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
-
- // needed for stream redirection
- permission java.lang.RuntimePermission "setIO";
-};
-
//// Everything else:
grant {
@@ -107,10 +73,6 @@ grant {
// otherwise can be provided only to test libraries
permission java.lang.RuntimePermission "getStackTrace";
- // needed by ESTestCase for leniency of thread exceptions (?!)
- // otherwise can be provided only to test libraries
- permission java.lang.RuntimePermission "setDefaultUncaughtExceptionHandler";
-
// needed by JMX instead of getFileSystemAttributes, seems like a bug...
permission java.lang.RuntimePermission "getFileStoreAttributes";
@@ -126,10 +88,4 @@ grant {
// needed by JDKESLoggerTests
permission java.util.logging.LoggingPermission "control";
-
- // needed to install SSLFactories, advanced SSL configuration, etc.
- permission java.lang.RuntimePermission "setFactory";
-
- // needed to allow installation of bouncycastle crypto provider
- permission java.security.SecurityPermission "putProviderProperty.BC";
};
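
Because readPolicy publishes each codebase under a ${codebase.<jar-name>} system property while parsing, a plugin's policy file can scope its grants to its own jars by short name. A hypothetical plugin-security.policy; the jar name and the permission are invented for illustration:

    // hypothetical example; "my-plugin-1.0.jar" is an assumed artifact name
    grant codeBase "${codebase.my-plugin-1.0.jar}" {
      permission java.lang.RuntimePermission "accessDeclaredMembers";
    };
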
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
new file mode 100644
index 00000000000..c9bde842fe5
--- /dev/null
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+//// additional test framework permissions.
+//// These are mock objects and test management that we allow test framework libs
+//// to provide on our behalf. But tests themselves cannot do this stuff!
+
+grant codeBase "${codebase.securemock-1.1.jar}" {
+ // needed to access ReflectionFactory (see below)
+ permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect";
+ // needed to support creation of mocks
+ permission java.lang.RuntimePermission "reflectionFactoryAccess";
+ // needed for spy interception, etc
+ permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
+};
+
+grant codeBase "${codebase.lucene-test-framework-5.4.0-snapshot-1710880.jar}" {
+ // needed by RamUsageTester
+ permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
+};
+
+grant codeBase "${codebase.randomizedtesting-runner-2.2.0.jar}" {
+ // optionally needed for access to private test methods (e.g. beforeClass)
+ permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
+ // needed to fail tests on uncaught exceptions from other threads
+ permission java.lang.RuntimePermission "setDefaultUncaughtExceptionHandler";
+ // needed for top threads handling
+ permission java.lang.RuntimePermission "modifyThreadGroup";
+};
+
+grant codeBase "${codebase.junit4-ant-2.2.0.jar}" {
+ // needed for stream redirection
+ permission java.lang.RuntimePermission "setIO";
+};
diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
index b015614e32e..8e4ef817f37 100644
--- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
@@ -367,7 +367,7 @@ public class TransportReplicationActionTests extends ESTestCase {
}
int pending = replicationPhase.pending();
int criticalFailures = 0; // failures that should fail the shard
- int successfull = 1;
+ int successful = 1;
for (CapturingTransport.CapturedRequest capturedRequest : capturedRequests) {
if (randomBoolean()) {
Throwable t;
@@ -380,19 +380,19 @@ public class TransportReplicationActionTests extends ESTestCase {
logger.debug("--> simulating failure on {} with [{}]", capturedRequest.node, t.getClass().getSimpleName());
transport.handleResponse(capturedRequest.requestId, t);
} else {
- successfull++;
+ successful++;
transport.handleResponse(capturedRequest.requestId, TransportResponse.Empty.INSTANCE);
}
pending--;
assertThat(replicationPhase.pending(), equalTo(pending));
- assertThat(replicationPhase.successful(), equalTo(successfull));
+ assertThat(replicationPhase.successful(), equalTo(successful));
}
assertThat(listener.isDone(), equalTo(true));
Response response = listener.get();
final ActionWriteResponse.ShardInfo shardInfo = response.getShardInfo();
assertThat(shardInfo.getFailed(), equalTo(criticalFailures));
assertThat(shardInfo.getFailures(), arrayWithSize(criticalFailures));
- assertThat(shardInfo.getSuccessful(), equalTo(successfull));
+ assertThat(shardInfo.getSuccessful(), equalTo(successful));
assertThat(shardInfo.getTotal(), equalTo(totalShards));
assertThat("failed to see enough shard failures", transport.capturedRequests().length, equalTo(criticalFailures));
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
index 3d19c5fb296..c6d02c8d89b 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
+++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
@@ -19,28 +19,37 @@
package org.elasticsearch.bootstrap;
+import com.carrotsearch.randomizedtesting.RandomizedRunner;
+
+import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestSecurityManager;
import org.elasticsearch.bootstrap.Bootstrap;
import org.elasticsearch.bootstrap.ESPolicy;
import org.elasticsearch.bootstrap.Security;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.plugins.PluginInfo;
+import org.junit.Assert;
import java.io.FilePermission;
import java.io.InputStream;
-import java.net.URI;
import java.net.URL;
import java.nio.file.Path;
import java.security.Permission;
-import java.security.PermissionCollection;
import java.security.Permissions;
import java.security.Policy;
-import java.security.URIParameter;
+import java.security.ProtectionDomain;
+import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
+import java.util.Map;
import java.util.Objects;
import java.util.Properties;
+import java.util.Set;
import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean;
@@ -83,7 +92,6 @@ public class BootstrapForTesting {
// install security manager if requested
if (systemPropertyAsBoolean("tests.security.manager", true)) {
try {
- Security.setCodebaseProperties();
// initialize paths the same exact way as bootstrap
Permissions perms = new Permissions();
// add permissions to everything in classpath
@@ -120,31 +128,17 @@ public class BootstrapForTesting {
if (System.getProperty("tests.maven") == null) {
perms.add(new RuntimePermission("setIO"));
}
-
- final Policy policy;
- // if its a plugin with special permissions, we use a wrapper policy impl to try
- // to simulate what happens with a real distribution
- List<URL> pluginPolicies = Collections.list(BootstrapForTesting.class.getClassLoader().getResources(PluginInfo.ES_PLUGIN_POLICY));
- if (!pluginPolicies.isEmpty()) {
- Permissions extra = new Permissions();
- for (URL url : pluginPolicies) {
- URI uri = url.toURI();
- Policy pluginPolicy = Policy.getInstance("JavaPolicy", new URIParameter(uri));
- PermissionCollection permissions = pluginPolicy.getPermissions(BootstrapForTesting.class.getProtectionDomain());
- // this method is supported with the specific implementation we use, but just check for safety.
- if (permissions == Policy.UNSUPPORTED_EMPTY_COLLECTION) {
- throw new UnsupportedOperationException("JavaPolicy implementation does not support retrieving permissions");
- }
- for (Permission permission : Collections.list(permissions.elements())) {
- extra.add(permission);
- }
+
+ // read test-framework permissions
+ final Policy testFramework = Security.readPolicy(Bootstrap.class.getResource("test-framework.policy"), JarHell.parseClassPath());
+ final Policy esPolicy = new ESPolicy(perms, getPluginPermissions());
+ Policy.setPolicy(new Policy() {
+ @Override
+ public boolean implies(ProtectionDomain domain, Permission permission) {
+ // implements union
+ return esPolicy.implies(domain, permission) || testFramework.implies(domain, permission);
}
- // TODO: try to get rid of this class now that the world is simpler?
- policy = new MockPluginPolicy(perms, extra);
- } else {
- policy = new ESPolicy(perms, Collections.emptyMap());
- }
- Policy.setPolicy(policy);
+ });
System.setSecurityManager(new TestSecurityManager());
Security.selfTest();
@@ -168,6 +162,71 @@ public class BootstrapForTesting {
}
}
+ /**
+ * we don't know which codesources belong to which plugin, so just remove the permission from key codebases
+ * like core, test-framework, etc. This way tests fail if AccessController blocks are missing.
+ */
+ @SuppressForbidden(reason = "accesses fully qualified URLs to configure security")
+ static Map<String, Policy> getPluginPermissions() throws Exception {
+ List<URL> pluginPolicies = Collections.list(BootstrapForTesting.class.getClassLoader().getResources(PluginInfo.ES_PLUGIN_POLICY));
+ if (pluginPolicies.isEmpty()) {
+ return Collections.emptyMap();
+ }
+
+ // compute classpath minus obvious places, all other jars will get the permission.
+ Set<URL> codebases = new HashSet<>(Arrays.asList(parseClassPathWithSymlinks()));
+ Set<URL> excluded = new HashSet<>(Arrays.asList(
+ // es core
+ Bootstrap.class.getProtectionDomain().getCodeSource().getLocation(),
+ // es test framework
+ BootstrapForTesting.class.getProtectionDomain().getCodeSource().getLocation(),
+ // lucene test framework
+ LuceneTestCase.class.getProtectionDomain().getCodeSource().getLocation(),
+ // randomized runner
+ RandomizedRunner.class.getProtectionDomain().getCodeSource().getLocation(),
+ // junit library
+ Assert.class.getProtectionDomain().getCodeSource().getLocation()
+ ));
+ codebases.removeAll(excluded);
+
+ // parse each policy file, with codebase substitution from the classpath
+ final List<Policy> policies = new ArrayList<>();
+ for (URL policyFile : pluginPolicies) {
+ policies.add(Security.readPolicy(policyFile, codebases.toArray(new URL[codebases.size()])));
+ }
+
+ // consult each policy file for those codebases
+ Map<String, Policy> map = new HashMap<>();
+ for (URL url : codebases) {
+ map.put(url.getFile(), new Policy() {
+ @Override
+ public boolean implies(ProtectionDomain domain, Permission permission) {
+ // implements union
+ for (Policy p : policies) {
+ if (p.implies(domain, permission)) {
+ return true;
+ }
+ }
+ return false;
+ }
+ });
+ }
+ return Collections.unmodifiableMap(map);
+ }
+
+ /**
+ * Returns the parsed classpath, with symlinks resolved to their destination files for matching.
+ * This mirrors the toRealPath() call in the production code, where we have a proper plugin structure.
+ */
+ @SuppressForbidden(reason = "does evil stuff with paths and urls because devs and jenkins do evil stuff with paths and urls")
+ static URL[] parseClassPathWithSymlinks() throws Exception {
+ URL[] raw = JarHell.parseClassPath();
+ for (int i = 0; i < raw.length; i++) {
+ raw[i] = PathUtils.get(raw[i].toURI()).toRealPath().toUri().toURL();
+ }
+ return raw;
+ }
+
// does nothing, just easy way to make sure the class is loaded.
public static void ensureInitialized() {}
}
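
Both the test bootstrap and getPluginPermissions above rely on the same trick: an anonymous Policy that grants a permission if any delegate grants it. A standalone sketch of that union policy; the class name is ours, not the patch's:

    import java.security.Permission;
    import java.security.Policy;
    import java.security.ProtectionDomain;

    // grants whatever any of the wrapped policies grants; denies otherwise
    final class UnionPolicySketch extends Policy {
        private final Policy[] delegates;

        UnionPolicySketch(Policy... delegates) {
            this.delegates = delegates;
        }

        @Override
        public boolean implies(ProtectionDomain domain, Permission permission) {
            for (Policy policy : delegates) {
                if (policy.implies(domain, permission)) {
                    return true;
                }
            }
            return false;
        }
    }
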
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/MockPluginPolicy.java b/core/src/test/java/org/elasticsearch/bootstrap/MockPluginPolicy.java
deleted file mode 100644
index 91ed11cce63..00000000000
--- a/core/src/test/java/org/elasticsearch/bootstrap/MockPluginPolicy.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.bootstrap;
-
-import com.carrotsearch.randomizedtesting.RandomizedRunner;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.elasticsearch.common.logging.Loggers;
-import org.junit.Assert;
-
-import java.net.URL;
-import java.security.CodeSource;
-import java.security.Permission;
-import java.security.PermissionCollection;
-import java.security.Policy;
-import java.security.ProtectionDomain;
-import java.security.cert.Certificate;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
-/**
- * Simulates in unit tests per-plugin permissions.
- * Unit tests for plugins do not have a proper plugin structure,
- * so we don't know which codebases to apply the permission to.
- *
- * As an approximation, we just exclude es/test/framework classes,
- * because they will be present in stacks and fail tests for the
- * simple case where an AccessController block is missing, because
- * java security checks every codebase in the stacktrace, and we
- * are sure to pollute it.
- */
-final class MockPluginPolicy extends Policy {
- final ESPolicy standardPolicy;
- final PermissionCollection extraPermissions;
- final Set<CodeSource> excludedSources;
-
- /**
- * Create a new MockPluginPolicy with dynamic {@code permissions} and
- * adding the extra plugin permissions from {@code insecurePluginProp} to
- * all code except test classes.
- */
- MockPluginPolicy(PermissionCollection standard, PermissionCollection extra) throws Exception {
- // the hack begins!
-
- this.standardPolicy = new ESPolicy(standard, Collections.emptyMap());
- this.extraPermissions = extra;
-
- excludedSources = new HashSet<CodeSource>();
- // exclude some obvious places
- // es core
- excludedSources.add(Bootstrap.class.getProtectionDomain().getCodeSource());
- // es test framework
- excludedSources.add(getClass().getProtectionDomain().getCodeSource());
- // lucene test framework
- excludedSources.add(LuceneTestCase.class.getProtectionDomain().getCodeSource());
- // test runner
- excludedSources.add(RandomizedRunner.class.getProtectionDomain().getCodeSource());
- // junit library
- excludedSources.add(Assert.class.getProtectionDomain().getCodeSource());
- // scripts
- excludedSources.add(new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[])null));
-
- Loggers.getLogger(getClass()).debug("Apply extra permissions [{}] excluding codebases [{}]", extraPermissions, excludedSources);
- }
-
- @Override
- public boolean implies(ProtectionDomain domain, Permission permission) {
- CodeSource codeSource = domain.getCodeSource();
- // codesource can be null when reducing privileges via doPrivileged()
- if (codeSource == null) {
- return false;
- }
-
- if (standardPolicy.implies(domain, permission)) {
- return true;
- } else if (excludedSources.contains(codeSource) == false &&
- codeSource.toString().contains("test-classes") == false) {
- return extraPermissions.implies(permission);
- } else {
- return false;
- }
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
index e48ca834f53..98eea13e673 100644
--- a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
@@ -141,11 +141,11 @@ public class DiskUsageTests extends ESTestCase {
};
NodeStats[] nodeStats = new NodeStats[] {
new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT), 0,
- null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null),
+ null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null,null),
new NodeStats(new DiscoveryNode("node_2", DummyTransportAddress.INSTANCE, Version.CURRENT), 0,
- null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null),
+ null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null,null),
new NodeStats(new DiscoveryNode("node_3", DummyTransportAddress.INSTANCE, Version.CURRENT), 0,
- null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null)
+ null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null,null)
};
InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvaiableUsages, newMostAvaiableUsages);
DiskUsage leastNode_1 = newLeastAvaiableUsages.get("node_1");
diff --git a/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java
index dd1cb0b9eff..6ac2101fe52 100644
--- a/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java
+++ b/core/src/test/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java
@@ -73,7 +73,7 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService {
null, null, null, null, null,
fsInfo,
null, null, null,
- null);
+ null, null);
}
@Inject
diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java
new file mode 100644
index 00000000000..7d4c6c25cff
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.cluster.action.shard;
+
+import org.apache.lucene.index.CorruptIndexException;
+import org.elasticsearch.action.search.TransportSearchAction;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardsIterator;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.cluster.TestClusterService;
+import org.elasticsearch.test.transport.CapturingTransport;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportException;
+import org.elasticsearch.transport.TransportService;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithStartedPrimary;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.Assert.*;
+
+public class ShardStateActionTests extends ESTestCase {
+ private static ThreadPool THREAD_POOL;
+
+ private ShardStateAction shardStateAction;
+ private CapturingTransport transport;
+ private TransportService transportService;
+ private TestClusterService clusterService;
+
+ @BeforeClass
+ public static void startThreadPool() {
+ THREAD_POOL = new ThreadPool("ShardStateActionTest");
+ }
+
+ @Override
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+ this.transport = new CapturingTransport();
+ clusterService = new TestClusterService(THREAD_POOL);
+ transportService = new TransportService(transport, THREAD_POOL);
+ transportService.start();
+ shardStateAction = new ShardStateAction(Settings.EMPTY, clusterService, transportService, null, null);
+ }
+
+ @Override
+ @After
+ public void tearDown() throws Exception {
+ transportService.stop();
+ super.tearDown();
+ }
+
+ @AfterClass
+ public static void stopThreadPool() {
+ ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS);
+ THREAD_POOL = null;
+ }
+
+ public void testNoMaster() {
+ final String index = "test";
+
+ clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5)));
+
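+ // drop the elected master from the cluster state so the shard-failed request has no node to go to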
+ DiscoveryNodes.Builder builder = DiscoveryNodes.builder(clusterService.state().nodes());
+ builder.masterNodeId(null);
+ clusterService.setState(ClusterState.builder(clusterService.state()).nodes(builder));
+
+ String indexUUID = clusterService.state().metaData().index(index).getIndexUUID();
+
+ AtomicBoolean noMaster = new AtomicBoolean();
+ assert !noMaster.get();
+
+ shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() {
+ @Override
+ public void onShardFailedNoMaster() {
+ noMaster.set(true);
+ }
+
+ @Override
+ public void onShardFailedFailure(DiscoveryNode master, TransportException e) {
+
+ }
+ });
+
+ assertTrue(noMaster.get());
+ }
+
+ public void testFailure() {
+ final String index = "test";
+
+ clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5)));
+
+ String indexUUID = clusterService.state().metaData().index(index).getIndexUUID();
+
+ AtomicBoolean failure = new AtomicBoolean();
+ assert !failure.get();
+
+ shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() {
+ @Override
+ public void onShardFailedNoMaster() {
+
+ }
+
+ @Override
+ public void onShardFailedFailure(DiscoveryNode master, TransportException e) {
+ failure.set(true);
+ }
+ });
+
+ final CapturingTransport.CapturedRequest[] capturedRequests = transport.capturedRequests();
+ transport.clear();
+ assertThat(capturedRequests.length, equalTo(1));
+ assert !failure.get();
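+ // answer the captured shard-failed request with a transport-level error; the listener must surface it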
+ transport.handleResponse(capturedRequests[0].requestId, new TransportException("simulated"));
+
+ assertTrue(failure.get());
+ }
+
+ private ShardRouting getRandomShardRouting(String index) {
+ IndexRoutingTable indexRoutingTable = clusterService.state().routingTable().index(index);
+ ShardsIterator shardsIterator = indexRoutingTable.randomAllActiveShardsIt();
+ ShardRouting shardRouting = shardsIterator.nextOrNull();
+ assert shardRouting != null;
+ return shardRouting;
+ }
+
+ private Throwable getSimulatedFailure() {
+ return new CorruptIndexException("simulated", (String) null);
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java
index 11e8b6c6a0c..7b8eb2ebc51 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java
@@ -847,6 +847,19 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
assertThat(results, arrayContainingInAnyOrder("foo1-closed", "foo2-closed", "foo3"));
}
+ public void testDedupConcreteIndices() {
+ MetaData.Builder mdBuilder = MetaData.builder()
+ .put(indexBuilder("index1").putAlias(AliasMetaData.builder("alias1")));
+ ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build();
+ IndicesOptions[] indicesOptions = new IndicesOptions[]{ IndicesOptions.strictExpandOpen(), IndicesOptions.strictExpand(),
+ IndicesOptions.lenientExpandOpen(), IndicesOptions.strictExpandOpenAndForbidClosed()};
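+ // "index1" is requested twice, plus once more via alias1; resolution must collapse to a single concrete index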
+ for (IndicesOptions options : indicesOptions) {
+ IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, options);
+ String[] results = indexNameExpressionResolver.concreteIndices(context, "index1", "index1", "alias1");
+ assertThat(results, equalTo(new String[]{"index1"}));
+ }
+ }
+
private MetaData metaDataBuilder(String... indices) {
MetaData.Builder mdBuilder = MetaData.builder();
for (String concreteIndex : indices) {
diff --git a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java
index 4f64f0baca7..e0332648905 100644
--- a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java
+++ b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java
@@ -22,11 +22,14 @@ package org.elasticsearch.common.cache;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
+import java.lang.management.ManagementFactory;
+import java.lang.management.ThreadMXBean;
import java.util.*;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutionException;
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReferenceArray;
+import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.instanceOf;
@@ -460,6 +463,25 @@ public class CacheTests extends ESTestCase {
assertEquals(replacements, notifications);
}
+ public void testComputeIfAbsentLoadsSuccessfully() {
+ Map<Integer, Integer> map = new HashMap<>();
+ Cache<Integer, Integer> cache = CacheBuilder.<Integer, Integer>builder().build();
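+ // mirror every computed value in a plain map so the cache contents can be verified afterwards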
+ for (int i = 0; i < numberOfEntries; i++) {
+ try {
+ cache.computeIfAbsent(i, k -> {
+ int value = randomInt();
+ map.put(k, value);
+ return value;
+ });
+ } catch (ExecutionException e) {
+ fail(e.getMessage());
+ }
+ }
+ for (int i = 0; i < numberOfEntries; i++) {
+ assertEquals(map.get(i), cache.get(i));
+ }
+ }
+
public void testComputeIfAbsentCallsOnce() throws InterruptedException {
int numberOfThreads = randomIntBetween(2, 200);
final Cache cache = CacheBuilder.builder().build();
@@ -502,6 +524,146 @@ public class CacheTests extends ESTestCase {
}
}
+ public void testDependentKeyDeadlock() throws InterruptedException {
+ class Key {
+ private final int key;
+
+ public Key(int key) {
+ this.key = key;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ Key key1 = (Key) o;
+
+ return key == key1.key;
+
+ }
+
+ @Override
+ public int hashCode() {
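+ // deliberately coarse hash: every key maps to one of two buckets, so dependent keys collide in the same cache segment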
+ return key % 2;
+ }
+ }
+
+ int numberOfThreads = randomIntBetween(2, 256);
+ final Cache<Key, Integer> cache = CacheBuilder.<Key, Integer>builder().build();
+ CountDownLatch latch = new CountDownLatch(1 + numberOfThreads);
+ CountDownLatch deadlockLatch = new CountDownLatch(numberOfThreads);
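+ // deadlockLatch frees the main thread when all workers finish, or early if the watchdog detects a deadlock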
+ List<Thread> threads = new ArrayList<>();
+ for (int i = 0; i < numberOfThreads; i++) {
+ Thread thread = new Thread(() -> {
+ Random random = new Random(random().nextLong());
+ latch.countDown();
+ for (int j = 0; j < numberOfEntries; j++) {
+ Key key = new Key(random.nextInt(numberOfEntries));
+ try {
+ cache.computeIfAbsent(key, k -> {
+ if (k.key == 0) {
+ return 0;
+ } else {
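+ // loading key k reads key k/2 from the same cache, creating chains of dependent loads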
+ Integer value = cache.get(new Key(k.key / 2));
+ return value != null ? value : 0;
+ }
+ });
+ } catch (ExecutionException e) {
+ fail(e.getMessage());
+ }
+ }
+ // successfully avoided deadlock, release the main thread
+ deadlockLatch.countDown();
+ });
+ threads.add(thread);
+ thread.start();
+ }
+
+ AtomicBoolean deadlock = new AtomicBoolean();
+ assert !deadlock.get();
+
+ // start a watchdog service
+ ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
+ scheduler.scheduleAtFixedRate(() -> {
+ Set<Long> ids = threads.stream().map(t -> t.getId()).collect(Collectors.toSet());
+ ThreadMXBean mxBean = ManagementFactory.getThreadMXBean();
+ long[] deadlockedThreads = mxBean.findDeadlockedThreads();
+ if (!deadlock.get() && deadlockedThreads != null) {
+ for (long deadlockedThread : deadlockedThreads) {
+ // ensure that we detected deadlock on our threads
+ if (ids.contains(deadlockedThread)) {
+ deadlock.set(true);
+ // release the main test thread to fail the test
+ for (int i = 0; i < numberOfThreads; i++) {
+ deadlockLatch.countDown();
+ }
+ break;
+ }
+ }
+ }
+ }, 1, 1, TimeUnit.SECONDS);
+
+ // everything is set up, release the hounds
+ latch.countDown();
+
+ // wait for either deadlock to be detected or the threads to terminate
+ deadlockLatch.await();
+
+ // shutdown the watchdog service
+ scheduler.shutdown();
+
+ assertFalse("deadlock", deadlock.get());
+ }
+
+ public void testCachePollution() throws InterruptedException {
+ int numberOfThreads = randomIntBetween(2, 200);
+ final Cache<Integer, String> cache = CacheBuilder.<Integer, String>builder().build();
+ CountDownLatch latch = new CountDownLatch(1 + numberOfThreads);
+ List<Thread> threads = new ArrayList<>();
+ for (int i = 0; i < numberOfThreads; i++) {
+ Thread thread = new Thread(() -> {
+ latch.countDown();
+ Random random = new Random(random().nextLong());
+ for (int j = 0; j < numberOfEntries; j++) {
+ Integer key = random.nextInt(numberOfEntries);
+ boolean first;
+ boolean second;
+ do {
+ first = random.nextBoolean();
+ second = random.nextBoolean();
+ } while (first && second);
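+ // the (first, second) pair now encodes one of three actions: compute, invalidate, or plain get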
+ if (first && !second) {
+ try {
+ cache.computeIfAbsent(key, k -> {
+ if (random.nextBoolean()) {
+ return Integer.toString(k);
+ } else {
+ throw new Exception("testCachePollution");
+ }
+ });
+ } catch (ExecutionException e) {
+ assertNotNull(e.getCause());
+ assertThat(e.getCause(), instanceOf(Exception.class));
+ assertEquals(e.getCause().getMessage(), "testCachePollution");
+ }
+ } else if (!first && second) {
+ cache.invalidate(key);
+ } else if (!first && !second) {
+ cache.get(key);
+ }
+ }
+ });
+ threads.add(thread);
+ thread.start();
+ }
+
+ latch.countDown();
+ for (Thread thread : threads) {
+ thread.join();
+ }
+ }
+
// test that the cache is not corrupted under lots of concurrent modifications, even hitting the same key
// here be dragons: this test did catch one subtle bug during development; do not remove lightly
public void testTorture() throws InterruptedException {
diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java
index 934400d8188..d9d1245fb42 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java
@@ -18,13 +18,13 @@
*/
package org.elasticsearch.common.geo;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.test.ESTestCase;
/**
- * Tests for {@link org.apache.lucene.util.XGeoHashUtils}
+ * Tests for {@link org.apache.lucene.util.GeoHashUtils}
*/
public class GeoHashTests extends ESTestCase {
public void testGeohashAsLongRoutines() {
@@ -39,13 +39,13 @@ public class GeoHashTests extends ESTestCase {
{
for(int p=1;p<=12;p++)
{
- long geoAsLong = XGeoHashUtils.longEncode(lng, lat, p);
+ long geoAsLong = GeoHashUtils.longEncode(lng, lat, p);
// string encode from geohashlong encoded location
- String geohashFromLong = XGeoHashUtils.stringEncode(geoAsLong);
+ String geohashFromLong = GeoHashUtils.stringEncode(geoAsLong);
// string encode from full res lat lon
- String geohash = XGeoHashUtils.stringEncode(lng, lat, p);
+ String geohash = GeoHashUtils.stringEncode(lng, lat, p);
// ensure both strings are the same
assertEquals(geohash, geohashFromLong);
diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java
index f717102bce9..2a1b146da92 100644
--- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java
@@ -116,6 +116,11 @@ public class DiscoveryModuleTests extends ModuleTestCase {
}
+ @Override
+ public DiscoveryStats stats() {
+ return null;
+ }
+
@Override
public Lifecycle.State lifecycleState() {
return null;
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
index 11d94beac17..0b5f9997dba 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
@@ -22,6 +22,7 @@ package org.elasticsearch.discovery.zen;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterService;
@@ -34,7 +35,11 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.discovery.Discovery;
+import org.elasticsearch.discovery.DiscoveryStats;
import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.discovery.zen.fd.FaultDetection;
import org.elasticsearch.discovery.zen.membership.MembershipAction;
@@ -256,4 +261,37 @@ public class ZenDiscoveryIT extends ESIntegTestCase {
assertThat("Can't join master because version 1.6.0 is lower than the minimum compatable version 2.0.0 can support", electMasterService.electMaster(Collections.singletonList(node)), nullValue());
}
+ public void testDiscoveryStats() throws IOException {
+ String expectedStatsJsonResponse = "{\n" +
+ " \"discovery\" : {\n" +
+ " \"cluster_state_queue\" : {\n" +
+ " \"total\" : 0,\n" +
+ " \"pending\" : 0,\n" +
+ " \"committed\" : 0\n" +
+ " }\n" +
+ " }\n" +
+ "}";
+
+ Settings nodeSettings = Settings.settingsBuilder()
+ .put("discovery.type", "zen") // <-- To override the local setting if set externally
+ .build();
+ internalCluster().startNode(nodeSettings);
+
+ logger.info("--> request node discovery stats");
+ NodesStatsResponse statsResponse = client().admin().cluster().prepareNodesStats().clear().setDiscovery(true).get();
+ assertThat(statsResponse.getNodes().length, equalTo(1));
+
+ DiscoveryStats stats = statsResponse.getNodes()[0].getDiscoveryStats();
+ assertThat(stats.getQueueStats(), notNullValue());
+ assertThat(stats.getQueueStats().getTotal(), equalTo(0));
+ assertThat(stats.getQueueStats().getCommitted(), equalTo(0));
+ assertThat(stats.getQueueStats().getPending(), equalTo(0));
+
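+ // render the stats to pretty-printed JSON and compare against the expected payload verbatim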
+ XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
+ builder.startObject();
+ stats.toXContent(builder, ToXContent.EMPTY_PARAMS);
+ builder.endObject();
+
+ assertThat(builder.string(), equalTo(expectedStatsJsonResponse));
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java
index a8e9f00eb7f..bc5e97ce08e 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java
@@ -162,6 +162,31 @@ public class PendingClusterStatesQueueTests extends ESTestCase {
}
}
+ public void testQueueStats() {
+ List<ClusterState> states = randomStates(scaledRandomIntBetween(10, 100), "master");
+ PendingClusterStatesQueue queue = createQueueWithStates(states);
+ assertThat(queue.stats().getTotal(), equalTo(states.size()));
+ assertThat(queue.stats().getPending(), equalTo(states.size()));
+ assertThat(queue.stats().getCommitted(), equalTo(0));
+
+ List<ClusterStateContext> committedContexts = randomCommitStates(queue);
+ assertThat(queue.stats().getTotal(), equalTo(states.size()));
+ assertThat(queue.stats().getPending(), equalTo(states.size() - committedContexts.size()));
+ assertThat(queue.stats().getCommitted(), equalTo(committedContexts.size()));
+
+ ClusterState highestCommitted = null;
+ for (ClusterStateContext context : committedContexts) {
+ if (highestCommitted == null || context.state.supersedes(highestCommitted)) {
+ highestCommitted = context.state;
+ }
+ }
+
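+ // marking the highest committed state as processed drops all committed states from the queue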
+ queue.markAsProcessed(highestCommitted);
+ assertThat(queue.stats().getTotal(), equalTo(states.size() - committedContexts.size()));
+ assertThat(queue.stats().getPending(), equalTo(states.size() - committedContexts.size()));
+ assertThat(queue.stats().getCommitted(), equalTo(0));
+ }
+
protected List<ClusterStateContext> randomCommitStates(PendingClusterStatesQueue queue) {
List<ClusterStateContext> committedContexts = new ArrayList<>();
for (int iter = randomInt(queue.pendingStates.size() - 1); iter >= 0; iter--) {
diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
index 9f2947936bc..5b7577e27ce 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
@@ -21,15 +21,16 @@ package org.elasticsearch.index;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.CollectionStatistics;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.TermStatistics;
+import org.apache.lucene.search.*;
import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.ModuleTestCase;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.cache.query.QueryCache;
+import org.elasticsearch.index.cache.query.index.IndexQueryCache;
+import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.engine.EngineException;
import org.elasticsearch.index.engine.EngineFactory;
@@ -40,6 +41,7 @@ import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.IndexStoreConfig;
+import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.engine.MockEngineFactory;
@@ -49,22 +51,22 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
public class IndexModuleTests extends ModuleTestCase {
-
+ private final IndicesWarmer warmer = new IndicesWarmer(Settings.EMPTY, null);
public void testWrapperIsBound() {
final Index index = new Index("foo");
final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
- IndexModule module = new IndexModule(indexSettings, null);
- assertInstanceBinding(module, IndexSearcherWrapper.class,(x) -> x == null);
- module.indexSearcherWrapper = Wrapper.class;
- assertBinding(module, IndexSearcherWrapper.class, Wrapper.class);
+ IndexModule module = new IndexModule(indexSettings, null, null, warmer);
+ assertInstanceBinding(module, IndexModule.IndexSearcherWrapperFactory.class, (x) -> x.newWrapper(null) == null);
+ module.setSearcherWrapper((s) -> new Wrapper());
+ assertInstanceBinding(module, IndexModule.IndexSearcherWrapperFactory.class, (x) -> x.newWrapper(null) instanceof Wrapper);
}
public void testEngineFactoryBound() {
final Index index = new Index("foo");
final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
- IndexModule module = new IndexModule(indexSettings, null);
+ IndexModule module = new IndexModule(indexSettings, null, null, warmer);
assertBinding(module, EngineFactory.class, InternalEngineFactory.class);
module.engineFactoryImpl = MockEngineFactory.class;
assertBinding(module, EngineFactory.class, MockEngineFactory.class);
@@ -74,7 +76,7 @@ public class IndexModuleTests extends ModuleTestCase {
final Index index = new Index("foo");
final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put(IndexModule.STORE_TYPE, "foo_store").build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
- IndexModule module = new IndexModule(indexSettings, null);
+ IndexModule module = new IndexModule(indexSettings, null, null, warmer);
module.addIndexStore("foo_store", FooStore::new);
assertInstanceBinding(module, IndexStore.class, (x) -> x.getClass() == FooStore.class);
try {
@@ -96,7 +98,7 @@ public class IndexModuleTests extends ModuleTestCase {
final Index index = new Index("foo");
final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
- IndexModule module = new IndexModule(indexSettings, null);
+ IndexModule module = new IndexModule(indexSettings, null, null, warmer);
Consumer<Settings> listener = (s) -> {};
module.addIndexSettingsListener(listener);
module.addIndexEventListener(eventListener);
@@ -117,7 +119,7 @@ public class IndexModuleTests extends ModuleTestCase {
final Index index = new Index("foo");
final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
- IndexModule module = new IndexModule(indexSettings, null);
+ IndexModule module = new IndexModule(indexSettings, null, null, warmer);
Consumer<Settings> listener = (s) -> {
};
module.addIndexSettingsListener(listener);
@@ -145,7 +147,7 @@ public class IndexModuleTests extends ModuleTestCase {
.put("index.similarity.my_similarity.type", "test_similarity")
.put("index.similarity.my_similarity.key", "there is a key")
.build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null);
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer);
module.addSimilarity("test_similarity", (string, settings) -> new SimilarityProvider() {
@Override
public String name() {
@@ -174,7 +176,7 @@ public class IndexModuleTests extends ModuleTestCase {
.put("index.similarity.my_similarity.type", "test_similarity")
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null);
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer);
try {
assertInstanceBinding(module, SimilarityService.class, (inst) -> inst instanceof SimilarityService);
} catch (IllegalArgumentException ex) {
@@ -188,7 +190,7 @@ public class IndexModuleTests extends ModuleTestCase {
.put("index.similarity.my_similarity.foo", "bar")
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null);
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer);
try {
assertInstanceBinding(module, SimilarityService.class, (inst) -> inst instanceof SimilarityService);
} catch (IllegalArgumentException ex) {
@@ -196,6 +198,76 @@ public class IndexModuleTests extends ModuleTestCase {
}
}
+ public void testCannotRegisterProvidedImplementations() {
+ Settings indexSettings = Settings.settingsBuilder()
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer);
+ try {
+ module.registerQueryCache("index", IndexQueryCache::new);
+ fail("only once");
+ } catch (IllegalArgumentException e) {
+ assertEquals(e.getMessage(), "Can't register the same [query_cache] more than once for [index]");
+ }
+
+ try {
+ module.registerQueryCache("none", (settings, x) -> new NoneQueryCache(settings));
+ fail("only once");
+ } catch (IllegalArgumentException e) {
+ assertEquals(e.getMessage(), "Can't register the same [query_cache] more than once for [none]");
+ }
+
+ try {
+ module.registerQueryCache("index", null);
+ fail("must not be null");
+ } catch (IllegalArgumentException e) {
+ assertEquals(e.getMessage(), "provider must not be null");
+ }
+ }
+
+ public void testRegisterCustomQueryCache() {
+ Settings indexSettings = Settings.settingsBuilder()
+ .put(IndexModule.QUERY_CACHE_TYPE, "custom")
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer);
+ module.registerQueryCache("custom", (a, b) -> new CustomQueryCache());
+ try {
+ module.registerQueryCache("custom", (a, b) -> new CustomQueryCache());
+ fail("only once");
+ } catch (IllegalArgumentException e) {
+ assertEquals(e.getMessage(), "Can't register the same [query_cache] more than once for [custom]");
+ }
+ assertInstanceBinding(module, QueryCache.class, (x) -> x instanceof CustomQueryCache);
+ }
+
+ public void testDefaultQueryCacheImplIsSelected() {
+ Settings indexSettings = Settings.settingsBuilder()
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer);
+ assertInstanceBinding(module, QueryCache.class, (x) -> x instanceof IndexQueryCache);
+ }
+
+ class CustomQueryCache implements QueryCache {
+
+ @Override
+ public void clear(String reason) {
+ }
+
+ @Override
+ public void close() throws IOException {
+ }
+
+ @Override
+ public Index index() {
+ return new Index("test");
+ }
+
+ @Override
+ public Weight doCache(Weight weight, QueryCachingPolicy policy) {
+ return weight;
+ }
+ }
+
+
private static class TestSimilarity extends Similarity {
private final Similarity delegate = new BM25Similarity();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java
index 9c578ef6385..6fa2e21fbd1 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.index.analysis;
*/
import java.io.IOException;
-import java.lang.Thread.UncaughtExceptionHandler;
import java.util.Arrays;
import java.util.regex.Pattern;
@@ -110,45 +109,6 @@ public class PatternAnalyzerTests extends ESTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new PatternAnalyzer(Pattern.compile(","), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
-
- // dodge jre bug http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7104012
- final UncaughtExceptionHandler savedHandler = Thread.getDefaultUncaughtExceptionHandler();
- Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
- @Override
- public void uncaughtException(Thread thread, Throwable throwable) {
- assumeTrue("not failing due to jre bug ", !isJREBug7104012(throwable));
- // otherwise its some other bug, pass to default handler
- savedHandler.uncaughtException(thread, throwable);
- }
- });
-
- try {
- Thread.getDefaultUncaughtExceptionHandler();
- checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
- } catch (ArrayIndexOutOfBoundsException ex) {
- assumeTrue("not failing due to jre bug ", !isJREBug7104012(ex));
- throw ex; // otherwise rethrow
- } finally {
- Thread.setDefaultUncaughtExceptionHandler(savedHandler);
- }
- }
-
- static boolean isJREBug7104012(Throwable t) {
- if (!(t instanceof ArrayIndexOutOfBoundsException)) {
- // BaseTokenStreamTestCase now wraps exc in a new RuntimeException:
- t = t.getCause();
- if (!(t instanceof ArrayIndexOutOfBoundsException)) {
- return false;
- }
- }
- StackTraceElement trace[] = t.getStackTrace();
- for (StackTraceElement st : trace) {
- if ("java.text.RuleBasedBreakIterator".equals(st.getClassName()) ||
- "sun.util.locale.provider.RuleBasedBreakIterator".equals(st.getClassName())
- && "lookupBackwardState".equals(st.getMethodName())) {
- return true;
- }
- }
- return false;
+ checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/cache/IndexCacheModuleTests.java b/core/src/test/java/org/elasticsearch/index/cache/IndexCacheModuleTests.java
deleted file mode 100644
index bd564744f20..00000000000
--- a/core/src/test/java/org/elasticsearch/index/cache/IndexCacheModuleTests.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.cache;
-
-import org.apache.lucene.search.QueryCachingPolicy;
-import org.apache.lucene.search.Weight;
-import org.elasticsearch.common.inject.ModuleTestCase;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.cache.query.QueryCache;
-import org.elasticsearch.index.cache.query.index.IndexQueryCache;
-import org.elasticsearch.index.cache.query.none.NoneQueryCache;
-
-import java.io.IOException;
-
-public class IndexCacheModuleTests extends ModuleTestCase {
-
- public void testCannotRegisterProvidedImplementations() {
- IndexCacheModule module = new IndexCacheModule(Settings.EMPTY);
- try {
- module.registerQueryCache("index", IndexQueryCache.class);
- } catch (IllegalArgumentException e) {
- assertEquals(e.getMessage(), "Can't register the same [query_cache] more than once for [index]");
- }
-
- try {
- module.registerQueryCache("none", NoneQueryCache.class);
- } catch (IllegalArgumentException e) {
- assertEquals(e.getMessage(), "Can't register the same [query_cache] more than once for [none]");
- }
- }
-
- public void testRegisterCustomQueryCache() {
- IndexCacheModule module = new IndexCacheModule(
- Settings.builder().put(IndexCacheModule.QUERY_CACHE_TYPE, "custom").build()
- );
- module.registerQueryCache("custom", CustomQueryCache.class);
- try {
- module.registerQueryCache("custom", CustomQueryCache.class);
- } catch (IllegalArgumentException e) {
- assertEquals(e.getMessage(), "Can't register the same [query_cache] more than once for [custom]");
- }
- assertBinding(module, QueryCache.class, CustomQueryCache.class);
- }
-
- public void testDefaultQueryCacheImplIsSelected() {
- IndexCacheModule module = new IndexCacheModule(Settings.EMPTY);
- assertBinding(module, QueryCache.class, IndexQueryCache.class);
- }
-
- class CustomQueryCache implements QueryCache {
-
- @Override
- public void clear(String reason) {
- }
-
- @Override
- public void close() throws IOException {
- }
-
- @Override
- public Index index() {
- return new Index("test");
- }
-
- @Override
- public Weight doCache(Weight weight, QueryCachingPolicy policy) {
- return weight;
- }
- }
-
-}
diff --git a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
index 3d2a001c049..5a015b90245 100644
--- a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
+++ b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
@@ -37,12 +37,12 @@ import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
@@ -56,6 +56,8 @@ import static org.hamcrest.Matchers.equalTo;
public class BitSetFilterCacheTests extends ESTestCase {
private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("test"), Settings.EMPTY, Collections.emptyList());
+ private final IndicesWarmer warmer = new IndicesWarmer(Settings.EMPTY, null);
+
private static int matchCount(BitSetProducer producer, IndexReader reader) throws IOException {
int count = 0;
@@ -91,7 +93,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
IndexReader reader = DirectoryReader.open(writer, false);
IndexSearcher searcher = new IndexSearcher(reader);
- BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS);
+ BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer);
BitSetProducer filter = cache.getBitSetProducer(new TermQuery(new Term("field", "value")));
assertThat(matchCount(filter, reader), equalTo(3));
@@ -134,7 +136,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
final AtomicInteger onCacheCalls = new AtomicInteger();
final AtomicInteger onRemoveCalls = new AtomicInteger();
- final BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS);
+ final BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer);
cache.setListener(new BitsetFilterCache.Listener() {
@Override
public void onCache(ShardId shardId, Accountable accountable) {
@@ -173,7 +175,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
}
public void testSetListenerTwice() {
- final BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS);
+ final BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer);
cache.setListener(new BitsetFilterCache.Listener() {
@Override
diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
index 570ea3551fe..5d6f83b2d47 100644
--- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
+++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
@@ -31,6 +31,7 @@ import org.apache.lucene.codecs.lucene50.Lucene50Codec;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
import org.apache.lucene.codecs.lucene53.Lucene53Codec;
+import org.apache.lucene.codecs.lucene54.Lucene54Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
@@ -49,7 +50,8 @@ public class CodecTests extends ESSingleNodeTestCase {
public void testResolveDefaultCodecs() throws Exception {
CodecService codecService = createCodecService();
assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class));
- assertThat(codecService.codec("default"), instanceOf(Lucene53Codec.class));
+ assertThat(codecService.codec("default"), instanceOf(Lucene54Codec.class));
+ assertThat(codecService.codec("Lucene53"), instanceOf(Lucene53Codec.class));
assertThat(codecService.codec("Lucene50"), instanceOf(Lucene50Codec.class));
assertThat(codecService.codec("Lucene410"), instanceOf(Lucene410Codec.class));
assertThat(codecService.codec("Lucene49"), instanceOf(Lucene49Codec.class));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
index 18920810953..e9dd0ff6894 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.index.mapper.geo;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
@@ -86,7 +86,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
- assertThat(doc.rootDoc().get("point.geohash"), equalTo(XGeoHashUtils.stringEncode(1.3, 1.2)));
+ assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.stringEncode(1.3, 1.2)));
}
public void testLatLonInOneValueWithGeohash() throws Exception {
@@ -104,7 +104,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
- assertThat(doc.rootDoc().get("point.geohash"), equalTo(XGeoHashUtils.stringEncode(1.3, 1.2)));
+ assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.stringEncode(1.3, 1.2)));
}
public void testGeoHashIndexValue() throws Exception {
@@ -116,13 +116,13 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
- .field("point", XGeoHashUtils.stringEncode(1.3, 1.2))
+ .field("point", GeoHashUtils.stringEncode(1.3, 1.2))
.endObject()
.bytes());
assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
- assertThat(doc.rootDoc().get("point.geohash"), equalTo(XGeoHashUtils.stringEncode(1.3, 1.2)));
+ assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.stringEncode(1.3, 1.2)));
}
public void testGeoHashValue() throws Exception {
@@ -134,7 +134,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
- .field("point", XGeoHashUtils.stringEncode(1.3, 1.2))
+ .field("point", GeoHashUtils.stringEncode(1.3, 1.2))
.endObject()
.bytes());
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
index 0e4da76eaf0..4b20d10f600 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
@@ -19,7 +19,7 @@
package org.elasticsearch.index.mapper.geo;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
@@ -82,13 +82,13 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
- .field("point", XGeoHashUtils.stringEncode(1.3, 1.2))
+ .field("point", GeoHashUtils.stringEncode(1.3, 1.2))
.endObject()
.bytes());
MatcherAssert.assertThat(doc.rootDoc().getField("point.lat"), nullValue());
MatcherAssert.assertThat(doc.rootDoc().getField("point.lon"), nullValue());
- MatcherAssert.assertThat(doc.rootDoc().get("point.geohash"), equalTo(XGeoHashUtils.stringEncode(1.3, 1.2)));
+ MatcherAssert.assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.stringEncode(1.3, 1.2)));
MatcherAssert.assertThat(doc.rootDoc().get("point"), notNullValue());
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java
index 9db4eceb4b4..01f61969606 100644
--- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java
@@ -60,22 +60,20 @@ import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisModule;
-import org.elasticsearch.index.cache.IndexCacheModule;
+import org.elasticsearch.index.cache.IndexCache;
+import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
+import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper;
import org.elasticsearch.index.query.support.QueryParsers;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
+import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
-import org.elasticsearch.script.MockScriptEngine;
-import org.elasticsearch.script.ScriptContext;
-import org.elasticsearch.script.ScriptContextRegistry;
-import org.elasticsearch.script.ScriptEngineService;
-import org.elasticsearch.script.ScriptModule;
-import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.*;
import org.elasticsearch.script.mustache.MustacheScriptEngineService;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
@@ -96,12 +94,7 @@ import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
import java.util.concurrent.ExecutionException;
import static org.hamcrest.Matchers.equalTo;
@@ -123,6 +116,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_SHAPE_FIELD_NAME };
protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[] { STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME,
BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, GEO_POINT_FIELD_NAME };
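+ // number of random query builder instances each test method exercises per run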
+ private static final int NUMBER_OF_TESTQUERIES = 20;
private static Injector injector;
private static IndexQueryParserService queryParserService;
@@ -213,13 +207,16 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
}
},
new IndexSettingsModule(index, indexSettings),
- new IndexCacheModule(indexSettings),
new AnalysisModule(indexSettings, new IndicesAnalysisService(indexSettings)),
new AbstractModule() {
@Override
protected void configure() {
- SimilarityService service = new SimilarityService(IndexSettingsModule.newIndexSettings(index, indexSettings, Collections.EMPTY_LIST), Collections.EMPTY_MAP);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings, Collections.emptyList());
+ SimilarityService service = new SimilarityService(idxSettings, Collections.emptyMap());
bind(SimilarityService.class).toInstance(service);
+ BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null));
+ bind(BitsetFilterCache.class).toInstance(bitsetFilterCache);
+ bind(IndexCache.class).toInstance(new IndexCache(idxSettings, new NoneQueryCache(idxSettings), bitsetFilterCache));
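+ // with IndexCacheModule removed, the test module wires the bitset filter cache and a no-op query cache directly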
bind(Client.class).toInstance(proxy);
Multibinder.newSetBinder(binder(), ScoreFunctionParser.class);
bind(ScoreFunctionParserMapper.class).asEagerSingleton();
@@ -310,10 +307,12 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
* and asserts equality on the two queries.
*/
public void testFromXContent() throws IOException {
- QB testQuery = createTestQueryBuilder();
- assertParsedQuery(testQuery.toString(), testQuery);
- for (Map.Entry<String, QB> alternateVersion : getAlternateVersions().entrySet()) {
- assertParsedQuery(alternateVersion.getKey(), alternateVersion.getValue());
+ for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) {
+ QB testQuery = createTestQueryBuilder();
+ assertParsedQuery(testQuery.toString(), testQuery);
+ for (Map.Entry<String, QB> alternateVersion : getAlternateVersions().entrySet()) {
+ assertParsedQuery(alternateVersion.getKey(), alternateVersion.getValue());
+ }
}
}
@@ -365,49 +364,52 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
* assertions being made on the result to the implementing subclass.
*/
public void testToQuery() throws IOException {
- QueryShardContext context = createShardContext();
- context.setAllowUnmappedFields(true);
-
- QB firstQuery = createTestQueryBuilder();
- QB controlQuery = copyQuery(firstQuery);
- setSearchContext(randomTypes); // only set search context for toQuery to be more realistic
- Query firstLuceneQuery = firstQuery.toQuery(context);
- assertLuceneQuery(firstQuery, firstLuceneQuery, context);
- SearchContext.removeCurrent(); // remove after assertLuceneQuery since the assertLuceneQuery impl might access the context as well
- assertTrue("query is not equal to its copy after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery,
- firstQuery.equals(controlQuery));
- assertTrue("equals is not symmetric after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery,
- controlQuery.equals(firstQuery));
- assertThat("query copy's hashcode is different from original hashcode after calling toQuery, firstQuery: " + firstQuery
- + ", secondQuery: " + controlQuery, controlQuery.hashCode(), equalTo(firstQuery.hashCode()));
-
-
- QB secondQuery = copyQuery(firstQuery);
- //query _name never should affect the result of toQuery, we randomly set it to make sure
- if (randomBoolean()) {
- secondQuery.queryName(secondQuery.queryName() == null ? randomAsciiOfLengthBetween(1, 30) : secondQuery.queryName() + randomAsciiOfLengthBetween(1, 10));
- }
- setSearchContext(randomTypes); // only set search context for toQuery to be more realistic
- Query secondLuceneQuery = secondQuery.toQuery(context);
- assertLuceneQuery(secondQuery, secondLuceneQuery, context);
- SearchContext.removeCurrent(); // remove after assertLuceneQuery since the assertLuceneQuery impl might access the context as well
-
- assertThat("two equivalent query builders lead to different lucene queries", secondLuceneQuery, equalTo(firstLuceneQuery));
-
- //if the initial lucene query is null, changing its boost won't have any effect, we shouldn't test that
- if (firstLuceneQuery != null && supportsBoostAndQueryName()) {
- secondQuery.boost(firstQuery.boost() + 1f + randomFloat());
+ for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) {
+ QueryShardContext context = createShardContext();
+ context.setAllowUnmappedFields(true);
+ QB firstQuery = createTestQueryBuilder();
+ QB controlQuery = copyQuery(firstQuery);
setSearchContext(randomTypes); // only set search context for toQuery to be more realistic
- Query thirdLuceneQuery = secondQuery.toQuery(context);
+ Query firstLuceneQuery = firstQuery.toQuery(context);
+ assertLuceneQuery(firstQuery, firstLuceneQuery, context);
+ SearchContext.removeCurrent(); // remove after assertLuceneQuery since the assertLuceneQuery impl might access the context as well
+ assertTrue(
+ "query is not equal to its copy after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery,
+ firstQuery.equals(controlQuery));
+ assertTrue("equals is not symmetric after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery,
+ controlQuery.equals(firstQuery));
+ assertThat("query copy's hashcode is different from original hashcode after calling toQuery, firstQuery: " + firstQuery
+ + ", secondQuery: " + controlQuery, controlQuery.hashCode(), equalTo(firstQuery.hashCode()));
+
+ QB secondQuery = copyQuery(firstQuery);
+ // query _name never should affect the result of toQuery, we randomly set it to make sure
+ if (randomBoolean()) {
+ secondQuery.queryName(secondQuery.queryName() == null ? randomAsciiOfLengthBetween(1, 30) : secondQuery.queryName()
+ + randomAsciiOfLengthBetween(1, 10));
+ }
+ setSearchContext(randomTypes);
+ Query secondLuceneQuery = secondQuery.toQuery(context);
+ assertLuceneQuery(secondQuery, secondLuceneQuery, context);
SearchContext.removeCurrent();
- assertThat("modifying the boost doesn't affect the corresponding lucene query", firstLuceneQuery, not(equalTo(thirdLuceneQuery)));
+
+ assertThat("two equivalent query builders lead to different lucene queries", secondLuceneQuery, equalTo(firstLuceneQuery));
+
+ // if the initial lucene query is null, changing its boost won't have any effect, we shouldn't test that
+ if (firstLuceneQuery != null && supportsBoostAndQueryName()) {
+ secondQuery.boost(firstQuery.boost() + 1f + randomFloat());
+ setSearchContext(randomTypes);
+ Query thirdLuceneQuery = secondQuery.toQuery(context);
+ SearchContext.removeCurrent();
+ assertThat("modifying the boost doesn't affect the corresponding lucene query", firstLuceneQuery,
+ not(equalTo(thirdLuceneQuery)));
+ }
}
}
/**
* Few queries allow you to set the boost and queryName on the java api, although the corresponding parser doesn't parse them as they are not supported.
* This method allows to disable boost and queryName related tests for those queries. Those queries are easy to identify: their parsers
- * don't parse `boost` and `_name` as they don't apply to the specific query: filter query, wrapper query and match_none
+ * don't parse `boost` and `_name` as they don't apply to the specific query: wrapper query and match_none
*/
protected boolean supportsBoostAndQueryName() {
return true;
@@ -448,8 +450,10 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
* Test serialization and deserialization of the test query.
*/
public void testSerialization() throws IOException {
- QB testQuery = createTestQueryBuilder();
- assertSerialization(testQuery);
+ for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) {
+ QB testQuery = createTestQueryBuilder();
+ assertSerialization(testQuery);
+ }
}
/**
@@ -461,7 +465,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
testQuery.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
QueryBuilder<?> prototype = queryParser(testQuery.getName()).getBuilderPrototype();
- QueryBuilder deserializedQuery = prototype.readFrom(in);
+ QueryBuilder<?> deserializedQuery = prototype.readFrom(in);
assertEquals(deserializedQuery, testQuery);
assertEquals(deserializedQuery.hashCode(), testQuery.hashCode());
assertNotSame(deserializedQuery, testQuery);
@@ -471,35 +475,38 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
}
public void testEqualsAndHashcode() throws IOException {
- QB firstQuery = createTestQueryBuilder();
- assertFalse("query is equal to null", firstQuery.equals(null));
- assertFalse("query is equal to incompatible type", firstQuery.equals(""));
- assertTrue("query is not equal to self", firstQuery.equals(firstQuery));
- assertThat("same query's hashcode returns different values if called multiple times", firstQuery.hashCode(), equalTo(firstQuery.hashCode()));
+ for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) {
+ QB firstQuery = createTestQueryBuilder();
+ assertFalse("query is equal to null", firstQuery.equals(null));
+ assertFalse("query is equal to incompatible type", firstQuery.equals(""));
+ assertTrue("query is not equal to self", firstQuery.equals(firstQuery));
+ assertThat("same query's hashcode returns different values if called multiple times", firstQuery.hashCode(),
+ equalTo(firstQuery.hashCode()));
- QB secondQuery = copyQuery(firstQuery);
- assertTrue("query is not equal to self", secondQuery.equals(secondQuery));
- assertTrue("query is not equal to its copy", firstQuery.equals(secondQuery));
- assertTrue("equals is not symmetric", secondQuery.equals(firstQuery));
- assertThat("query copy's hashcode is different from original hashcode", secondQuery.hashCode(), equalTo(firstQuery.hashCode()));
+ QB secondQuery = copyQuery(firstQuery);
+ assertTrue("query is not equal to self", secondQuery.equals(secondQuery));
+ assertTrue("query is not equal to its copy", firstQuery.equals(secondQuery));
+ assertTrue("equals is not symmetric", secondQuery.equals(firstQuery));
+ assertThat("query copy's hashcode is different from original hashcode", secondQuery.hashCode(), equalTo(firstQuery.hashCode()));
- QB thirdQuery = copyQuery(secondQuery);
- assertTrue("query is not equal to self", thirdQuery.equals(thirdQuery));
- assertTrue("query is not equal to its copy", secondQuery.equals(thirdQuery));
- assertThat("query copy's hashcode is different from original hashcode", secondQuery.hashCode(), equalTo(thirdQuery.hashCode()));
- assertTrue("equals is not transitive", firstQuery.equals(thirdQuery));
- assertThat("query copy's hashcode is different from original hashcode", firstQuery.hashCode(), equalTo(thirdQuery.hashCode()));
- assertTrue("equals is not symmetric", thirdQuery.equals(secondQuery));
- assertTrue("equals is not symmetric", thirdQuery.equals(firstQuery));
+ QB thirdQuery = copyQuery(secondQuery);
+ assertTrue("query is not equal to self", thirdQuery.equals(thirdQuery));
+ assertTrue("query is not equal to its copy", secondQuery.equals(thirdQuery));
+ assertThat("query copy's hashcode is different from original hashcode", secondQuery.hashCode(), equalTo(thirdQuery.hashCode()));
+ assertTrue("equals is not transitive", firstQuery.equals(thirdQuery));
+ assertThat("query copy's hashcode is different from original hashcode", firstQuery.hashCode(), equalTo(thirdQuery.hashCode()));
+ assertTrue("equals is not symmetric", thirdQuery.equals(secondQuery));
+ assertTrue("equals is not symmetric", thirdQuery.equals(firstQuery));
- if (randomBoolean()) {
- secondQuery.queryName(secondQuery.queryName() == null ? randomAsciiOfLengthBetween(1, 30) : secondQuery.queryName()
- + randomAsciiOfLengthBetween(1, 10));
- } else {
- secondQuery.boost(firstQuery.boost() + 1f + randomFloat());
+ if (randomBoolean()) {
+ secondQuery.queryName(secondQuery.queryName() == null ? randomAsciiOfLengthBetween(1, 30) : secondQuery.queryName()
+ + randomAsciiOfLengthBetween(1, 10));
+ } else {
+ secondQuery.boost(firstQuery.boost() + 1f + randomFloat());
+ }
+ assertThat("different queries should not be equal", secondQuery, not(equalTo(firstQuery)));
+ assertThat("different queries should have different hashcode", secondQuery.hashCode(), not(equalTo(firstQuery.hashCode())));
}
- assertThat("different queries should not be equal", secondQuery, not(equalTo(firstQuery)));
- assertThat("different queries should have different hashcode", secondQuery.hashCode(), not(equalTo(firstQuery.hashCode())));
}
private QueryParser<?> queryParser(String queryId) {
diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
index 3280ef2679d..4b19377f54f 100644
--- a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
@@ -35,6 +35,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.test.geo.RandomShapeGenerator;
+import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
import org.junit.After;
import java.io.IOException;
@@ -55,8 +56,10 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQueryBuilder> {
+ assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
super.testToQuery();
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
index fde284a45d3..714aa2c2dda 100644
--- a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
@@ -20,17 +20,11 @@
package org.elasticsearch.index.query;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
-import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.support.QueryInnerHits;
@@ -41,7 +35,6 @@ import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.TestSearchContext;
import java.io.IOException;
-import java.util.Arrays;
import static org.hamcrest.CoreMatchers.instanceOf;
@@ -131,38 +124,6 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBuilder> {
diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryFilterBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryFilterBuilderTests.java
deleted file mode 100644
-public class QueryFilterBuilderTests extends AbstractQueryTestCase<QueryFilterBuilder> {
-
- @Override
- protected QueryFilterBuilder doCreateTestQueryBuilder() {
- QueryBuilder innerQuery = RandomQueryBuilder.createQuery(random());
- return new QueryFilterBuilder(innerQuery);
- }
-
- @Override
- protected void doAssertLuceneQuery(QueryFilterBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
- Query innerQuery = queryBuilder.innerQuery().toQuery(context);
- if (innerQuery == null) {
- assertThat(query, nullValue());
- } else {
- assertThat(query, instanceOf(ConstantScoreQuery.class));
- ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query;
- assertThat(constantScoreQuery.getQuery(), equalTo(innerQuery));
- }
- }
-
- @Override
- protected boolean supportsBoostAndQueryName() {
- return false;
- }
-
- /**
- * test that wrapping an inner filter that returns null also returns null to pass on upwards
- */
- public void testInnerQueryReturnsNull() throws IOException {
- // create inner filter
- String queryString = "{ \"constant_score\" : { \"filter\" : {} } }";
- QueryBuilder<?> innerQuery = parseQuery(queryString);
- // check that when wrapping this filter, toQuery() returns null
- QueryFilterBuilder queryFilterQuery = new QueryFilterBuilder(innerQuery);
- assertNull(queryFilterQuery.toQuery(createShardContext()));
- }
-
- public void testValidate() {
- try {
- new QueryFilterBuilder(null);
- fail("cannot be null");
- } catch (IllegalArgumentException e) {
- // expected
- }
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
index 480517bd705..d1dc05fdbdf 100644
--- a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
@@ -27,28 +27,38 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
-
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Set;
+import java.util.TreeMap;
import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQueryStringBuilder> {
+ private String[] queryTerms;
+
@Override
protected SimpleQueryStringBuilder doCreateTestQueryBuilder() {
- SimpleQueryStringBuilder result = new SimpleQueryStringBuilder(randomAsciiOfLengthBetween(1, 10));
+ int numberOfTerms = randomIntBetween(1, 5);
+ queryTerms = new String[numberOfTerms];
+ StringBuilder queryString = new StringBuilder();
+ for (int i = 0; i < numberOfTerms; i++) {
+ queryTerms[i] = randomAsciiOfLengthBetween(1, 10);
+ queryString.append(queryTerms[i] + " ");
+ }
+ SimpleQueryStringBuilder result = new SimpleQueryStringBuilder(queryString.toString().trim());
if (randomBoolean()) {
result.analyzeWildcard(randomBoolean());
}
@@ -72,9 +82,13 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQueryStringBuilder> {
Set<SimpleQueryStringFlag> flagSet = new HashSet<>();
+ if (numberOfTerms > 1) {
+ flagSet.add(SimpleQueryStringFlag.WHITESPACE);
+ }
int size = randomIntBetween(0, SimpleQueryStringFlag.values().length);
for (int i = 0; i < size; i++) {
- flagSet.add(randomFrom(SimpleQueryStringFlag.values()));
+ SimpleQueryStringFlag randomFlag = randomFrom(SimpleQueryStringFlag.values());
+ flagSet.add(randomFlag);
}
if (flagSet.size() > 0) {
result.flags(flagSet.toArray(new SimpleQueryStringFlag[flagSet.size()]));
@@ -85,13 +99,12 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQueryStringBuilder> {
Map<String, Float> fields = new HashMap<>();
for (int i = 0; i < fieldCount; i++) {
if (randomBoolean()) {
- fields.put(randomAsciiOfLengthBetween(1, 10), AbstractQueryBuilder.DEFAULT_BOOST);
+ fields.put("f" + i + "_" + randomAsciiOfLengthBetween(1, 10), AbstractQueryBuilder.DEFAULT_BOOST);
} else {
- fields.put(randomBoolean() ? STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10), 2.0f / randomIntBetween(1, 20));
+ fields.put(randomBoolean() ? STRING_FIELD_NAME : "f" + i + "_" + randomAsciiOfLengthBetween(1, 10), 2.0f / randomIntBetween(1, 20));
}
}
result.fields(fields);
-
return result;
}
@@ -256,8 +269,8 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQueryStringBuilder> {
if (getCurrentTypes().length > 0 || shardContext.indexQueryParserService().getIndexCreatedVersion().before(Version.V_1_4_0_Beta1)) {
Query luceneQuery = queryBuilder.toQuery(shardContext);
- assertThat(luceneQuery, instanceOf(TermQuery.class));
- TermQuery termQuery = (TermQuery) luceneQuery;
+ assertThat(luceneQuery, instanceOf(BooleanQuery.class));
+ TermQuery termQuery = (TermQuery) ((BooleanQuery) luceneQuery).clauses().get(0).getQuery();
assertThat(termQuery.getTerm(), equalTo(new Term(MetaData.ALL, query)));
}
}
@@ -275,7 +288,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQueryStringBuilder> {
- } else if (queryBuilder.fields().size() > 1) {
+ } else {
assertTrue("Query should have been BooleanQuery but was " + query.getClass().getName(), query instanceof BooleanQuery);
BooleanQuery boolQuery = (BooleanQuery) query;
@@ -288,32 +301,42 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQueryStringBuilder> {
- Iterator<String> fields = queryBuilder.fields().keySet().iterator();
- for (BooleanClause booleanClause : boolQuery) {
- assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class));
- TermQuery termQuery = (TermQuery) booleanClause.getQuery();
- assertThat(termQuery.getTerm().field(), equalTo(fields.next()));
- assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(queryBuilder.value().toLowerCase(Locale.ROOT)));
+ assertThat(boolQuery.clauses().size(), equalTo(queryTerms.length));
+ Map<String, Float> expectedFields = new TreeMap<>(queryBuilder.fields());
+ if (expectedFields.size() == 0) {
+ expectedFields.put(MetaData.ALL, AbstractQueryBuilder.DEFAULT_BOOST);
+ }
+ for (int i = 0; i < queryTerms.length; i++) {
+ BooleanClause booleanClause = boolQuery.clauses().get(i);
+ Iterator<Entry<String, Float>> fieldsIter = expectedFields.entrySet().iterator();
+
+ if (queryTerms.length == 1 && expectedFields.size() == 1) {
+ assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class));
+ TermQuery termQuery = (TermQuery) booleanClause.getQuery();
+ Entry<String, Float> entry = fieldsIter.next();
+ assertThat(termQuery.getTerm().field(), equalTo(entry.getKey()));
+ assertThat(termQuery.getBoost(), equalTo(entry.getValue()));
+ assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(queryTerms[i].toLowerCase(Locale.ROOT)));
+ } else {
+ assertThat(booleanClause.getQuery(), instanceOf(BooleanQuery.class));
+ for (BooleanClause clause : ((BooleanQuery) booleanClause.getQuery()).clauses()) {
+ TermQuery termQuery = (TermQuery) clause.getQuery();
+ Entry<String, Float> entry = fieldsIter.next();
+ assertThat(termQuery.getTerm().field(), equalTo(entry.getKey()));
+ assertThat(termQuery.getBoost(), equalTo(entry.getValue()));
+ assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(queryTerms[i].toLowerCase(Locale.ROOT)));
+ }
+ }
}
if (queryBuilder.minimumShouldMatch() != null) {
- assertThat(boolQuery.getMinimumNumberShouldMatch(), greaterThan(0));
+ int optionalClauses = queryTerms.length;
+ if (queryBuilder.defaultOperator().equals(Operator.AND) && queryTerms.length > 1) {
+ optionalClauses = 0;
+ }
+ int expectedMinimumShouldMatch = Queries.calculateMinShouldMatch(optionalClauses, queryBuilder.minimumShouldMatch());
+ assertEquals(expectedMinimumShouldMatch, boolQuery.getMinimumNumberShouldMatch());
}
- } else if (queryBuilder.fields().size() <= 1) {
- assertTrue("Query should have been TermQuery but was " + query.getClass().getName(), query instanceof TermQuery);
-
- TermQuery termQuery = (TermQuery) query;
- String field;
- if (queryBuilder.fields().size() == 0) {
- field = MetaData.ALL;
- } else {
- field = queryBuilder.fields().keySet().iterator().next();
- }
- assertThat(termQuery.getTerm().field(), equalTo(field));
- assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(queryBuilder.value().toLowerCase(Locale.ROOT)));
- } else {
- fail("Encountered lucene query type we do not have a validation implementation for in our " + SimpleQueryStringBuilderTests.class.getSimpleName());
}
}
@@ -339,15 +362,18 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQueryStringBuilder> {
+ if (simpleQueryStringBuilder.defaultOperator().equals(Operator.AND) && numberOfTerms > 1) {
+ expectedMinimumShouldMatch = 0;
+ }
+
+ assertEquals(expectedMinimumShouldMatch, query.getMinimumNumberShouldMatch());
+ for (BooleanClause clause : query.clauses()) {
+ if (numberOfFields == 1 && numberOfTerms == 1) {
+ assertTrue(clause.getQuery() instanceof TermQuery);
+ } else {
+ assertEquals(numberOfFields, ((BooleanQuery) clause.getQuery()).clauses().size());
+ }
+ }
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java
index c4951640cce..ce5eca5d11c 100644
--- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java
@@ -88,23 +88,12 @@ public class TemplateQueryIT extends ESIntegTestCase {
}
public void testTemplateInBodyWithSize() throws IOException {
- String request = "{\n" +
- " \"size\":0," +
- " \"query\": {\n" +
- " \"template\": {\n" +
- " \"query\": {\"match_{{template}}\": {}},\n" +
- " \"params\" : {\n" +
- " \"template\" : \"all\"\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- "}";
Map<String, Object> params = new HashMap<>();
params.put("template", "all");
SearchResponse sr = client().prepareSearch()
.setSource(
new SearchSourceBuilder().size(0).query(
- QueryBuilders.templateQuery(new Template("{ \"query\": { \"match_{{template}}\": {} } }",
+ QueryBuilders.templateQuery(new Template("{ \"match_{{template}}\": {} }",
ScriptType.INLINE, null, null, params)))).execute()
.actionGet();
assertNoFailures(sr);
diff --git a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java
index 813644406b2..22eb8cc39b7 100644
--- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java
@@ -37,11 +37,15 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisModule;
-import org.elasticsearch.index.cache.IndexCacheModule;
+import org.elasticsearch.index.cache.IndexCache;
+import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
+import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
+import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
@@ -95,12 +99,15 @@ public class TemplateQueryParserTests extends ESTestCase {
},
new ScriptModule(settings),
new IndexSettingsModule(index, settings),
- new IndexCacheModule(settings),
new AnalysisModule(settings, new IndicesAnalysisService(settings)),
new AbstractModule() {
@Override
protected void configure() {
- SimilarityService service = new SimilarityService(IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST), Collections.EMPTY_MAP);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
+ SimilarityService service = new SimilarityService(idxSettings, Collections.EMPTY_MAP);
+ BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null));
+ bind(BitsetFilterCache.class).toInstance(bitsetFilterCache);
+ bind(IndexCache.class).toInstance(new IndexCache(idxSettings, new NoneQueryCache(idxSettings), bitsetFilterCache));
bind(SimilarityService.class).toInstance(service);
bind(Client.class).toInstance(proxy); // not needed here
Multibinder.newSetBinder(binder(), ScoreFunctionParser.class);
diff --git a/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java
index d326beacd23..3976823aa6b 100644
--- a/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java
@@ -19,8 +19,6 @@
package org.elasticsearch.index.query;
-import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
@@ -30,7 +28,6 @@ import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
-import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -45,9 +42,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.instanceOf;
-import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.*;
public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuilder> {
private List<Object> randomTerms;
@@ -206,60 +201,6 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuilder> {
diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java
static PluginsService newPluginsService(Settings settings, Class<? extends Plugin>... classpathPlugins) {
return new PluginsService(settings, new Environment(settings).pluginsFile(), Arrays.asList(classpathPlugins));
}
@@ -86,4 +110,17 @@ public class PluginsServiceTests extends ESTestCase {
assertTrue(msg, msg.contains("plugin [additional-settings2]"));
}
}
+
+ public void testOnModuleExceptionsArePropagated() {
+ Settings settings = Settings.builder()
+ .put("path.home", createTempDir()).build();
+ PluginsService service = newPluginsService(settings, FailOnModule.class);
+ try {
+ service.processModule(new BrokenModule());
+ fail("boom");
+ } catch (ElasticsearchException ex) {
+ assertEquals("failed to invoke onModule", ex.getMessage());
+ assertEquals("boom", ex.getCause().getCause().getMessage());
+ }
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java
index efdcf0062c3..376e8578e2e 100644
--- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java
@@ -31,7 +31,7 @@ import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
public class SearchModuleTests extends ModuleTestCase {
public void testDoubleRegister() {
- SearchModule module = new SearchModule(Settings.EMPTY);
+ SearchModule module = new SearchModule();
try {
module.registerHighlighter("fvh", PlainHighlighter.class);
} catch (IllegalArgumentException e) {
@@ -46,7 +46,7 @@ public class SearchModuleTests extends ModuleTestCase {
}
public void testRegisterSuggester() {
- SearchModule module = new SearchModule(Settings.EMPTY);
+ SearchModule module = new SearchModule();
module.registerSuggester("custom", CustomSuggester.class);
try {
module.registerSuggester("custom", CustomSuggester.class);
@@ -57,7 +57,7 @@ public class SearchModuleTests extends ModuleTestCase {
}
public void testRegisterHighlighter() {
- SearchModule module = new SearchModule(Settings.EMPTY);
+ SearchModule module = new SearchModule();
module.registerHighlighter("custom", CustomHighlighter.class);
try {
module.registerHighlighter("custom", CustomHighlighter.class);
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java
index 534e781b1cb..6a49932c4ee 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java
@@ -20,11 +20,9 @@ package org.elasticsearch.search.aggregations.bucket;
import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.ObjectIntMap;
-import com.carrotsearch.hppc.ObjectObjectHashMap;
-import com.carrotsearch.hppc.ObjectObjectMap;
import com.carrotsearch.hppc.cursors.ObjectIntCursor;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoPoint;
@@ -86,13 +84,13 @@ public class GeoHashGridIT extends ESIntegTestCase {
//generate random point
double lat = (180d * random.nextDouble()) - 90d;
double lng = (360d * random.nextDouble()) - 180d;
- String randomGeoHash = XGeoHashUtils.stringEncode(lng, lat, XGeoHashUtils.PRECISION);
+ String randomGeoHash = GeoHashUtils.stringEncode(lng, lat, GeoHashUtils.PRECISION);
//Index at the highest resolution
cities.add(indexCity("idx", randomGeoHash, lat + ", " + lng));
expectedDocCountsForGeoHash.put(randomGeoHash, expectedDocCountsForGeoHash.getOrDefault(randomGeoHash, 0) + 1);
//Update expected doc counts for all resolutions..
- for (int precision = XGeoHashUtils.PRECISION - 1; precision > 0; precision--) {
- String hash = XGeoHashUtils.stringEncode(lng, lat, precision);
+ for (int precision = GeoHashUtils.PRECISION - 1; precision > 0; precision--) {
+ String hash = GeoHashUtils.stringEncode(lng, lat, precision);
if ((smallestGeoHash == null) || (hash.length() < smallestGeoHash.length())) {
smallestGeoHash = hash;
}
@@ -115,8 +113,8 @@ public class GeoHashGridIT extends ESIntegTestCase {
double lng = (360d * random.nextDouble()) - 180d;
points.add(lat + "," + lng);
// Update expected doc counts for all resolutions..
- for (int precision = XGeoHashUtils.PRECISION; precision > 0; precision--) {
- final String geoHash = XGeoHashUtils.stringEncode(lng, lat, precision);
+ for (int precision = GeoHashUtils.PRECISION; precision > 0; precision--) {
+ final String geoHash = GeoHashUtils.stringEncode(lng, lat, precision);
geoHashes.add(geoHash);
}
}
@@ -131,7 +129,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
}
public void testSimple() throws Exception {
- for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+ for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(geohashGrid("geohashgrid")
.field("location")
@@ -155,14 +153,14 @@ public class GeoHashGridIT extends ESIntegTestCase {
assertEquals("Geohash " + geohash + " has wrong doc count ",
expectedBucketCount, bucketCount);
GeoPoint geoPoint = (GeoPoint) propertiesKeys[i];
- assertThat(XGeoHashUtils.stringEncode(geoPoint.lon(), geoPoint.lat(), precision), equalTo(geohash));
+ assertThat(GeoHashUtils.stringEncode(geoPoint.lon(), geoPoint.lat(), precision), equalTo(geohash));
assertThat((long) propertiesDocCounts[i], equalTo(bucketCount));
}
}
}
public void testMultivalued() throws Exception {
- for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+ for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
SearchResponse response = client().prepareSearch("multi_valued_idx")
.addAggregation(geohashGrid("geohashgrid")
.field("location")
@@ -188,7 +186,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
public void testFiltered() throws Exception {
GeoBoundingBoxQueryBuilder bbox = new GeoBoundingBoxQueryBuilder("location");
bbox.setCorners(smallestGeoHash, smallestGeoHash).queryName("bbox");
- for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+ for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
AggregationBuilders.filter("filtered").filter(bbox)
@@ -219,7 +217,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
}
public void testUnmapped() throws Exception {
- for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+ for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx_unmapped")
.addAggregation(geohashGrid("geohashgrid")
.field("location")
@@ -236,7 +234,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
}
public void testPartiallyUnmapped() throws Exception {
- for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+ for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
.addAggregation(geohashGrid("geohashgrid")
.field("location")
@@ -260,7 +258,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
}
public void testTopMatch() throws Exception {
- for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+ for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(geohashGrid("geohashgrid")
.field("location")
@@ -293,7 +291,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
// making sure this doesn't run into an OOME
public void testSizeIsZero() {
- for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+ for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
final int size = randomBoolean() ? 0 : randomIntBetween(1, Integer.MAX_VALUE);
final int shardSize = randomBoolean() ? -1 : 0;
SearchResponse response = client().prepareSearch("idx")
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java
index 7e3b9595983..d138c0ccd3e 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.search.aggregations.bucket;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.query.QueryBuilders;
@@ -64,7 +64,7 @@ public class ShardReduceIT extends ESIntegTestCase {
.startObject()
.field("value", value)
.field("ip", "10.0.0." + value)
- .field("location", XGeoHashUtils.stringEncode(5, 52, XGeoHashUtils.PRECISION))
+ .field("location", GeoHashUtils.stringEncode(5, 52, GeoHashUtils.PRECISION))
.field("date", date)
.field("term-l", 1)
.field("term-d", 1.5)
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java
index 58d77b0eedd..f2acc7c83a8 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java
@@ -23,7 +23,7 @@ import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.ObjectIntMap;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import com.carrotsearch.hppc.ObjectObjectMap;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoPoint;
@@ -203,8 +203,8 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
}
private void updateGeohashBucketsCentroid(final GeoPoint location) {
- String hash = XGeoHashUtils.stringEncode(location.lon(), location.lat(), XGeoHashUtils.PRECISION);
- for (int precision = XGeoHashUtils.PRECISION; precision > 0; --precision) {
+ String hash = GeoHashUtils.stringEncode(location.lon(), location.lat(), GeoHashUtils.PRECISION);
+ for (int precision = GeoHashUtils.PRECISION; precision > 0; --precision) {
final String h = hash.substring(0, precision);
expectedDocCountsForGeoHash.put(h, expectedDocCountsForGeoHash.getOrDefault(h, 0) + 1);
expectedCentroidsForGeoHash.put(h, updateHashCentroid(h, location));
diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
index 95f0ead82fc..e360649e919 100644
--- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
@@ -30,7 +30,7 @@ import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.cache.IndexCacheModule;
+import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.query.HasChildQueryBuilder;
import org.elasticsearch.index.query.IdsQueryBuilder;
@@ -97,8 +97,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal))
// aggressive filter caching so that we can assert on the filter cache size
- .put(IndexCacheModule.QUERY_CACHE_TYPE, IndexCacheModule.INDEX_QUERY_CACHE)
- .put(IndexCacheModule.QUERY_CACHE_EVERYTHING, true)
+ .put(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE)
+ .put(IndexModule.QUERY_CACHE_EVERYTHING, true)
.build();
}
diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
index 5bbc1815a80..ee2fb575ed8 100644
--- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
+++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
@@ -29,7 +29,7 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkResponse;
@@ -465,8 +465,8 @@ public class GeoFilterIT extends ESIntegTestCase {
String geohash = randomhash(10);
logger.info("Testing geohash_cell filter for [{}]", geohash);
- Collection<? extends CharSequence> neighbors = XGeoHashUtils.neighbors(geohash);
- Collection<? extends CharSequence> parentNeighbors = XGeoHashUtils.neighbors(geohash.substring(0, geohash.length() - 1));
+ Collection<? extends CharSequence> neighbors = GeoHashUtils.neighbors(geohash);
+ Collection<? extends CharSequence> parentNeighbors = GeoHashUtils.neighbors(geohash.substring(0, geohash.length() - 1));
logger.info("Neighbors {}", neighbors);
logger.info("Parent Neighbors {}", parentNeighbors);
@@ -536,24 +536,24 @@ public class GeoFilterIT extends ESIntegTestCase {
public void testNeighbors() {
// Simple root case
- assertThat(XGeoHashUtils.addNeighbors("7", new ArrayList()), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s"));
+ assertThat(GeoHashUtils.addNeighbors("7", new ArrayList()), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s"));
// Root cases (Outer cells)
- assertThat(XGeoHashUtils.addNeighbors("0", new ArrayList()), containsInAnyOrder("1", "2", "3", "p", "r"));
- assertThat(XGeoHashUtils.addNeighbors("b", new ArrayList()), containsInAnyOrder("8", "9", "c", "x", "z"));
- assertThat(XGeoHashUtils.addNeighbors("p", new ArrayList()), containsInAnyOrder("n", "q", "r", "0", "2"));
- assertThat(XGeoHashUtils.addNeighbors("z", new ArrayList()), containsInAnyOrder("8", "b", "w", "x", "y"));
+ assertThat(GeoHashUtils.addNeighbors("0", new ArrayList()), containsInAnyOrder("1", "2", "3", "p", "r"));
+ assertThat(GeoHashUtils.addNeighbors("b", new ArrayList()), containsInAnyOrder("8", "9", "c", "x", "z"));
+ assertThat(GeoHashUtils.addNeighbors("p", new ArrayList()), containsInAnyOrder("n", "q", "r", "0", "2"));
+ assertThat(GeoHashUtils.addNeighbors("z", new ArrayList()), containsInAnyOrder("8", "b", "w", "x", "y"));
// Root crossing dateline
- assertThat(XGeoHashUtils.addNeighbors("2", new ArrayList()), containsInAnyOrder("0", "1", "3", "8", "9", "p", "r", "x"));
- assertThat(XGeoHashUtils.addNeighbors("r", new ArrayList()), containsInAnyOrder("0", "2", "8", "n", "p", "q", "w", "x"));
+ assertThat(GeoHashUtils.addNeighbors("2", new ArrayList()), containsInAnyOrder("0", "1", "3", "8", "9", "p", "r", "x"));
+ assertThat(GeoHashUtils.addNeighbors("r", new ArrayList()), containsInAnyOrder("0", "2", "8", "n", "p", "q", "w", "x"));
// level1: simple case
- assertThat(XGeoHashUtils.addNeighbors("dk", new ArrayList()), containsInAnyOrder("d5", "d7", "de", "dh", "dj", "dm", "ds", "dt"));
+ assertThat(GeoHashUtils.addNeighbors("dk", new ArrayList()), containsInAnyOrder("d5", "d7", "de", "dh", "dj", "dm", "ds", "dt"));
// Level1: crossing cells
- assertThat(XGeoHashUtils.addNeighbors("d5", new ArrayList()), containsInAnyOrder("d4", "d6", "d7", "dh", "dk", "9f", "9g", "9u"));
- assertThat(XGeoHashUtils.addNeighbors("d0", new ArrayList()), containsInAnyOrder("d1", "d2", "d3", "9b", "9c", "6p", "6r", "3z"));
+ assertThat(GeoHashUtils.addNeighbors("d5", new ArrayList()), containsInAnyOrder("d4", "d6", "d7", "dh", "dk", "9f", "9g", "9u"));
+ assertThat(GeoHashUtils.addNeighbors("d0", new ArrayList()), containsInAnyOrder("d1", "d2", "d3", "9b", "9c", "6p", "6r", "3z"));
}
public static double distance(double lat1, double lon1, double lat2, double lon2) {
diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java
index e1f45d746ed..a1f39ef6ee2 100644
--- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java
@@ -298,7 +298,6 @@ public class GeoShapeIntegrationIT extends ESIntegTestCase {
assertHitCount(result, 1);
}
- @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/9904")
public void testShapeFilterWithRandomGeoCollection() throws Exception {
// Create a random geometry collection.
GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(getRandom());
diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
index 865c5bf24f2..8a2506ec3ad 100644
--- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
@@ -107,7 +107,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.setSource(jsonBuilder().startObject().field("text", "text").endObject())
.get();
refresh();
- String highlighter = randomFrom(new String[]{"plain", "postings", "fvh"});
+ String highlighter = randomFrom("plain", "postings", "fvh");
SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text")))
.highlighter(new HighlightBuilder().field(new Field("*").highlighterType(highlighter))).get();
assertHighlight(search, 0, "text", 0, equalTo("<em>text</em>"));
@@ -147,7 +147,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.setSource(jsonBuilder().startObject().field("long_text", builder.toString()).field("text", "text").endObject())
.get();
refresh();
- String highlighter = randomFrom(new String[] { "plain", "postings", "fvh" });
+ String highlighter = randomFrom("plain", "postings", "fvh");
SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text")))
.highlighter(new HighlightBuilder().field(new Field("*").highlighterType(highlighter))).get();
assertHighlight(search, 0, "text", 0, equalTo("<em>text</em>"));
@@ -192,7 +192,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.setSource(jsonBuilder().startObject().field("unstored_text", "text").field("text", "text").endObject())
.get();
refresh();
- String highlighter = randomFrom(new String[] { "plain", "postings", "fvh" });
+ String highlighter = randomFrom("plain", "postings", "fvh");
SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text")))
.highlighter(new HighlightBuilder().field(new Field("*").highlighterType(highlighter))).get();
assertHighlight(search, 0, "text", 0, equalTo("<em>text</em>"));
@@ -1329,14 +1329,14 @@ public class HighlighterSearchIT extends ESIntegTestCase {
assertFailures(client().prepareSearch()
.setQuery(matchPhraseQuery("title", "this is a test"))
- .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("fast-vector-highlighter")),
+ .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("fvh")),
RestStatus.BAD_REQUEST,
containsString("the field [title] should be indexed with term vector with position offsets to be used with fast vector highlighter"));
//should not fail if there is a wildcard
assertNoFailures(client().prepareSearch()
.setQuery(matchPhraseQuery("title", "this is a test"))
- .highlighter(new HighlightBuilder().field("tit*", 50, 1, 10).highlighterType("fast-vector-highlighter")).get());
+ .highlighter(new HighlightBuilder().field("tit*", 50, 1, 10).highlighterType("fvh")).get());
}
public void testDisableFastVectorHighlighter() throws Exception {
@@ -1364,7 +1364,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
// Using plain highlighter instead of FVH
search = client().prepareSearch()
.setQuery(matchPhraseQuery("title", "test for the workaround"))
- .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("highlighter"))
+ .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("plain"))
.get();
for (int i = 0; i < indexRequestBuilders.length; i++) {
@@ -1375,8 +1375,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
search = client().prepareSearch()
.setQuery(matchPhraseQuery("title", "test for the workaround"))
.highlighter(
- new HighlightBuilder().field(new HighlightBuilder.Field("title").highlighterType("highlighter")).highlighterType(
- "highlighter"))
+ new HighlightBuilder().field(new HighlightBuilder.Field("title").highlighterType("plain")).highlighterType(
+ "plain"))
.get();
for (int i = 0; i < indexRequestBuilders.length; i++) {
@@ -2058,7 +2058,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
logger.info("--> searching on field2, highlighting on field2, falling back to the plain highlighter");
source = searchSource()
.query(matchPhraseQuery("_all", "quick brown"))
- .highlighter(highlight().field("field2").preTags("<field2>").postTags("</field2>").highlighterType("highlighter").requireFieldMatch(false));
+ .highlighter(highlight().field("field2").preTags("<field2>").postTags("</field2>").highlighterType("plain").requireFieldMatch(false));
searchResponse = client().search(searchRequest("test").source(source)).actionGet();
@@ -2300,7 +2300,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
assertFailures(client().prepareSearch()
.setQuery(matchQuery("title", "this is a test"))
- .highlighter(new HighlightBuilder().field("title").highlighterType("postings-highlighter")),
+ .highlighter(new HighlightBuilder().field("title").highlighterType("postings")),
RestStatus.BAD_REQUEST,
containsString("the field [title] should be indexed with positions and offsets in the postings list to be used with postings highlighter"));
diff --git a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
index 358122f54ec..404b221e389 100644
--- a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
@@ -109,7 +109,6 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "3").setSource("body", "foo bar"),
client().prepareIndex("test", "type1", "4").setSource("body", "foo baz bar"));
-
logger.info("--> query 1");
SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").minimumShouldMatch("2")).get();
assertHitCount(searchResponse, 2l);
@@ -120,7 +119,13 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
assertHitCount(searchResponse, 2l);
assertSearchHits(searchResponse, "3", "4");
- logger.info("--> query 3");
+ logger.info("--> query 3"); // test case from #13884
+ searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo")
+ .field("body").field("body2").field("body3").minimumShouldMatch("-50%")).get();
+ assertHitCount(searchResponse, 3l);
+ assertSearchHits(searchResponse, "1", "3", "4");
+
+ logger.info("--> query 4");
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body").field("body2").minimumShouldMatch("70%")).get();
assertHitCount(searchResponse, 2l);
assertSearchHits(searchResponse, "3", "4");
@@ -131,17 +136,17 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "7").setSource("body2", "foo bar", "other", "foo"),
client().prepareIndex("test", "type1", "8").setSource("body2", "foo baz bar", "other", "foo"));
- logger.info("--> query 4");
+ logger.info("--> query 5");
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").field("body").field("body2").minimumShouldMatch("2")).get();
assertHitCount(searchResponse, 4l);
assertSearchHits(searchResponse, "3", "4", "7", "8");
- logger.info("--> query 5");
+ logger.info("--> query 6");
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").minimumShouldMatch("2")).get();
assertHitCount(searchResponse, 5l);
assertSearchHits(searchResponse, "3", "4", "6", "7", "8");
- logger.info("--> query 6");
+ logger.info("--> query 7");
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body2").field("other").minimumShouldMatch("70%")).get();
assertHitCount(searchResponse, 3l);
assertSearchHits(searchResponse, "6", "7", "8");
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchIT.java
index 8deeb0dee19..17111ae0a70 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchIT.java
@@ -19,7 +19,7 @@
package org.elasticsearch.search.suggest;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.action.suggest.SuggestRequestBuilder;
@@ -818,7 +818,7 @@ public class ContextSuggestSearchIT extends ESIntegTestCase {
double latitude = 52.22;
double longitude = 4.53;
- String geohash = XGeoHashUtils.stringEncode(longitude, latitude);
+ String geohash = GeoHashUtils.stringEncode(longitude, latitude);
XContentBuilder doc1 = jsonBuilder().startObject().startObject("suggest_geo").field("input", "Hotel Marriot in Amsterdam").startObject("context").startObject("location").field("lat", latitude).field("lon", longitude).endObject().endObject().endObject().endObject();
index("test", "test", "1", doc1);
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTests.java
index 9e3043dc624..4fbde2d9058 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTests.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTests.java
@@ -23,7 +23,7 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene53.Lucene53Codec;
+import org.apache.lucene.codecs.lucene54.Lucene54Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Fields;
@@ -272,7 +272,7 @@ public class CompletionPostingsFormatTests extends ESTestCase {
public Lookup buildAnalyzingLookup(final CompletionFieldMapper mapper, String[] terms, String[] surfaces, long[] weights)
throws IOException {
RAMDirectory dir = new RAMDirectory();
- Codec codec = new Lucene53Codec() {
+ Codec codec = new Lucene54Codec() {
@Override
public PostingsFormat getPostingsFormatForField(String field) {
final PostingsFormat in = super.getPostingsFormatForField(field);
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/context/GeoLocationContextMappingTests.java b/core/src/test/java/org/elasticsearch/search/suggest/context/GeoLocationContextMappingTests.java
index 4d66c7f82f5..0e4f566b0f3 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/context/GeoLocationContextMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/context/GeoLocationContextMappingTests.java
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.search.suggest.context;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
@@ -52,7 +52,7 @@ public class GeoLocationContextMappingTests extends ESTestCase {
XContentParser parser = XContentHelper.createParser(builder.bytes());
parser.nextToken();
- String geohash = XGeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
+ String geohash = GeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
HashMap<String, Object> config = new HashMap<>();
config.put("precision", 12);
config.put("default", geohash);
@@ -171,8 +171,8 @@ public class GeoLocationContextMappingTests extends ESTestCase {
}
public void testUseWithMultiGeoHashGeoContext() throws Exception {
- String geohash1 = XGeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
- String geohash2 = XGeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
+ String geohash1 = GeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
+ String geohash2 = GeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
XContentBuilder builder = jsonBuilder().startObject().startArray("location").value(geohash1).value(geohash2).endArray().endObject();
XContentParser parser = XContentHelper.createParser(builder.bytes());
parser.nextToken(); // start of object
diff --git a/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java b/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java
index 4d9ff6dfc64..b9d935c4628 100644
--- a/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java
+++ b/core/src/test/java/org/elasticsearch/test/ESIntegTestCase.java
@@ -325,29 +325,21 @@ public abstract class ESIntegTestCase extends ESTestCase {
}
protected final void beforeInternal() throws Exception {
- assert Thread.getDefaultUncaughtExceptionHandler() instanceof ElasticsearchUncaughtExceptionHandler;
- try {
- final Scope currentClusterScope = getCurrentClusterScope();
- switch (currentClusterScope) {
- case SUITE:
- assert SUITE_SEED != null : "Suite seed was not initialized";
- currentCluster = buildAndPutCluster(currentClusterScope, SUITE_SEED);
- break;
- case TEST:
- currentCluster = buildAndPutCluster(currentClusterScope, randomLong());
- break;
- default:
- fail("Unknown Scope: [" + currentClusterScope + "]");
- }
- cluster().beforeTest(getRandom(), getPerTestTransportClientRatio());
- cluster().wipe(excludeTemplates());
- randomIndexTemplate();
- } catch (OutOfMemoryError e) {
- if (e.getMessage().contains("unable to create new native thread")) {
- ESTestCase.printStackDump(logger);
- }
- throw e;
+ final Scope currentClusterScope = getCurrentClusterScope();
+ switch (currentClusterScope) {
+ case SUITE:
+ assert SUITE_SEED != null : "Suite seed was not initialized";
+ currentCluster = buildAndPutCluster(currentClusterScope, SUITE_SEED);
+ break;
+ case TEST:
+ currentCluster = buildAndPutCluster(currentClusterScope, randomLong());
+ break;
+ default:
+ fail("Unknown Scope: [" + currentClusterScope + "]");
}
+ cluster().beforeTest(getRandom(), getPerTestTransportClientRatio());
+ cluster().wipe(excludeTemplates());
+ randomIndexTemplate();
}
private void printTestMessage(String message) {
diff --git a/core/src/test/java/org/elasticsearch/test/ESTestCase.java b/core/src/test/java/org/elasticsearch/test/ESTestCase.java
index 78d004f43e4..c5e87f4a689 100644
--- a/core/src/test/java/org/elasticsearch/test/ESTestCase.java
+++ b/core/src/test/java/org/elasticsearch/test/ESTestCase.java
@@ -145,20 +145,6 @@ public abstract class ESTestCase extends LuceneTestCase {
PathUtilsForTesting.teardown();
}
- // setup a default exception handler which knows when and how to print a stacktrace
- private static Thread.UncaughtExceptionHandler defaultHandler;
-
- @BeforeClass
- public static void setDefaultExceptionHandler() throws Exception {
- defaultHandler = Thread.getDefaultUncaughtExceptionHandler();
- Thread.setDefaultUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler(defaultHandler));
- }
-
- @AfterClass
- public static void restoreDefaultExceptionHandler() throws Exception {
- Thread.setDefaultUncaughtExceptionHandler(defaultHandler);
- }
-
// randomize content type for request builders
@BeforeClass
@@ -551,60 +537,6 @@ public abstract class ESTestCase extends LuceneTestCase {
return builder;
}
- // -----------------------------------------------------------------
- // Failure utilities
- // -----------------------------------------------------------------
-
- static final class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {
-
- private final Thread.UncaughtExceptionHandler parent;
- private final ESLogger logger = Loggers.getLogger(getClass());
-
- private ElasticsearchUncaughtExceptionHandler(Thread.UncaughtExceptionHandler parent) {
- this.parent = parent;
- }
-
- @Override
- public void uncaughtException(Thread t, Throwable e) {
- if (e instanceof EsRejectedExecutionException) {
- if (e.getMessage() != null && ((EsRejectedExecutionException) e).isExecutorShutdown()) {
- return; // ignore the EsRejectedExecutionException when a node shuts down
- }
- } else if (e instanceof OutOfMemoryError) {
- if (e.getMessage() != null && e.getMessage().contains("unable to create new native thread")) {
- printStackDump(logger);
- }
- }
- parent.uncaughtException(t, e);
- }
- }
-
- protected static final void printStackDump(ESLogger logger) {
- // print stack traces if we can't create any native thread anymore
- Map<Thread, StackTraceElement[]> allStackTraces = Thread.getAllStackTraces();
- logger.error(formatThreadStacks(allStackTraces));
- }
-
- /** Dump threads and their current stack trace. */
- public static String formatThreadStacks(Map<Thread, StackTraceElement[]> threads) {
- StringBuilder message = new StringBuilder();
- int cnt = 1;
- final Formatter f = new Formatter(message, Locale.ENGLISH);
- for (Map.Entry<Thread, StackTraceElement[]> e : threads.entrySet()) {
- if (e.getKey().isAlive()) {
- f.format(Locale.ENGLISH, "\n %2d) %s", cnt++, threadName(e.getKey())).flush();
- }
- if (e.getValue().length == 0) {
- message.append("\n at (empty stack)");
- } else {
- for (StackTraceElement ste : e.getValue()) {
- message.append("\n at ").append(ste);
- }
- }
- }
- return message.toString();
- }
-
private static String threadName(Thread t) {
return "Thread[" +
"id=" + t.getId() +
diff --git a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java
index cacf6cd313b..6cafe4c3b79 100644
--- a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java
+++ b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java
@@ -62,8 +62,8 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.http.HttpServerTransport;
+import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.engine.CommitStats;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.IndexShard;
@@ -446,11 +446,11 @@ public final class InternalTestCluster extends TestCluster {
}
if (random.nextBoolean()) {
- builder.put(IndexCacheModule.QUERY_CACHE_TYPE, random.nextBoolean() ? IndexCacheModule.INDEX_QUERY_CACHE : IndexCacheModule.NONE_QUERY_CACHE);
+ builder.put(IndexModule.QUERY_CACHE_TYPE, random.nextBoolean() ? IndexModule.INDEX_QUERY_CACHE : IndexModule.NONE_QUERY_CACHE);
}
if (random.nextBoolean()) {
- builder.put(IndexCacheModule.QUERY_CACHE_EVERYTHING, random.nextBoolean());
+ builder.put(IndexModule.QUERY_CACHE_EVERYTHING, random.nextBoolean());
}
if (random.nextBoolean()) {
@@ -1873,7 +1873,7 @@ public final class InternalTestCluster extends TestCluster {
}
NodeService nodeService = getInstanceFromNode(NodeService.class, nodeAndClient.node);
- NodeStats stats = nodeService.stats(CommonStatsFlags.ALL, false, false, false, false, false, false, false, false, false);
+ NodeStats stats = nodeService.stats(CommonStatsFlags.ALL, false, false, false, false, false, false, false, false, false, false);
assertThat("Fielddata size must be 0 on node: " + stats.getNode(), stats.getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0l));
assertThat("Query cache size must be 0 on node: " + stats.getNode(), stats.getIndices().getQueryCache().getMemorySizeInBytes(), equalTo(0l));
assertThat("FixedBitSet cache size must be 0 on node: " + stats.getNode(), stats.getIndices().getSegments().getBitsetMemoryInBytes(), equalTo(0l));
diff --git a/core/src/test/java/org/elasticsearch/test/TestCluster.java b/core/src/test/java/org/elasticsearch/test/TestCluster.java
index 60fb248420d..c0d98ff301a 100644
--- a/core/src/test/java/org/elasticsearch/test/TestCluster.java
+++ b/core/src/test/java/org/elasticsearch/test/TestCluster.java
public abstract class TestCluster implements Iterable<Client>, Closeable {
assertAcked(client().admin().indices().prepareDelete(concreteIndices.toArray(String.class)));
}
}
- } catch (AssertionError ae) {
- // Try to see what threads are doing when we hit the "Delete index failed - not acked":
- logger.info("dump all threads on AssertionError");
- ESTestCase.printStackDump(logger);
- logger.info("done dump all threads on AssertionError");
- throw ae;
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/test/geo/RandomGeoGenerator.java b/core/src/test/java/org/elasticsearch/test/geo/RandomGeoGenerator.java
index 23c7e5c64ca..ad94c4e5ab4 100644
--- a/core/src/test/java/org/elasticsearch/test/geo/RandomGeoGenerator.java
+++ b/core/src/test/java/org/elasticsearch/test/geo/RandomGeoGenerator.java
@@ -19,7 +19,7 @@
package org.elasticsearch.test.geo;
-import org.apache.lucene.util.XGeoUtils;
+import org.apache.lucene.util.GeoUtils;
import org.elasticsearch.common.geo.GeoPoint;
import java.util.Random;
@@ -42,8 +42,8 @@ public class RandomGeoGenerator {
assert pt != null && pt.length == 2;
// normalize min and max
- double[] min = {XGeoUtils.normalizeLon(minLon), XGeoUtils.normalizeLat(minLat)};
- double[] max = {XGeoUtils.normalizeLon(maxLon), XGeoUtils.normalizeLat(maxLat)};
+ double[] min = {GeoUtils.normalizeLon(minLon), GeoUtils.normalizeLat(minLat)};
+ double[] max = {GeoUtils.normalizeLon(maxLon), GeoUtils.normalizeLat(maxLat)};
final double[] tMin = new double[2];
final double[] tMax = new double[2];
tMin[0] = Math.min(min[0], max[0]);
diff --git a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
index e8dda96255b..ddea8145d07 100644
--- a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
+++ b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
@@ -19,7 +19,6 @@
package org.elasticsearch.test.geo;
-import com.carrotsearch.randomizedtesting.RandomizedTest;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.spatial4j.core.context.jts.JtsSpatialContext;
import com.spatial4j.core.distance.DistanceUtils;
@@ -30,6 +29,7 @@ import com.spatial4j.core.shape.impl.Range;
import com.vividsolutions.jts.algorithm.ConvexHull;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
+
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.geo.builders.BaseLineStringBuilder;
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
@@ -40,6 +40,7 @@ import org.elasticsearch.common.geo.builders.PointBuilder;
import org.elasticsearch.common.geo.builders.PointCollection;
import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
+import org.junit.Assert;
import java.util.Random;
@@ -251,7 +252,7 @@ public class RandomShapeGenerator extends RandomGeoGenerator {
double[] pt = new double[2];
randomPointIn(rand, r.getMinX(), r.getMinY(), r.getMaxX(), r.getMaxY(), pt);
Point p = ctx.makePoint(pt[0], pt[1]);
- RandomizedTest.assertEquals(CONTAINS, r.relate(p));
+ Assert.assertEquals(CONTAINS, r.relate(p));
return p;
}
diff --git a/core/src/test/resources/indices/bwc/index-2.0.0.zip b/core/src/test/resources/indices/bwc/index-2.0.0.zip
new file mode 100644
index 00000000000..7110fb424a8
Binary files /dev/null and b/core/src/test/resources/indices/bwc/index-2.0.0.zip differ
diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0.zip b/core/src/test/resources/indices/bwc/repo-2.0.0.zip
new file mode 100644
index 00000000000..9605830a12c
Binary files /dev/null and b/core/src/test/resources/indices/bwc/repo-2.0.0.zip differ
diff --git a/dev-tools/es_release_notes.pl b/dev-tools/es_release_notes.pl
index c96645c1d6e..8033eddd03a 100644
--- a/dev-tools/es_release_notes.pl
+++ b/dev-tools/es_release_notes.pl
@@ -35,7 +35,7 @@ my %Group_Labels = (
breaking => 'Breaking changes',
build => 'Build',
deprecation => 'Deprecations',
- doc => 'Docs',
+ docs => 'Docs',
feature => 'New features',
enhancement => 'Enhancements',
bug => 'Bug fixes',
diff --git a/dev-tools/src/main/resources/forbidden/all-signatures.txt b/dev-tools/src/main/resources/forbidden/all-signatures.txt
index 2df41241598..4233112f0ab 100644
--- a/dev-tools/src/main/resources/forbidden/all-signatures.txt
+++ b/dev-tools/src/main/resources/forbidden/all-signatures.txt
@@ -97,3 +97,6 @@ java.lang.reflect.AccessibleObject#setAccessible(java.lang.reflect.AccessibleObj
@defaultMessage this should not have been added to lucene in the first place
org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey()
+
+@defaultMessage this method needs special permission
+java.lang.Thread#getAllStackTraces()
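
The new forbidden signature above is banned because `Thread.getAllStackTraces()` is security-checked: per its javadoc, a `SecurityManager` must grant both `RuntimePermission("getStackTrace")` and `RuntimePermission("modifyThreadGroup")`. A small sketch of a call guarded the same way the JDK guards it internally:

    import java.util.Map;

    class StackTraceAccessSketch {
        // Thread.getAllStackTraces() performs checks equivalent to these two
        // before returning, which is why it needs special permission under a
        // SecurityManager and is now flagged by forbidden-apis.
        static Map<Thread, StackTraceElement[]> dumpAll() {
            SecurityManager sm = System.getSecurityManager();
            if (sm != null) {
                sm.checkPermission(new RuntimePermission("getStackTrace"));
                sm.checkPermission(new RuntimePermission("modifyThreadGroup"));
            }
            return Thread.getAllStackTraces();
        }
    }
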
diff --git a/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 70aab5b26fb..00000000000
--- a/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-16aa0bdb66b7471e9a26f78a9a5701f678a905db
diff --git a/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..1dff1091f02
--- /dev/null
+++ b/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+584673fa6187890af89deab81df6a8651651fa2a
diff --git a/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 49444103062..00000000000
--- a/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7f16ec8294a09118237817d8c9c03b87cec67e29
diff --git a/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..f737f98bd2a
--- /dev/null
+++ b/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+f3b0911633d657e49d7a00df0eb5da5a7f65f61b
diff --git a/distribution/licenses/lucene-core-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-core-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 237fe0d2597..00000000000
--- a/distribution/licenses/lucene-core-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0a2305ed749cb8abc321ee50b871097b4bda8a64
diff --git a/distribution/licenses/lucene-core-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-core-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..89720a70e22
--- /dev/null
+++ b/distribution/licenses/lucene-core-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+d4ac9f13091eabf5cc0b13bd995dc2c161771139
diff --git a/distribution/licenses/lucene-grouping-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-grouping-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 9f42b06abd6..00000000000
--- a/distribution/licenses/lucene-grouping-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-51ed4b60eddd2ce38fcdc8c89903c1ece336ab4f
diff --git a/distribution/licenses/lucene-grouping-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-grouping-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..ac14c1412ed
--- /dev/null
+++ b/distribution/licenses/lucene-grouping-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+9499d90d3db187210f9991ab0a92d48423ba3d4e
diff --git a/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 164bfa24035..00000000000
--- a/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d71c018d888dc5fe71c7fb20c2a6009c36ef117f
diff --git a/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..d87ebcc05a1
--- /dev/null
+++ b/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+7daf49e720499e43d9b44b588526eb750ea2e83a
diff --git a/distribution/licenses/lucene-join-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-join-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 029fd0523d9..00000000000
--- a/distribution/licenses/lucene-join-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e82bc11bccb55af5d88d5526abe9bd3d04f0d13
diff --git a/distribution/licenses/lucene-join-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-join-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..23b7f1816a5
--- /dev/null
+++ b/distribution/licenses/lucene-join-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+e1a36360e967bf3116a4271d4b04aa5bdcc235ca
diff --git a/distribution/licenses/lucene-memory-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-memory-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 26f5148a59e..00000000000
--- a/distribution/licenses/lucene-memory-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0dfec35d41e7120e48cf6d0ae16a88ef2949e778
diff --git a/distribution/licenses/lucene-memory-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-memory-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..e915f6a8201
--- /dev/null
+++ b/distribution/licenses/lucene-memory-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+78b2fe81fe90c2d45ace3f21c7915319fe92119b
diff --git a/distribution/licenses/lucene-misc-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-misc-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 004205b8f61..00000000000
--- a/distribution/licenses/lucene-misc-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-30ddb3175c08d443c5a9f74e50e50e3a95afa72d
diff --git a/distribution/licenses/lucene-misc-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-misc-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..82ee83a3370
--- /dev/null
+++ b/distribution/licenses/lucene-misc-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+8c7734673fbdfa7ae251b29a7bee7842b6450606
diff --git a/distribution/licenses/lucene-queries-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-queries-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 8ae9588eaf6..00000000000
--- a/distribution/licenses/lucene-queries-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e45b31ff8374ac5dc651ac2477dde5299b7e746
diff --git a/distribution/licenses/lucene-queries-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-queries-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..54b54e2f168
--- /dev/null
+++ b/distribution/licenses/lucene-queries-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+f2ce93847617b42c98fc44a979697ba8f6e3f693
diff --git a/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index e6fd31e99fa..00000000000
--- a/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ff0780a5ad9620036f80114b2ce0b30d25647a62
diff --git a/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..96e3bac9f19
--- /dev/null
+++ b/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+bf99b0920e7d5cdddeddb0181ffad7df9e557ebb
diff --git a/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index e687bd7dca0..00000000000
--- a/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7679111888ccd185db0b360954777e68364eb88a
diff --git a/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..3e20f766ece
--- /dev/null
+++ b/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+89cd7591008f10ceeb88fe87c52ea5f96754ad94
diff --git a/distribution/licenses/lucene-spatial-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-spatial-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index a649c94eea7..00000000000
--- a/distribution/licenses/lucene-spatial-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a1699a0ed320c4db66ae4e8dc9dfd80e4bfc4017
diff --git a/distribution/licenses/lucene-spatial-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-spatial-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..3c7db1eac3b
--- /dev/null
+++ b/distribution/licenses/lucene-spatial-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+55817ab7fc4b2980429aa6ced151affe7740eb44
diff --git a/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index c7f799d4b62..00000000000
--- a/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7bcd15e2e685a8c92f48c3d2f355d2dd63073420
diff --git a/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..b933d7a64ef
--- /dev/null
+++ b/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+ef817826ffec2b506672ba5038f4e396a7bfcdc7
diff --git a/distribution/licenses/lucene-suggest-5.4.0-snapshot-1708254.jar.sha1 b/distribution/licenses/lucene-suggest-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 3382df6a549..00000000000
--- a/distribution/licenses/lucene-suggest-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-be74ad2360527cca776379499254f355ccd93484
diff --git a/distribution/licenses/lucene-suggest-5.4.0-snapshot-1710880.jar.sha1 b/distribution/licenses/lucene-suggest-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..b448901cd51
--- /dev/null
+++ b/distribution/licenses/lucene-suggest-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+5b39ae55fa40709cc45d5925ad80d09cb0cdc4ba
diff --git a/distribution/licenses/spatial4j-0.4.1.jar.sha1 b/distribution/licenses/spatial4j-0.4.1.jar.sha1
deleted file mode 100644
index 1c2883bd830..00000000000
--- a/distribution/licenses/spatial4j-0.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4234d12b1ba4d4b539fb3e29edd948a99539d9eb
diff --git a/distribution/licenses/spatial4j-0.5.jar.sha1 b/distribution/licenses/spatial4j-0.5.jar.sha1
new file mode 100644
index 00000000000..4bcf7a33b15
--- /dev/null
+++ b/distribution/licenses/spatial4j-0.5.jar.sha1
@@ -0,0 +1 @@
+6e16edaf6b1ba76db7f08c2f3723fce3b358ecc3
\ No newline at end of file
diff --git a/distribution/licenses/spatial4j-ABOUT.txt b/distribution/licenses/spatial4j-ABOUT.txt
new file mode 100644
index 00000000000..bee50a2b943
--- /dev/null
+++ b/distribution/licenses/spatial4j-ABOUT.txt
@@ -0,0 +1,15 @@
+About This Content
+
+May 22, 2015
+
+License
+
+The Eclipse Foundation makes available all content in this plug-in ("Content"). Unless otherwise indicated below, the
+Content is provided to you under the terms and conditions of the Apache License, Version 2.0. A copy of the Apache
+License, Version 2.0 is available at http://www.apache.org/licenses/LICENSE-2.0.txt
+
+If you did not receive this Content directly from the Eclipse Foundation, the Content is being redistributed by another
+party ("Redistributor") and different terms and conditions may apply to your use of any object code in the Content.
+Check the Redistributor’s license that was provided with the Content. If no such license exists, contact the
+Redistributor. Unless otherwise indicated below, the terms and conditions of the Apache License, Version 2.0 still apply
+to any source code in the Content and such source code may be obtained at http://www.eclipse.org.
\ No newline at end of file
diff --git a/distribution/licenses/spatial4j-NOTICE.txt b/distribution/licenses/spatial4j-NOTICE.txt
index 8d1c8b69c3f..a8be036a412 100644
--- a/distribution/licenses/spatial4j-NOTICE.txt
+++ b/distribution/licenses/spatial4j-NOTICE.txt
@@ -1 +1,100 @@
-
+Eclipse Foundation Software User Agreement
+
+April 9, 2014
+
+Usage Of Content
+
+THE ECLIPSE FOUNDATION MAKES AVAILABLE SOFTWARE, DOCUMENTATION, INFORMATION AND/OR OTHER MATERIALS FOR OPEN SOURCE
+PROJECTS (COLLECTIVELY "CONTENT"). USE OF THE CONTENT IS GOVERNED BY THE TERMS AND CONDITIONS OF THIS AGREEMENT AND/OR
+THE TERMS AND CONDITIONS OF LICENSE AGREEMENTS OR NOTICES INDICATED OR REFERENCED BELOW. BY USING THE CONTENT, YOU AGREE
+THAT YOUR USE OF THE CONTENT IS GOVERNED BY THIS AGREEMENT AND/OR THE TERMS AND CONDITIONS OF ANY APPLICABLE LICENSE
+AGREEMENTS OR NOTICES INDICATED OR REFERENCED BELOW. IF YOU DO NOT AGREE TO THE TERMS AND CONDITIONS OF THIS AGREEMENT
+AND THE TERMS AND CONDITIONS OF ANY APPLICABLE LICENSE AGREEMENTS OR NOTICES INDICATED OR REFERENCED BELOW, THEN YOU MAY
+NOT USE THE CONTENT.
+
+Applicable Licenses
+
+Unless otherwise indicated, all Content made available by the Eclipse Foundation is provided to you under the terms and
+conditions of the Eclipse Public License Version 1.0 ("EPL"). A copy of the EPL is provided with this Content and is
+also available at http://www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program" will mean the Content.
+
+Content includes, but is not limited to, source code, object code, documentation and other files maintained in the
+Eclipse Foundation source code repository ("Repository") in software modules ("Modules") and made available as
+downloadable archives ("Downloads").
+
+* Content may be structured and packaged into modules to facilitate delivering, extending, and upgrading the Content.
+ Typical modules may include plug-ins ("Plug-ins"), plug-in fragments ("Fragments"), and features ("Features").
+* Each Plug-in or Fragment may be packaged as a sub-directory or JAR (Java™ ARchive) in a directory named "plugins".
+* A Feature is a bundle of one or more Plug-ins and/or Fragments and associated material. Each Feature may be packaged
+ as a sub-directory in a directory named "features". Within a Feature, files named "feature.xml" may contain a list
+ of the names and version numbers of the Plug-ins and/or Fragments associated with that Feature.
+* Features may also include other Features ("Included Features"). Within a Feature, files named "feature.xml" may
+ contain a list of the names and version numbers of Included Features.
+
+The terms and conditions governing Plug-ins and Fragments should be contained in files named "about.html" ("Abouts").
+The terms and conditions governing Features and Included Features should be contained in files named "license.html"
+("Feature Licenses"). Abouts and Feature Licenses may be located in any directory of a Download or Module including, but
+not limited to the following locations:
+
+* The top-level (root) directory
+* Plug-in and Fragment directories
+* Inside Plug-ins and Fragments packaged as JARs
+* Sub-directories of the directory named "src" of certain Plug-ins
+* Feature directories
+
+Note: if a Feature made available by the Eclipse Foundation is installed using the Provisioning Technology (as defined
+below), you must agree to a license ("Feature Update License") during the installation process. If the Feature contains
+Included Features, the Feature Update License should either provide you with the terms and conditions governing the
+Included Features or inform you where you can locate them. Feature Update Licenses may be found in the "license"
+property of files named "feature.properties" found within a Feature. Such Abouts, Feature Licenses, and Feature Update
+Licenses contain the terms and conditions (or references to such terms and conditions) that govern your use of the
+associated Content in that directory.
+
+THE ABOUTS, FEATURE LICENSES, AND FEATURE UPDATE LICENSES MAY REFER TO THE EPL OR OTHER LICENSE AGREEMENTS, NOTICES OR
+TERMS AND CONDITIONS. SOME OF THESE OTHER LICENSE AGREEMENTS MAY INCLUDE (BUT ARE NOT LIMITED TO):
+
+* Eclipse Distribution License Version 1.0 (available at http://www.eclipse.org/licenses/edl-v10.html)
+* Common Public License Version 1.0 (available at http://www.eclipse.org/legal/cpl-v10.html)
+* Apache Software License 1.1 (available at http://www.apache.org/licenses/LICENSE)
+* Apache Software License 2.0 (available at http://www.apache.org/licenses/LICENSE-2.0)
+* Mozilla Public License Version 1.1 (available at http://www.mozilla.org/MPL/MPL-1.1.html)
+
+IT IS YOUR OBLIGATION TO READ AND ACCEPT ALL SUCH TERMS AND CONDITIONS PRIOR TO USE OF THE CONTENT. If no About, Feature
+License, or Feature Update License is provided, please contact the Eclipse Foundation to determine what terms and
+conditions govern that particular Content.
+
+### Use of Provisioning Technology
+
+The Eclipse Foundation makes available provisioning software, examples of which include, but are not limited to, p2 and
+the Eclipse Update Manager ("Provisioning Technology") for the purpose of allowing users to install software,
+documentation, information and/or other materials (collectively "Installable Software"). This capability is provided
+with the intent of allowing such users to install, extend and update Eclipse-based products. Information about packaging
+Installable Software is available at http://eclipse.org/equinox/p2/repository_packaging.html ("Specification").
+
+You may use Provisioning Technology to allow other parties to install Installable Software. You shall be responsible for
+enabling the applicable license agreements relating to the Installable Software to be presented to, and accepted by, the
+users of the Provisioning Technology in accordance with the Specification. By using Provisioning Technology in such a
+manner and making it available in accordance with the Specification, you further acknowledge your agreement to, and the
+acquisition of all necessary rights to permit the following:
+
+1. A series of actions may occur ("Provisioning Process") in which a user may execute the Provisioning Technology on a
+ machine ("Target Machine") with the intent of installing, extending or updating the functionality of an
+ Eclipse-based product.
+2. During the Provisioning Process, the Provisioning Technology may cause third party Installable Software or a portion
+ thereof to be accessed and copied to the Target Machine.
+3. Pursuant to the Specification, you will provide to the user the terms and conditions that govern the use of the
+ Installable Software ("Installable Software Agreement") and such Installable Software Agreement shall be accessed
+ from the Target Machine in accordance with the Specification. Such Installable Software Agreement must inform the
+ user of the terms and conditions that govern the Installable Software and must solicit acceptance by the end user in
+ the manner prescribed in such Installable Software Agreement. Upon such indication of agreement by the user, the
+ provisioning Technology will complete installation of the Installable Software.
+
+Cryptography
+
+Content may contain encryption software. The country in which you are currently may have restrictions on the import,
+possession, and use, and/or re-export to another country, of encryption software. BEFORE using any encryption software,
+please check the country's laws, regulations and policies concerning the import, possession, or use, and re-export of
+encryption software, to see if this is permitted.
+
+Java and all Java-based trademarks are trademarks of Oracle Corporation in the United States, other countries,
+or both.
\ No newline at end of file
diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc
index b22312e2130..9890164bff5 100644
--- a/docs/reference/cluster/nodes-stats.asciidoc
+++ b/docs/reference/cluster/nodes-stats.asciidoc
@@ -57,6 +57,9 @@ of `indices`, `os`, `process`, `jvm`, `transport`, `http`,
`breaker`::
Statistics about the field data circuit breaker
+`discovery`::
+ Statistics about discovery
+
[source,js]
--------------------------------------------------
# return indices and os
diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc
index 9a00d021cfb..30c1c38aae8 100644
--- a/docs/reference/migration/migrate_3_0.asciidoc
+++ b/docs/reference/migration/migrate_3_0.asciidoc
@@ -68,6 +68,48 @@ characteristics as the former `scan` search type.
The search exists api has been removed in favour of using the search api with
`size` set to `0` and `terminate_after` set to `1`.
+==== Deprecated queries removed
+
+The following deprecated queries have been removed:
+* `filtered`: use `bool` query instead, which supports `filter` clauses too
+* `and`: use `must` clauses in a `bool` query instead
+* `or`: use `should` clauses in a `bool` query instead
+* `limit`: use `terminate_after` parameter instead
+* `fquery`: obsolete after filters and queries have been merged
+* `query`: obsolete after filters and queries have been merged
+
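As a worked example for the first item: a `filtered` query becomes a `bool` query with the former filter moved into a `filter` clause. A minimal Java-API sketch (field names are invented for illustration):

    import org.elasticsearch.index.query.BoolQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    class FilteredToBoolSketch {
        // before (removed): a `filtered` query wrapping a match query and a term filter
        static BoolQueryBuilder replacement() {
            return QueryBuilders.boolQuery()
                    .must(QueryBuilders.matchQuery("title", "search"))       // scoring clause
                    .filter(QueryBuilders.termQuery("status", "published")); // non-scoring filter
        }
    }
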
+==== Unified fuzziness parameter
+
+* Removed support for the deprecated `min_similarity` parameter in `fuzzy query`, in favour of `fuzziness`.
+* Removed support for the deprecated `fuzzy_min_sim` parameter in `query_string` query, in favour of `fuzziness`.
+* Removed support for the deprecated `edit_distance` parameter in completion suggester, in favour of `fuzziness`.
+
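In the Java API the unified parameter surfaces as `Fuzziness`; a small sketch, assuming the Java-API surface of this era (field and value are illustrative):

    import org.elasticsearch.common.unit.Fuzziness;
    import org.elasticsearch.index.query.FuzzyQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    class FuzzinessSketch {
        // one knob instead of min_similarity / fuzzy_min_sim / edit_distance
        static FuzzyQueryBuilder fuzzy() {
            return QueryBuilders.fuzzyQuery("user", "kimchy").fuzziness(Fuzziness.AUTO);
        }
    }
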
+==== indices query
+
+Removed support for the deprecated `filter` and `no_match_filter` fields in `indices` query,
+in favour of `query` and `no_match_query`.
+
+==== nested query
+
+Removed support for the deprecated `filter` fields in `nested` query, in favour of `query`.
+
+==== terms query
+
+Removed support for the deprecated `minimum_should_match` and `disable_coord` in `terms` query, use `bool` query instead.
+Also removed support for the deprecated `execution` parameter.
+
+==== function_score query
+
+Removed support for the top level `filter` element in `function_score` query, replaced by `query`.
+
+==== highlighters
+
+Removed support for multiple highlighter names, the only supported ones are: `plain`, `fvh` and `postings`.
+
+==== top level filter
+
+Removed support for the deprecated top level `filter` in the search api, replaced by `post_filter`.
+
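A transport-client sketch of the surviving option (index and field names invented for illustration):

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.index.query.QueryBuilders;

    class PostFilterSketch {
        // the removed top-level `filter` maps onto post_filter, which is applied
        // after aggregations have been computed
        static SearchResponse search(Client client) {
            return client.prepareSearch("my-index")
                    .setQuery(QueryBuilders.matchAllQuery())
                    .setPostFilter(QueryBuilders.termQuery("status", "published"))
                    .get();
        }
    }
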
=== Parent/Child changes
The `children` aggregation, parent child inner hits and `has_child` and `has_parent` queries will not work on indices
@@ -85,7 +127,7 @@ which does the exact same thing.
==== `sum` score mode removed
-The `sum` score mode has been removed in favour of the `total` mode which doesn the same and is already available in
+The `sum` score mode has been removed in favour of the `total` mode which does the same and is already available in
previous versions.
==== `max_children` option
diff --git a/docs/reference/query-dsl/indices-query.asciidoc b/docs/reference/query-dsl/indices-query.asciidoc
index feaa95db6ac..e3b604b7a39 100644
--- a/docs/reference/query-dsl/indices-query.asciidoc
+++ b/docs/reference/query-dsl/indices-query.asciidoc
@@ -28,11 +28,3 @@ You can use the `index` field to provide a single index.
documents), and `all` (to match all). Defaults to `all`.
`query` is mandatory, as well as `indices` (or `index`).
-
-[TIP]
-====================================================================
-The fields order is important: if the `indices` are provided before `query`
-or `no_match_query`, the related queries get parsed only against the indices
-that they are going to be executed on. This is useful to avoid parsing queries
-when it is not necessary and prevent potential mapping errors.
-====================================================================
diff --git a/docs/reference/query-dsl/nested-query.asciidoc b/docs/reference/query-dsl/nested-query.asciidoc
index d32705a0a7a..51f690c2cab 100644
--- a/docs/reference/query-dsl/nested-query.asciidoc
+++ b/docs/reference/query-dsl/nested-query.asciidoc
@@ -45,9 +45,9 @@ And here is a sample nested query usage:
}
--------------------------------------------------
-The query `path` points to the nested object path, and the `query` (or
-`filter`) includes the query that will run on the nested docs matching
-the direct path, and joining with the root parent docs. Note that any
+The query `path` points to the nested object path, and the `query`
+includes the query that will run on the nested docs matching the
+direct path, and joining with the root parent docs. Note that any
fields referenced inside the query must use the complete path (fully
qualified).
diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc
index e8b01acfd96..4cb43069086 100644
--- a/docs/reference/redirects.asciidoc
+++ b/docs/reference/redirects.asciidoc
@@ -362,7 +362,7 @@ The filter cache has been renamed <<query-cache>>.
[role="exclude",id="query-dsl-filtered-query"]
=== Filtered query
-The `filtered` query is replaced in favour of the <<query-dsl-bool-query,bool>> query. Instead of
+The `filtered` query is replaced by the <<query-dsl-bool-query,bool>> query. Instead of
the following:
[source,js]
diff --git a/docs/reference/search/request/highlighting.asciidoc b/docs/reference/search/request/highlighting.asciidoc
index 7a466405789..99742db77c1 100644
--- a/docs/reference/search/request/highlighting.asciidoc
+++ b/docs/reference/search/request/highlighting.asciidoc
@@ -2,9 +2,9 @@
=== Highlighting
Allows to highlight search results on one or more fields. The
-implementation uses either the lucene `highlighter`, `fast-vector-highlighter`
-or `postings-highlighter`. The following is an example of the search request
-body:
+implementation uses either the lucene `plain` highlighter, the
+fast vector highlighter (`fvh`) or `postings` highlighter.
+The following is an example of the search request body:
[source,js]
--------------------------------------------------
@@ -285,7 +285,7 @@ is required. Note that `fragment_size` is ignored in this case.
}
--------------------------------------------------
-When using `fast-vector-highlighter` one can use `fragment_offset`
+When using `fvh` one can use `fragment_offset`
parameter to control the margin to start highlighting from.
In the case where there is no matching fragment to highlight, the default is
@@ -554,7 +554,7 @@ to
[[phrase-limit]]
==== Phrase Limit
-The `fast-vector-highlighter` has a `phrase_limit` parameter that prevents
+The fast vector highlighter has a `phrase_limit` parameter that prevents
it from analyzing too many phrases and eating tons of memory. It defaults
to 256 so only the first 256 matching phrases in the document are
considered. You can raise the limit with the `phrase_limit` parameter but
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1708254.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 53f66772d49..00000000000
--- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0ffcda4683ae66ecb96882a6809516d9a288ba52
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1710880.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..25f0322e755
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+43979949bebc071fc0353513fffe11684690f23e
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1708254.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 3d9bc8eb84b..00000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9e34274dd0f1903453f8e4f76ee8e88f15437752
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1710880.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..973cab1d2d6
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+d621f00b5ce0f9fde87a713e932d888c3ddd1a78
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1708254.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 730e93dae69..00000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8020f9e0a9c3f30fe1989dcac2085134385a9e93
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1710880.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..8034c3c8fdc
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+d46a1cd06ae642581e566844b1e42e14e0eeffe6
diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1708254.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 7b01fa0569c..00000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f8c9a0775dd92a2e537e0e19fc1831b3214eeef5
diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1710880.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..3855bcfe769
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+8e6058a95f38637c1d4b7a1ebcc6c8ce85c80b20
diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1708254.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index b24c486616a..00000000000
--- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a3111a7cff126498ce361d56e363bcf8bee945a9
diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1710880.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..c1e15a2e832
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+4c69ca398b34c7a58482b09cdc06d0e2bab89cc4
diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java
index 71349648138..2b8dc02289c 100644
--- a/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java
+++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java
@@ -20,16 +20,12 @@
package org.elasticsearch.rest.action.deletebyquery;
import org.elasticsearch.action.deletebyquery.DeleteByQueryRequest;
-import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestChannel;
@@ -67,29 +63,15 @@ public class RestDeleteByQueryAction extends BaseRestHandler {
if (request.hasParam("timeout")) {
delete.timeout(request.paramAsTime("timeout", null));
}
- if (request.hasContent()) {
- XContentParser requestParser = XContentFactory.xContent(request.content()).createParser(request.content());
- QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
- context.reset(requestParser);
- context.parseFieldMatcher(parseFieldMatcher);
- final QueryBuilder<?> builder = context.parseInnerQueryBuilder();
- delete.query(builder);
+ if (RestActions.hasBodyContent(request)) {
+ delete.query(RestActions.getQueryContent(RestActions.getRestContent(request), indicesQueriesRegistry, parseFieldMatcher));
} else {
- String source = request.param("source");
- if (source != null) {
- XContentParser requestParser = XContentFactory.xContent(source).createParser(source);
- QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
- context.reset(requestParser);
- final QueryBuilder<?> builder = context.parseInnerQueryBuilder();
- delete.query(builder);
- } else {
- QueryBuilder<?> queryBuilder = RestActions.urlParamsToQueryBuilder(request);
- if (queryBuilder != null) {
- delete.query(queryBuilder);
- }
+ QueryBuilder<?> queryBuilder = RestActions.urlParamsToQueryBuilder(request);
+ if (queryBuilder != null) {
+ delete.query(queryBuilder);
}
}
delete.types(Strings.splitStringByCommaToArray(request.param("type")));
- client.execute(INSTANCE, delete, new RestToXContentListener<DeleteByQueryResponse>(channel));
+ client.execute(INSTANCE, delete, new RestToXContentListener<>(channel));
}
}
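
The `source`-parameter branch deleted above is not lost: the refactor assumes `RestActions.getRestContent(request)` prefers the request body and falls back to the `source` URL parameter. A hedged sketch of that helper's expected shape (not the verbatim implementation):

    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.rest.RestRequest;

    class RestContentSketch {
        // body wins; otherwise the `source` URL parameter is used, which is why
        // the handler above no longer needs its own `source` handling
        static BytesReference getRestContent(RestRequest request) {
            if (request.hasContent()) {
                return request.content();
            }
            String source = request.param("source");
            return source != null ? new BytesArray(source) : null;
        }
    }
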
diff --git a/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml b/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml
index c253ad8d276..063e959a807 100644
--- a/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml
+++ b/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml
@@ -1,5 +1,4 @@
----
-"Basic delete_by_query":
+setup:
- do:
index:
index: test_1
@@ -24,6 +23,8 @@
- do:
indices.refresh: {}
+---
+"Basic delete_by_query":
- do:
delete_by_query:
index: test_1
@@ -40,3 +41,14 @@
index: test_1
- match: { count: 2 }
+
+---
+"Delete_by_query body without query element":
+ - do:
+ catch: request
+ delete_by_query:
+ index: test_1
+ body:
+ match:
+ foo: bar
+
diff --git a/plugins/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1 b/plugins/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1
new file mode 100644
index 00000000000..f15e50069ba
--- /dev/null
+++ b/plugins/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1
@@ -0,0 +1 @@
+66144204f9d6d7d3f3f775622c2dd7e9bd511d97
diff --git a/plugins/lang-expression/licenses/antlr4-runtime-4.5.jar.sha1 b/plugins/lang-expression/licenses/antlr4-runtime-4.5.jar.sha1
deleted file mode 100644
index 5299c19c73b..00000000000
--- a/plugins/lang-expression/licenses/antlr4-runtime-4.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-29e48af049f17dd89153b83a7ad5d01b3b4bcdda
diff --git a/plugins/lang-expression/licenses/lucene-expressions-5.4.0-snapshot-1708254.jar.sha1 b/plugins/lang-expression/licenses/lucene-expressions-5.4.0-snapshot-1708254.jar.sha1
deleted file mode 100644
index 83d9a5e5c3b..00000000000
--- a/plugins/lang-expression/licenses/lucene-expressions-5.4.0-snapshot-1708254.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-40f4f54812a7a34312519ceb8b3f128feb4e2185
diff --git a/plugins/lang-expression/licenses/lucene-expressions-5.4.0-snapshot-1710880.jar.sha1 b/plugins/lang-expression/licenses/lucene-expressions-5.4.0-snapshot-1710880.jar.sha1
new file mode 100644
index 00000000000..6f2d485fdb1
--- /dev/null
+++ b/plugins/lang-expression/licenses/lucene-expressions-5.4.0-snapshot-1710880.jar.sha1
@@ -0,0 +1 @@
+431504b7bad8ffc1a03707b9a1531d95f33e10b9
diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java b/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java
index 104159142ad..33a0df09bd2 100644
--- a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java
+++ b/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java
@@ -19,7 +19,7 @@
package org.elasticsearch.messy.tests;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoDistance;
@@ -664,7 +664,7 @@ public class GeoDistanceTests extends ESIntegTestCase {
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
- .field("pin", XGeoHashUtils.stringEncode(lon, lat))
+ .field("pin", GeoHashUtils.stringEncode(lon, lat))
.endObject();
assertAcked(prepareCreate("locations").addMapping("location", mapping));
diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java b/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java
index 15c77960ead..4291f00bf1a 100644
--- a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java
+++ b/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java
@@ -76,7 +76,6 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.suggest.SuggestAction;
import org.elasticsearch.action.suggest.SuggestRequest;
-import org.elasticsearch.action.support.QuerySourceBuilder;
import org.elasticsearch.action.termvectors.MultiTermVectorsAction;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsAction;
@@ -96,33 +95,16 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.Transport;
-import org.elasticsearch.transport.TransportChannel;
-import org.elasticsearch.transport.TransportModule;
-import org.elasticsearch.transport.TransportRequest;
-import org.elasticsearch.transport.TransportRequestHandler;
-import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.transport.*;
import org.junit.After;
import org.junit.Before;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
import java.util.function.Supplier;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
-import static org.hamcrest.Matchers.emptyIterable;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.hasItem;
-import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.*;
@ClusterScope(scope = Scope.SUITE, numClientNodes = 1, minNumDataNodes = 2)
public class IndicesRequestTests extends ESIntegTestCase {
@@ -307,7 +289,7 @@ public class IndicesRequestTests extends ESIntegTestCase {
String explainShardAction = ExplainAction.NAME + "[s]";
interceptTransportActions(explainShardAction);
- ExplainRequest explainRequest = new ExplainRequest(randomIndexOrAlias(), "type", "id").source(new QuerySourceBuilder().setQuery(QueryBuilders.matchAllQuery()));
+ ExplainRequest explainRequest = new ExplainRequest(randomIndexOrAlias(), "type", "id").query(QueryBuilders.matchAllQuery());
internalCluster().clientNodeClient().explain(explainRequest).actionGet();
clearInterceptedActions();
@@ -684,24 +666,6 @@ public class IndicesRequestTests extends ESIntegTestCase {
}
}
}
-
- private static void assertSameIndicesOptionalRequests(String[] indices, String... actions) {
- assertSameIndices(indices, true, actions);
- }
-
- private static void assertSameIndices(String[] indices, boolean optional, String... actions) {
- for (String action : actions) {
- List<TransportRequest> requests = consumeTransportRequests(action);
- if (!optional) {
- assertThat("no internal requests intercepted for action [" + action + "]", requests.size(), greaterThan(0));
- }
- for (TransportRequest internalRequest : requests) {
- assertThat(internalRequest, instanceOf(IndicesRequest.class));
- assertThat(internalRequest.getClass().getName(), ((IndicesRequest)internalRequest).indices(), equalTo(indices));
- }
- }
- }
-
private static void assertIndicesSubset(List<String> indices, String... actions) {
//indices returned by each bulk shard request need to be a subset of the original indices
for (String action : actions) {
@@ -820,26 +784,26 @@ public class IndicesRequestTests extends ESIntegTestCase {
@Override
public <Request extends TransportRequest> void registerRequestHandler(String action, Supplier<Request> request, String executor, boolean forceExecution, TransportRequestHandler<Request> handler) {
- super.registerRequestHandler(action, request, executor, forceExecution, new InterceptingRequestHandler(action, handler));
+ super.registerRequestHandler(action, request, executor, forceExecution, new InterceptingRequestHandler<>(action, handler));
}
@Override
public <Request extends TransportRequest> void registerRequestHandler(String action, Supplier<Request> requestFactory, String executor, TransportRequestHandler<Request> handler) {
- super.registerRequestHandler(action, requestFactory, executor, new InterceptingRequestHandler(action, handler));
+ super.registerRequestHandler(action, requestFactory, executor, new InterceptingRequestHandler<>(action, handler));
}
- private class InterceptingRequestHandler implements TransportRequestHandler {
+ private class InterceptingRequestHandler<T extends TransportRequest> implements TransportRequestHandler<T> {