Merge branch 'master' into feature/aggs_2_0
commit 7d38989cdb
@@ -1,2 +1,7 @@
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/main/resources=UTF-8
encoding//src/test/java=UTF-8
encoding//src/test/resources=UTF-8
encoding/<project>=UTF-8
encoding/rest-api-spec=UTF-8
@@ -1,21 +1,18 @@
eclipse.preferences.version=1
# We target Java 1.7
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.source=1.7
# Lines should be split at 140 chars
org.eclipse.jdt.core.formatter.lineSplit=140
# Indentation is 4 spaces
org.eclipse.jdt.core.formatter.tabulation.char=space
org.eclipse.jdt.core.formatter.tabulation.size=4
# Configuration for NPE analysis
org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=enabled
org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore
org.eclipse.jdt.core.compiler.annotation.nullable=org.elasticsearch.common.Nullable
org.eclipse.jdt.core.compiler.annotation.nullanalysis=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning
org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=warning
org.eclipse.jdt.core.compiler.problem.nullReference=warning
org.eclipse.jdt.core.compiler.problem.nullSpecViolation=warning
org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning
org.eclipse.jdt.core.compiler.problem.potentialNullReference=warning
org.eclipse.jdt.core.compiler.source=1.7
org.eclipse.jdt.core.formatter.lineSplit=140
org.eclipse.jdt.core.formatter.tabulation.char=space
org.eclipse.jdt.core.formatter.tabulation.size=4
@@ -81,6 +81,8 @@ grant {
  permission java.lang.RuntimePermission "accessClassInPackage.sun.nio.ch";
  // needed by groovy engine
  permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect";
+ // needed to get file descriptor statistics
+ permission java.lang.RuntimePermission "accessClassInPackage.sun.management";

  permission java.lang.RuntimePermission "accessDeclaredMembers";
  permission java.lang.RuntimePermission "getStackTrace";
@@ -230,8 +230,7 @@ generates an edit distance based on the length of the term. For lengths:

converted into an edit distance using the formula: `length(term) * (1.0 -
fuzziness)`, eg a `fuzziness` of `0.6` with a term of length 10 would result
-in an edit distance of `4`. Note: in all APIs except for the
-<<query-dsl-flt-query>>, the maximum allowed edit distance is `2`.
+in an edit distance of `4`. Note: in all APIs the maximum allowed edit distance is `2`.
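A minimal sketch of the conversion described above (hypothetical helper, not Elasticsearch code): only the formula `length(term) * (1.0 - fuzziness)` and the edit-distance cap of `2` come from the text; truncating the product to an integer is an assumption.

[source,java]
--------------------------------------------------
// Hypothetical helper illustrating the documented conversion; not Elasticsearch source.
public final class FuzzinessExample {
    /** Converts a fractional fuzziness into an edit distance, capped at 2 as documented above. */
    static int toEditDistance(String term, double fuzziness) {
        int distance = (int) (term.length() * (1.0 - fuzziness)); // truncation is an assumption
        return Math.min(distance, 2);
    }

    public static void main(String[] args) {
        // a term of length 10 with fuzziness 0.6 -> 10 * 0.4 = 4, capped to 2
        System.out.println(toEditDistance("aggregates", 0.6));
    }
}
--------------------------------------------------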
@@ -380,3 +380,9 @@ http.cors.allow-origin: /https?:\/\/localhost(:[0-9]+)?/
The cluster state api doesn't return the `routing_nodes` section anymore when
`routing_table` is requested. The newly introduced `routing_nodes` flag can
be used separately to control whether `routing_nodes` should be returned.

=== Query DSL

The `fuzzy_like_this` and `fuzzy_like_this_field` queries have been removed.

The `limit` filter is deprecated and becomes a no-op. You can achieve similar
behaviour using the <<search-request-body,terminate_after>> parameter.
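A rough sketch of the suggested migration in the Java API (the index name and limit value are illustrative only; `setTerminateAfter` is the Java counterpart of the `terminate_after` request parameter referenced above):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;

// Sketch only: assumes an already-connected Client and an index named "logs".
public class TerminateAfterExample {
    public static SearchResponse firstTenPerShard(Client client) {
        return client.prepareSearch("logs")
                .setQuery(QueryBuilders.matchAllQuery())
                .setTerminateAfter(10) // per-shard cut-off, replacing the deprecated limit filter
                .execute().actionGet();
    }
}
--------------------------------------------------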
@ -1,6 +1,8 @@
|
|||
[[query-dsl-limit-filter]]
|
||||
=== Limit Filter
|
||||
|
||||
deprecated[1.6.0, Use <<search-request-body,terminate_after>> instead]
|
||||
|
||||
A limit filter limits the number of documents (per shard) to execute on.
|
||||
For example:
|
||||
|
||||
|
|
|
@@ -22,10 +22,6 @@ include::queries/dis-max-query.asciidoc[]

include::queries/filtered-query.asciidoc[]

-include::queries/flt-query.asciidoc[]
-
-include::queries/flt-field-query.asciidoc[]
-
include::queries/function-score-query.asciidoc[]

include::queries/fuzzy-query.asciidoc[]
@ -1,47 +0,0 @@
|
|||
[[query-dsl-flt-field-query]]
|
||||
=== Fuzzy Like This Field Query
|
||||
|
||||
The `fuzzy_like_this_field` query is the same as the `fuzzy_like_this`
|
||||
query, except that it runs against a single field. It provides nicer
|
||||
query DSL over the generic `fuzzy_like_this` query, and support typed
|
||||
fields query (automatically wraps typed fields with type filter to match
|
||||
only on the specific type).
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"fuzzy_like_this_field" : {
|
||||
"name.first" : {
|
||||
"like_text" : "text like this one",
|
||||
"max_query_terms" : 12
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
`fuzzy_like_this_field` can be shortened to `flt_field`.
|
||||
|
||||
The `fuzzy_like_this_field` top level parameters include:
|
||||
|
||||
[cols="<,<",options="header",]
|
||||
|=======================================================================
|
||||
|Parameter |Description
|
||||
|`like_text` |The text to find documents like it, *required*.
|
||||
|
||||
|`ignore_tf` |Should term frequency be ignored. Defaults to `false`.
|
||||
|
||||
|`max_query_terms` |The maximum number of query terms that will be
|
||||
included in any generated query. Defaults to `25`.
|
||||
|
||||
|`fuzziness` |The fuzziness of the term variants. Defaults
|
||||
to `0.5`. See <<fuzziness>>.
|
||||
|
||||
|`prefix_length` |Length of required common prefix on variant terms.
|
||||
Defaults to `0`.
|
||||
|
||||
|`boost` |Sets the boost value of the query. Defaults to `1.0`.
|
||||
|
||||
|`analyzer` |The analyzer that will be used to analyze the text.
|
||||
Defaults to the analyzer associated with the field.
|
||||
|=======================================================================
|
||||
|
|
@ -1,65 +0,0 @@
|
|||
[[query-dsl-flt-query]]
|
||||
=== Fuzzy Like This Query
|
||||
|
||||
Fuzzy like this query find documents that are "like" provided text by
|
||||
running it against one or more fields.
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"fuzzy_like_this" : {
|
||||
"fields" : ["name.first", "name.last"],
|
||||
"like_text" : "text like this one",
|
||||
"max_query_terms" : 12
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
`fuzzy_like_this` can be shortened to `flt`.
|
||||
|
||||
The `fuzzy_like_this` top level parameters include:
|
||||
|
||||
[cols="<,<",options="header",]
|
||||
|=======================================================================
|
||||
|Parameter |Description
|
||||
|`fields` |A list of the fields to run the more like this query against.
|
||||
Defaults to the `_all` field.
|
||||
|
||||
|`like_text` |The text to find documents like it, *required*.
|
||||
|
||||
|`ignore_tf` |Should term frequency be ignored. Defaults to `false`.
|
||||
|
||||
|`max_query_terms` |The maximum number of query terms that will be
|
||||
included in any generated query. Defaults to `25`.
|
||||
|
||||
|`fuzziness` |The minimum similarity of the term variants. Defaults
|
||||
to `0.5`. See <<fuzziness>>.
|
||||
|
||||
|`prefix_length` |Length of required common prefix on variant terms.
|
||||
Defaults to `0`.
|
||||
|
||||
|`boost` |Sets the boost value of the query. Defaults to `1.0`.
|
||||
|
||||
|`analyzer` |The analyzer that will be used to analyze the text.
|
||||
Defaults to the analyzer associated with the field.
|
||||
|=======================================================================
|
||||
|
||||
[float]
|
||||
==== How it Works
|
||||
|
||||
Fuzzifies ALL terms provided as strings and then picks the best n
|
||||
differentiating terms. In effect this mixes the behaviour of FuzzyQuery
|
||||
and MoreLikeThis but with special consideration of fuzzy scoring
|
||||
factors. This generally produces good results for queries where users
|
||||
may provide details in a number of fields and have no knowledge of
|
||||
boolean query syntax and also want a degree of fuzzy matching and a fast
|
||||
query.
|
||||
|
||||
For each source term the fuzzy variants are held in a BooleanQuery with
|
||||
no coord factor (because we are not looking for matches on multiple
|
||||
variants in any one doc). Additionally, a specialized TermQuery is used
|
||||
for variants and does not use that variant term's IDF because this would
|
||||
favor rarer terms, such as misspellings. Instead, all variants use the
|
||||
same IDF ranking (the one for the source query term) and this is
|
||||
factored into the variant's boost. If the source query term does not
|
||||
exist in the index the average IDF of the variants is used.
|
pom.xml
@@ -551,6 +551,7 @@
<param>-Xmx${tests.heap.size}</param>
<param>-Xms${tests.heap.size}</param>
<param>${java.permGenSpace}</param>
<param>-Djava.library.path=${project.basedir}/lib/sigar</param>
<param>-XX:MaxDirectMemorySize=512m</param>
<param>-Des.logger.prefix=</param>
<param>-XX:+HeapDumpOnOutOfMemoryError</param>
@@ -1409,7 +1410,7 @@
      <plugin>
        <groupId>de.thetaphi</groupId>
        <artifactId>forbiddenapis</artifactId>
-       <version>1.7</version>
+       <version>1.8</version>

        <executions>
          <execution>
@@ -1599,6 +1600,45 @@
                <execute/>
              </action>
            </pluginExecution>
+           <pluginExecution>
+             <pluginExecutionFilter>
+               <groupId>org.apache.maven.plugins</groupId>
+               <artifactId>maven-antrun-plugin</artifactId>
+               <versionRange>[1.0.0,)</versionRange>
+               <goals>
+                 <goal>run</goal>
+               </goals>
+             </pluginExecutionFilter>
+             <action>
+               <execute/>
+             </action>
+           </pluginExecution>
+           <pluginExecution>
+             <pluginExecutionFilter>
+               <groupId>org.apache.maven.plugins</groupId>
+               <artifactId>maven-resources-plugin</artifactId>
+               <versionRange>[1.0.0,)</versionRange>
+               <goals>
+                 <goal>copy-resources</goal>
+               </goals>
+             </pluginExecutionFilter>
+             <action>
+               <ignore/>
+             </action>
+           </pluginExecution>
+           <pluginExecution>
+             <pluginExecutionFilter>
+               <groupId>com.mycila</groupId>
+               <artifactId>license-maven-plugin</artifactId>
+               <versionRange>[1.0.0,)</versionRange>
+               <goals>
+                 <goal>check</goal>
+               </goals>
+             </pluginExecutionFilter>
+             <action>
+               <ignore/>
+             </action>
+           </pluginExecution>
          </pluginExecutions>
        </lifecycleMappingMetadata>
      </configuration>
@@ -230,7 +230,9 @@ public class Version {
    public static final int V_1_5_0_ID = 1050099;
    public static final Version V_1_5_0 = new Version(V_1_5_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
    public static final int V_1_5_1_ID = 1050199;
-   public static final Version V_1_5_1 = new Version(V_1_5_1_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
+   public static final Version V_1_5_1 = new Version(V_1_5_1_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
+   public static final int V_1_5_2_ID = 1050299;
+   public static final Version V_1_5_2 = new Version(V_1_5_2_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
    public static final int V_1_6_0_ID = 1060099;
    public static final Version V_1_6_0 = new Version(V_1_6_0_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
    public static final int V_2_0_0_ID = 2000099;

@@ -252,6 +254,8 @@ public class Version {
            return V_2_0_0;
        case V_1_6_0_ID:
            return V_1_6_0;
+       case V_1_5_2_ID:
+           return V_1_5_2;
        case V_1_5_1_ID:
            return V_1_5_1;
        case V_1_5_0_ID:
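As a reading aid only: the numeric ids above appear to encode major/minor/revision digits plus a trailing build component (e.g. `1050199` for 1.5.1). A hypothetical decoder under that assumption, not an official Elasticsearch API:

[source,java]
--------------------------------------------------
// Hypothetical decoder for ids like 1050199; the digit layout is inferred from the
// constants above, not from any documented contract.
public final class VersionIdExample {
    static String decode(int id) {
        int major = id / 1000000;
        int minor = (id / 10000) % 100;
        int revision = (id / 100) % 100;
        int build = id % 100; // 99 seems to mark a GA release in the constants above
        return major + "." + minor + "." + revision + " (build " + build + ")";
    }

    public static void main(String[] args) {
        System.out.println(decode(1050199)); // 1.5.1 (build 99)
        System.out.println(decode(2000099)); // 2.0.0 (build 99)
    }
}
--------------------------------------------------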
@@ -19,14 +19,18 @@

package org.elasticsearch.common.geo.builders;

import com.google.common.collect.Sets;
import com.spatial4j.core.exception.InvalidShapeException;
import com.spatial4j.core.shape.Shape;
import com.vividsolutions.jts.geom.*;
import org.elasticsearch.ElasticsearchParseException;
import org.apache.commons.lang3.tuple.Pair;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;

/**
@@ -111,6 +115,18 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
        return shell.close();
    }

+   /**
+    * Validates that the interior (hole) shares at most one tangential vertex with the exterior (shell) of the polygon.
+    */
+   protected void validateHole(BaseLineStringBuilder shell, BaseLineStringBuilder hole) {
+       HashSet exterior = Sets.newHashSet(shell.points);
+       HashSet interior = Sets.newHashSet(hole.points);
+       exterior.retainAll(interior);
+       if (exterior.size() >= 2) {
+           throw new InvalidShapeException("Invalid polygon, interior cannot share more than one point with the exterior");
+       }
+   }

    /**
     * The coordinates set up by the builder will be assembled into a polygon. The result will consist of
     * a set of polygons. Each of these components holds a list of linestrings defining the polygon: the
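A standalone illustration of the rule `validateHole` enforces, using plain strings in place of JTS coordinates (all data here is made up):

[source,java]
--------------------------------------------------
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

// Illustration only: counts vertices shared between a shell ring and a hole ring.
public class SharedVertexExample {
    static int sharedVertices(List<String> shell, List<String> hole) {
        HashSet<String> shared = new HashSet<>(shell);
        shared.retainAll(new HashSet<>(hole));
        return shared.size();
    }

    public static void main(String[] args) {
        List<String> shell = Arrays.asList("0,0", "0,10", "10,10", "10,0");
        List<String> touchingHole = Arrays.asList("0,0", "2,4", "4,2");     // one shared vertex: allowed
        List<String> overlappingHole = Arrays.asList("0,0", "0,10", "5,5"); // two shared vertices: rejected
        System.out.println(sharedVertices(shell, touchingHole));    // 1
        System.out.println(sharedVertices(shell, overlappingHole)); // 2 -> would trigger InvalidShapeException
    }
}
--------------------------------------------------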
@@ -125,6 +141,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
        int numEdges = shell.points.size()-1; // Last point is repeated
        for (int i = 0; i < holes.size(); i++) {
            numEdges += holes.get(i).points.size()-1;
+           validateHole(shell, this.holes.get(i));
        }

        Edge[] edges = new Edge[numEdges];
@@ -253,28 +270,62 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
            }
        }

-       double shift = any.coordinate.x > DATELINE ? DATELINE : (any.coordinate.x < -DATELINE ? -DATELINE : 0);
+       double shiftOffset = any.coordinate.x > DATELINE ? DATELINE : (any.coordinate.x < -DATELINE ? -DATELINE : 0);
        if (debugEnabled()) {
-           LOGGER.debug("shift: {[]}", shift);
+           LOGGER.debug("shift: {[]}", shiftOffset);
        }

        // run along the border of the component, collect the
        // edges, shift them according to the dateline and
        // update the component id
-       int length = 0;
+       int length = 0, connectedComponents = 0;
+       // if there are two connected components, splitIndex keeps track of where to split the edge array
+       // start at 1 since the source coordinate is shared
+       int splitIndex = 1;
        Edge current = edge;
+       Edge prev = edge;
+       // bookkeep the source and sink of each visited coordinate
+       HashMap<Coordinate, Pair<Edge, Edge>> visitedEdge = new HashMap<>();
        do {
-           current.coordinate = shift(current.coordinate, shift);
+           current.coordinate = shift(current.coordinate, shiftOffset);
            current.component = id;
-           if(edges != null) {
-               edges.add(current);
+
+           if (edges != null) {
+               // found a closed loop - we have two connected components so we need to slice into two distinct components
+               if (visitedEdge.containsKey(current.coordinate)) {
+                   if (connectedComponents > 0 && current.next != edge) {
+                       throw new InvalidShapeException("Shape contains more than one shared point");
+                   }
+
+                   // a negative id flags the edge as visited for the edges(...) method.
+                   // since we're splitting connected components, we want the edges method to visit
+                   // the newly separated component
+                   final int visitID = -id;
+                   Edge firstAppearance = visitedEdge.get(current.coordinate).getRight();
+                   // correct the graph pointers by correcting the 'next' pointer for both the
+                   // first appearance and this appearance of the edge
+                   Edge temp = firstAppearance.next;
+                   firstAppearance.next = current.next;
+                   current.next = temp;
+                   current.component = visitID;
+                   // backtrack until we get back to this coordinate, setting the visit id to
+                   // a non-visited value (anything positive)
+                   do {
+                       prev.component = visitID;
+                       prev = visitedEdge.get(prev.coordinate).getLeft();
+                       ++splitIndex;
+                   } while (!current.coordinate.equals(prev.coordinate));
+                   ++connectedComponents;
+               } else {
+                   visitedEdge.put(current.coordinate, Pair.of(prev, current));
+               }
+               edges.add(current);
+               prev = current;
            }
            length++;
-       } while((current = current.next) != edge);
+       } while(connectedComponents == 0 && (current = current.next) != edge);

-       return length;
+       return (splitIndex != 1) ? length - splitIndex : length;
    }

    /**
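For reference, a self-contained illustration of the `shiftOffset` clamp used above, assuming `DATELINE` is 180 degrees of longitude:

[source,java]
--------------------------------------------------
// Illustration only (not Elasticsearch code): the clamp applied to a component's x coordinate.
public final class DatelineShiftExample {
    static final double DATELINE = 180.0; // assumed value

    static double shiftOffset(double x) {
        return x > DATELINE ? DATELINE : (x < -DATELINE ? -DATELINE : 0);
    }

    public static void main(String[] args) {
        System.out.println(shiftOffset(190.0));  // 180.0: coordinate lies beyond +180, component gets shifted
        System.out.println(shiftOffset(-185.0)); // -180.0: coordinate lies beyond -180
        System.out.println(shiftOffset(120.0));  // 0.0: no shift needed
    }
}
--------------------------------------------------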
@@ -364,11 +415,12 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
        // if no intersection is found then the hole is not within the polygon, so
        // don't waste time calling a binary search
        final int pos;
-       if (intersections == 0 ||
-           (pos = Arrays.binarySearch(edges, 0, intersections, current, INTERSECTION_ORDER)) >= 0) {
-           throw new ElasticsearchParseException("Invalid shape: Hole is not within polygon");
+       boolean sharedVertex = false;
+       if (intersections == 0 || ((pos = Arrays.binarySearch(edges, 0, intersections, current, INTERSECTION_ORDER)) >= 0)
+               && !(sharedVertex = (edges[pos].intersect.compareTo(current.coordinate) == 0)) ) {
+           throw new InvalidShapeException("Invalid shape: Hole is not within polygon");
        }
-       final int index = -(pos+2);
+       final int index = -((sharedVertex) ? 0 : pos+2);
        final int component = -edges[index].component - numHoles - 1;

        if(debugEnabled()) {
@@ -465,7 +517,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
                                 Edge[] edges, int offset) {
        // inner rings (holes) have the opposite direction to the outer rings
        // XOR will invert the orientation for outer ring cases (truth table: T/T = F, T/F = T, F/T = T, F/F = F)
-       boolean direction = (component != 0 ^ orientation == Orientation.RIGHT);
+       boolean direction = (component == 0 ^ orientation == Orientation.RIGHT);
        // set the points array accordingly (shell or hole)
        Coordinate[] points = (hole != null) ? hole.coordinates(false) : shell.coordinates(false);
        Edge.ring(component, direction, orientation == Orientation.LEFT, shell, points, 0, edges, offset, points.length-1);
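A tiny illustration of the XOR in the truth-table comment above (assuming component `0` is the shell and any non-zero component is a hole):

[source,java]
--------------------------------------------------
// Prints the truth table for (component == 0) ^ (orientation == RIGHT); illustration only.
public class RingDirectionTruthTable {
    public static void main(String[] args) {
        for (boolean isShell : new boolean[]{true, false}) {
            for (boolean right : new boolean[]{true, false}) {
                System.out.println("shell=" + isShell + ", right=" + right + " -> " + (isShell ^ right));
            }
        }
    }
}
--------------------------------------------------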
@@ -20,6 +20,7 @@
package org.elasticsearch.common.geo.builders;

import com.spatial4j.core.context.jts.JtsSpatialContext;
+import com.spatial4j.core.exception.InvalidShapeException;
import com.spatial4j.core.shape.Shape;
import com.spatial4j.core.shape.jts.JtsGeometry;
import com.vividsolutions.jts.geom.Coordinate;
@@ -250,6 +251,9 @@ public abstract class ShapeBuilder implements ToXContent {
            token = parser.nextToken();
            double lat = parser.doubleValue();
            token = parser.nextToken();
+           while (token == XContentParser.Token.VALUE_NUMBER) {
+               token = parser.nextToken();
+           }
            return new CoordinateNode(new Coordinate(lon, lat));
        } else if (token == XContentParser.Token.VALUE_NULL) {
            throw new ElasticsearchIllegalArgumentException("coordinates cannot contain NULL values)");
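What the added loop achieves, shown on plain data: GeoJSON-style positions may carry values beyond longitude and latitude (for example an elevation), and only the first two are kept. The array below is hypothetical example data.

[source,java]
--------------------------------------------------
// Illustration only: mirrors the added while-loop, which reads and discards any
// numbers after longitude and latitude.
public class ExtraCoordinateValuesExample {
    public static void main(String[] args) {
        double[] position = {13.4, 52.5, 34.0}; // lon, lat, elevation (example data)
        double lon = position[0];
        double lat = position[1];
        // indices 2..n are skipped, just as the parser now skips trailing VALUE_NUMBER tokens
        System.out.println("lon=" + lon + ", lat=" + lat);
    }
}
--------------------------------------------------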
@@ -446,7 +450,8 @@ public abstract class ShapeBuilder implements ToXContent {

        protected Edge(Coordinate coordinate, Edge next, Coordinate intersection) {
            this.coordinate = coordinate;
-           this.next = next;
+           // use setter to catch duplicate point cases
+           this.setNext(next);
            this.intersect = intersection;
            if (next != null) {
                this.component = next.component;
@@ -457,6 +462,17 @@ public abstract class ShapeBuilder implements ToXContent {
            this(coordinate, next, Edge.MAX_COORDINATE);
        }

+       protected void setNext(Edge next) {
+           // don't bother setting next if it's null
+           if (next != null) {
+               // a self-loop means duplicate consecutive coordinates, which is an invalid shape
+               if (this.coordinate.equals(next.coordinate)) {
+                   throw new InvalidShapeException("Provided shape has duplicate consecutive coordinates at: " + this.coordinate);
+               }
+               this.next = next;
+           }
+       }
+
        private static final int top(Coordinate[] points, int offset, int length) {
            int top = 0; // we start at 1 here since top points to 0
            for (int i = 1; i < length; i++) {
@@ -522,17 +538,19 @@
                if (direction) {
                    edges[edgeOffset + i] = new Edge(points[pointOffset + i], edges[edgeOffset + i - 1]);
                    edges[edgeOffset + i].component = component;
-               } else {
+               } else if(!edges[edgeOffset + i - 1].coordinate.equals(points[pointOffset + i])) {
                    edges[edgeOffset + i - 1].next = edges[edgeOffset + i] = new Edge(points[pointOffset + i], null);
                    edges[edgeOffset + i - 1].component = component;
+               } else {
+                   throw new InvalidShapeException("Provided shape has duplicate consecutive coordinates at: " + points[pointOffset + i]);
                }
            }

            if (direction) {
-               edges[edgeOffset].next = edges[edgeOffset + length - 1];
+               edges[edgeOffset].setNext(edges[edgeOffset + length - 1]);
                edges[edgeOffset].component = component;
            } else {
-               edges[edgeOffset + length - 1].next = edges[edgeOffset];
+               edges[edgeOffset + length - 1].setNext(edges[edgeOffset]);
                edges[edgeOffset + length - 1].component = component;
            }
@ -1,79 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.lucene.search;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.search.DocIdSet;
|
||||
import org.apache.lucene.search.DocValuesDocIdSet;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.apache.lucene.util.RamUsageEstimator;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
|
||||
public class LimitFilter extends NoCacheFilter {
|
||||
|
||||
private final int limit;
|
||||
private int counter;
|
||||
|
||||
public LimitFilter(int limit) {
|
||||
this.limit = limit;
|
||||
}
|
||||
|
||||
public int getLimit() {
|
||||
return limit;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
|
||||
if (counter > limit) {
|
||||
return null;
|
||||
}
|
||||
return new LimitDocIdSet(context.reader().maxDoc(), acceptDocs, limit);
|
||||
}
|
||||
|
||||
public class LimitDocIdSet extends DocValuesDocIdSet {
|
||||
|
||||
private final int limit;
|
||||
|
||||
public LimitDocIdSet(int maxDoc, @Nullable Bits acceptDocs, int limit) {
|
||||
super(maxDoc, acceptDocs);
|
||||
this.limit = limit;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
if (++counter > limit) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long ramBytesUsed() {
|
||||
return RamUsageEstimator.NUM_BYTES_INT;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(String field) {
|
||||
return "limit(limit=" + limit + ")";
|
||||
}
|
||||
}
|
|
@@ -147,18 +147,35 @@ public class NodeEnvironment extends AbstractComponent implements Closeable{
        if (logger.isDebugEnabled()) {
            logger.debug("using node location [{}], local_node_id [{}]", nodePaths, localNodeId);
        }
-       if (logger.isTraceEnabled()) {
+
+       // We do some I/O in here, so skip it if INFO is not enabled:
+       if (logger.isInfoEnabled()) {
            StringBuilder sb = new StringBuilder("node data locations details:\n");
            for (Path file : nodePaths) {
-               sb.append(" -> ")
-                   .append(file.toAbsolutePath())
-                   .append(", free_space [")
-                   .append(new ByteSizeValue(Files.getFileStore(file).getUnallocatedSpace()))
+               // NOTE: FSDirectory.open creates the directory up above so it will exist here:
+               sb.append(" -> ").append(file.toAbsolutePath());
+               try {
+                   FileStore fileStore = getFileStore(file);
+                   boolean spins = IOUtils.spins(file);
+                   sb.append(", free_space [")
+                       .append(new ByteSizeValue(fileStore.getUnallocatedSpace()))
                        .append("], usable_space [")
-                       .append(new ByteSizeValue(Files.getFileStore(file).getUsableSpace()))
-                       .append("]\n");
+                       .append(new ByteSizeValue(fileStore.getUsableSpace()))
+                       .append("], total_space [")
+                       .append(new ByteSizeValue(fileStore.getTotalSpace()))
+                       .append("], spins? [")
+                       .append(spins ? "possibly" : "no")
+                       .append("], mount [")
+                       .append(fileStore)
+                       .append("], type [")
+                       .append(fileStore.type())
+                       .append(']');
+               } catch (Exception e) {
+                   sb.append(", ignoring exception gathering filesystem details: " + e);
+               }
-               logger.trace(sb.toString());
+               sb.append('\n');
            }
+           logger.info(sb.toString());
        }

        this.nodeIndicesPaths = new Path[nodePaths.length];
@@ -167,7 +184,32 @@ public class NodeEnvironment extends AbstractComponent implements Closeable{
        }
    }

+   // NOTE: poached from Lucene's IOUtils:
+   // Files.getFileStore(Path) is useless here!
+   // don't complain, just try it yourself
+   static FileStore getFileStore(Path path) throws IOException {
+       FileStore store = Files.getFileStore(path);
+       String mount = getMountPoint(store);
+
+       // find the "matching" FileStore from the system list, it's the one we want.
+       for (FileStore fs : path.getFileSystem().getFileStores()) {
+           if (mount.equals(getMountPoint(fs))) {
+               return fs;
+           }
+       }
+
+       // fall back to the crappy one we got from Files.getFileStore
+       return store;
+   }
+
+   // NOTE: poached from Lucene's IOUtils:
+   // these are hacks that are not guaranteed
+   static String getMountPoint(FileStore store) {
+       String desc = store.toString();
+       return desc.substring(0, desc.lastIndexOf('(') - 1);
+   }
+
    /**
     * Deletes a shard data directory iff the shard locks were successfully acquired.
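The substring in `getMountPoint` relies on `FileStore.toString()` producing a description like `mountpoint (volume)`; a small illustration under that assumption (the exact format is JDK- and platform-dependent):

[source,java]
--------------------------------------------------
// Illustration of the parsing in getMountPoint; the input string is an assumed example.
public class MountPointExample {
    public static void main(String[] args) {
        String desc = "/var/data/elasticsearch (/dev/sda1)";
        String mount = desc.substring(0, desc.lastIndexOf('(') - 1);
        System.out.println(mount); // "/var/data/elasticsearch"
    }
}
--------------------------------------------------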
@@ -1061,4 +1061,10 @@ public abstract class Engine implements Closeable {
            }
        }
    }

+   /**
+    * Returns <code>true</code> if the internal writer has any uncommitted changes, otherwise <code>false</code>.
+    * @return whether the internal index writer has uncommitted changes
+    */
+   public abstract boolean hasUncommittedChanges();
}
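The new method surfaces Lucene's `IndexWriter#hasUncommittedChanges()`. A minimal plain-Lucene sketch of that flag (assuming a Lucene 5.x-style `IndexWriterConfig` constructor), not Elasticsearch's `Engine`:

[source,java]
--------------------------------------------------
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.RAMDirectory;

// Plain-Lucene sketch; the flag flips back to false once commit() has run.
public class UncommittedChangesExample {
    public static void main(String[] args) throws Exception {
        try (IndexWriter writer = new IndexWriter(new RAMDirectory(),
                new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new StringField("id", "1", Field.Store.YES));
            writer.addDocument(doc);
            System.out.println(writer.hasUncommittedChanges()); // true: pending changes
            writer.commit();
            System.out.println(writer.hasUncommittedChanges()); // false: everything committed
        }
    }
}
--------------------------------------------------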
@@ -940,6 +940,11 @@ public class InternalEngine extends Engine {
        }
    }

+   @Override
+   public boolean hasUncommittedChanges() {
+       return indexWriter.hasUncommittedChanges();
+   }
+
    @Override
    protected SearcherManager getSearcherManager() {
        return searcherManager;
@@ -216,4 +216,9 @@ public class ShadowEngine extends Engine {
        }
    }

+   @Override
+   public boolean hasUncommittedChanges() {
+       return false;
+   }
}
@@ -41,6 +41,8 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.MergeContext;
+import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
@@ -262,6 +264,50 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper<String> {
        }
    }

    @Override
    public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
        super.merge(mergeWith, mergeContext);
        if (!this.getClass().equals(mergeWith.getClass())) {
            mergeContext.addConflict("mapper [" + names.fullName() + "] has different field type");
            return;
        }
        final GeoShapeFieldMapper fieldMergeWith = (GeoShapeFieldMapper) mergeWith;
        if (!mergeContext.mergeFlags().simulate()) {
            final PrefixTreeStrategy mergeWithStrategy = fieldMergeWith.defaultStrategy;

            // prevent user from changing strategies
            if (!(this.defaultStrategy.getClass().equals(mergeWithStrategy.getClass()))) {
                mergeContext.addConflict("mapper [" + names.fullName() + "] has different strategy");
            }

            final SpatialPrefixTree grid = this.defaultStrategy.getGrid();
            final SpatialPrefixTree mergeGrid = mergeWithStrategy.getGrid();

            // prevent user from changing trees (changes encoding)
            if (!grid.getClass().equals(mergeGrid.getClass())) {
                mergeContext.addConflict("mapper [" + names.fullName() + "] has different tree");
            }

            // TODO we should allow this, but at the moment levels is used to build bookkeeping variables
            // in lucene's SpatialPrefixTree implementations, need a patch to correct that first
            if (grid.getMaxLevels() != mergeGrid.getMaxLevels()) {
                mergeContext.addConflict("mapper [" + names.fullName() + "] has different tree_levels or precision");
            }

            // bail if there were merge conflicts
            if (mergeContext.hasConflicts()) {
                return;
            }

            // change distance error percent
            this.defaultStrategy.setDistErrPct(mergeWithStrategy.getDistErrPct());

            // change orientation - this is allowed because existing dateline spanning shapes
            // have already been unwound and segmented
            this.shapeOrientation = fieldMergeWith.shapeOrientation;
        }
    }

    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
    }
@@ -19,6 +19,7 @@

package org.elasticsearch.index.query;

+import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
@@ -39,7 +40,9 @@ public abstract class FilterBuilders {

    /**
     * A filter that limits the results to the provided limit value (per shard!).
+    * @deprecated Use {@link SearchRequestBuilder#setTerminateAfter(int)} instead.
     */
+   @Deprecated
    public static LimitFilterBuilder limitFilter(int limit) {
        return new LimitFilterBuilder(limit);
    }
@ -1,148 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.elasticsearch.ElasticsearchIllegalArgumentException;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class FuzzyLikeThisFieldQueryBuilder extends BaseQueryBuilder implements BoostableQueryBuilder<FuzzyLikeThisFieldQueryBuilder> {
|
||||
|
||||
private final String name;
|
||||
|
||||
private Float boost;
|
||||
|
||||
private String likeText = null;
|
||||
private Fuzziness fuzziness;
|
||||
private Integer prefixLength;
|
||||
private Integer maxQueryTerms;
|
||||
private Boolean ignoreTF;
|
||||
private String analyzer;
|
||||
private Boolean failOnUnsupportedField;
|
||||
private String queryName;
|
||||
|
||||
/**
|
||||
* A fuzzy more like this query on the provided field.
|
||||
*
|
||||
* @param name the name of the field
|
||||
*/
|
||||
public FuzzyLikeThisFieldQueryBuilder(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/**
|
||||
* The text to use in order to find documents that are "like" this.
|
||||
*/
|
||||
public FuzzyLikeThisFieldQueryBuilder likeText(String likeText) {
|
||||
this.likeText = likeText;
|
||||
return this;
|
||||
}
|
||||
|
||||
public FuzzyLikeThisFieldQueryBuilder fuzziness(Fuzziness fuzziness) {
|
||||
this.fuzziness = fuzziness;
|
||||
return this;
|
||||
}
|
||||
|
||||
public FuzzyLikeThisFieldQueryBuilder prefixLength(int prefixLength) {
|
||||
this.prefixLength = prefixLength;
|
||||
return this;
|
||||
}
|
||||
|
||||
public FuzzyLikeThisFieldQueryBuilder maxQueryTerms(int maxQueryTerms) {
|
||||
this.maxQueryTerms = maxQueryTerms;
|
||||
return this;
|
||||
}
|
||||
|
||||
public FuzzyLikeThisFieldQueryBuilder ignoreTF(boolean ignoreTF) {
|
||||
this.ignoreTF = ignoreTF;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The analyzer that will be used to analyze the text. Defaults to the analyzer associated with the field.
|
||||
*/
|
||||
public FuzzyLikeThisFieldQueryBuilder analyzer(String analyzer) {
|
||||
this.analyzer = analyzer;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FuzzyLikeThisFieldQueryBuilder boost(float boost) {
|
||||
this.boost = boost;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether to fail or return no result when this query is run against a field which is not supported such as binary/numeric fields.
|
||||
*/
|
||||
public FuzzyLikeThisFieldQueryBuilder failOnUnsupportedField(boolean fail) {
|
||||
failOnUnsupportedField = fail;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the query name for the filter that can be used when searching for matched_filters per hit.
|
||||
*/
|
||||
public FuzzyLikeThisFieldQueryBuilder queryName(String queryName) {
|
||||
this.queryName = queryName;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(FuzzyLikeThisFieldQueryParser.NAME);
|
||||
builder.startObject(name);
|
||||
if (likeText == null) {
|
||||
throw new ElasticsearchIllegalArgumentException("fuzzyLikeThis requires 'likeText' to be provided");
|
||||
}
|
||||
builder.field("like_text", likeText);
|
||||
if (maxQueryTerms != null) {
|
||||
builder.field("max_query_terms", maxQueryTerms);
|
||||
}
|
||||
if (fuzziness != null) {
|
||||
fuzziness.toXContent(builder, params);
|
||||
}
|
||||
if (prefixLength != null) {
|
||||
builder.field("prefix_length", prefixLength);
|
||||
}
|
||||
if (ignoreTF != null) {
|
||||
builder.field("ignore_tf", ignoreTF);
|
||||
}
|
||||
if (boost != null) {
|
||||
builder.field("boost", boost);
|
||||
}
|
||||
if (analyzer != null) {
|
||||
builder.field("analyzer", analyzer);
|
||||
}
|
||||
if (failOnUnsupportedField != null) {
|
||||
builder.field("fail_on_unsupported_field", failOnUnsupportedField);
|
||||
}
|
||||
if (queryName != null) {
|
||||
builder.field("_name", queryName);
|
||||
}
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
}
|
||||
}
|
|
@ -1,160 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.sandbox.queries.FuzzyLikeThisQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.ElasticsearchIllegalArgumentException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* <pre>
|
||||
* {
|
||||
* fuzzy_like_this_field : {
|
||||
* field1 : {
|
||||
* maxNumTerms : 12,
|
||||
* boost : 1.1,
|
||||
* likeText : "..."
|
||||
* }
|
||||
* }
|
||||
* </pre>
|
||||
*/
|
||||
public class FuzzyLikeThisFieldQueryParser implements QueryParser {
|
||||
|
||||
public static final String NAME = "flt_field";
|
||||
private static final Fuzziness DEFAULT_FUZZINESS = Fuzziness.fromSimilarity(0.5f);
|
||||
private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("min_similarity");
|
||||
|
||||
@Inject
|
||||
public FuzzyLikeThisFieldQueryParser() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] names() {
|
||||
return new String[]{NAME, "fuzzy_like_this_field", Strings.toCamelCase(NAME), "fuzzyLikeThisField"};
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
int maxNumTerms = 25;
|
||||
float boost = 1.0f;
|
||||
String likeText = null;
|
||||
Fuzziness fuzziness = DEFAULT_FUZZINESS;
|
||||
int prefixLength = 0;
|
||||
boolean ignoreTF = false;
|
||||
Analyzer analyzer = null;
|
||||
boolean failOnUnsupportedField = true;
|
||||
String queryName = null;
|
||||
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new QueryParsingException(parseContext.index(), "[flt_field] query malformed, no field");
|
||||
}
|
||||
String fieldName = parser.currentName();
|
||||
|
||||
// now, we move after the field name, which starts the object
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.START_OBJECT) {
|
||||
throw new QueryParsingException(parseContext.index(), "[flt_field] query malformed, no start_object");
|
||||
}
|
||||
|
||||
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if ("like_text".equals(currentFieldName) || "likeText".equals(currentFieldName)) {
|
||||
likeText = parser.text();
|
||||
} else if ("max_query_terms".equals(currentFieldName) || "maxQueryTerms".equals(currentFieldName)) {
|
||||
maxNumTerms = parser.intValue();
|
||||
} else if ("boost".equals(currentFieldName)) {
|
||||
boost = parser.floatValue();
|
||||
} else if ("ignore_tf".equals(currentFieldName) || "ignoreTF".equals(currentFieldName)) {
|
||||
ignoreTF = parser.booleanValue();
|
||||
} else if (FUZZINESS.match(currentFieldName, parseContext.parseFlags())) {
|
||||
fuzziness = Fuzziness.parse(parser);
|
||||
} else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) {
|
||||
prefixLength = parser.intValue();
|
||||
} else if ("analyzer".equals(currentFieldName)) {
|
||||
analyzer = parseContext.analysisService().analyzer(parser.text());
|
||||
} else if ("fail_on_unsupported_field".equals(currentFieldName) || "failOnUnsupportedField".equals(currentFieldName)) {
|
||||
failOnUnsupportedField = parser.booleanValue();
|
||||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext.index(), "[flt_field] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (likeText == null) {
|
||||
throw new QueryParsingException(parseContext.index(), "fuzzy_like_This_field requires 'like_text' to be specified");
|
||||
}
|
||||
|
||||
MapperService.SmartNameFieldMappers smartNameFieldMappers = parseContext.smartFieldMappers(fieldName);
|
||||
if (smartNameFieldMappers != null) {
|
||||
if (smartNameFieldMappers.hasMapper()) {
|
||||
fieldName = smartNameFieldMappers.mapper().names().indexName();
|
||||
if (analyzer == null) {
|
||||
analyzer = smartNameFieldMappers.mapper().searchAnalyzer();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (analyzer == null) {
|
||||
analyzer = parseContext.mapperService().searchAnalyzer();
|
||||
}
|
||||
if (!Analysis.generatesCharacterTokenStream(analyzer, fieldName)) {
|
||||
if (failOnUnsupportedField) {
|
||||
throw new ElasticsearchIllegalArgumentException("fuzzy_like_this_field doesn't support binary/numeric fields: [" + fieldName + "]");
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
FuzzyLikeThisQuery fuzzyLikeThisQuery = new FuzzyLikeThisQuery(maxNumTerms, analyzer);
|
||||
fuzzyLikeThisQuery.addTerms(likeText, fieldName, fuzziness.asSimilarity(), prefixLength);
|
||||
fuzzyLikeThisQuery.setBoost(boost);
|
||||
fuzzyLikeThisQuery.setIgnoreTF(ignoreTF);
|
||||
|
||||
// move to the next end object, to close the field name
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.END_OBJECT) {
|
||||
throw new QueryParsingException(parseContext.index(), "[flt_field] query malformed, no end_object");
|
||||
}
|
||||
assert token == XContentParser.Token.END_OBJECT;
|
||||
|
||||
if (queryName != null) {
|
||||
parseContext.addNamedQuery(queryName, fuzzyLikeThisQuery);
|
||||
}
|
||||
return fuzzyLikeThisQuery;
|
||||
}
|
||||
}
|
|
@ -1,160 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.elasticsearch.ElasticsearchIllegalArgumentException;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class FuzzyLikeThisQueryBuilder extends BaseQueryBuilder implements BoostableQueryBuilder<FuzzyLikeThisQueryBuilder> {
|
||||
|
||||
private final String[] fields;
|
||||
|
||||
private Float boost;
|
||||
|
||||
private String likeText = null;
|
||||
private Fuzziness fuzziness;
|
||||
private Integer prefixLength;
|
||||
private Integer maxQueryTerms;
|
||||
private Boolean ignoreTF;
|
||||
private String analyzer;
|
||||
private Boolean failOnUnsupportedField;
|
||||
private String queryName;
|
||||
|
||||
/**
|
||||
* Constructs a new fuzzy like this query which uses the "_all" field.
|
||||
*/
|
||||
public FuzzyLikeThisQueryBuilder() {
|
||||
this.fields = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the field names that will be used when generating the 'Fuzzy Like This' query.
|
||||
*
|
||||
* @param fields the field names that will be used when generating the 'Fuzzy Like This' query.
|
||||
*/
|
||||
public FuzzyLikeThisQueryBuilder(String... fields) {
|
||||
this.fields = fields;
|
||||
}
|
||||
|
||||
/**
|
||||
* The text to use in order to find documents that are "like" this.
|
||||
*/
|
||||
public FuzzyLikeThisQueryBuilder likeText(String likeText) {
|
||||
this.likeText = likeText;
|
||||
return this;
|
||||
}
|
||||
|
||||
public FuzzyLikeThisQueryBuilder fuzziness(Fuzziness fuzziness) {
|
||||
this.fuzziness = fuzziness;
|
||||
return this;
|
||||
}
|
||||
|
||||
public FuzzyLikeThisQueryBuilder prefixLength(int prefixLength) {
|
||||
this.prefixLength = prefixLength;
|
||||
return this;
|
||||
}
|
||||
|
||||
public FuzzyLikeThisQueryBuilder maxQueryTerms(int maxQueryTerms) {
|
||||
this.maxQueryTerms = maxQueryTerms;
|
||||
return this;
|
||||
}
|
||||
|
||||
public FuzzyLikeThisQueryBuilder ignoreTF(boolean ignoreTF) {
|
||||
this.ignoreTF = ignoreTF;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The analyzer that will be used to analyze the text. Defaults to the analyzer associated with the fied.
|
||||
*/
|
||||
public FuzzyLikeThisQueryBuilder analyzer(String analyzer) {
|
||||
this.analyzer = analyzer;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FuzzyLikeThisQueryBuilder boost(float boost) {
|
||||
this.boost = boost;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether to fail or return no result when this query is run against a field which is not supported such as binary/numeric fields.
|
||||
*/
|
||||
public FuzzyLikeThisQueryBuilder failOnUnsupportedField(boolean fail) {
|
||||
failOnUnsupportedField = fail;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the query name for the filter that can be used when searching for matched_filters per hit.
|
||||
*/
|
||||
public FuzzyLikeThisQueryBuilder queryName(String queryName) {
|
||||
this.queryName = queryName;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(FuzzyLikeThisQueryParser.NAME);
|
||||
if (fields != null) {
|
||||
builder.startArray("fields");
|
||||
for (String field : fields) {
|
||||
builder.value(field);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
if (likeText == null) {
|
||||
throw new ElasticsearchIllegalArgumentException("fuzzyLikeThis requires 'likeText' to be provided");
|
||||
}
|
||||
builder.field("like_text", likeText);
|
||||
if (maxQueryTerms != null) {
|
||||
builder.field("max_query_terms", maxQueryTerms);
|
||||
}
|
||||
if (fuzziness != null) {
|
||||
fuzziness.toXContent(builder, params);
|
||||
}
|
||||
if (prefixLength != null) {
|
||||
builder.field("prefix_length", prefixLength);
|
||||
}
|
||||
if (ignoreTF != null) {
|
||||
builder.field("ignore_tf", ignoreTF);
|
||||
}
|
||||
if (boost != null) {
|
||||
builder.field("boost", boost);
|
||||
}
|
||||
if (analyzer != null) {
|
||||
builder.field("analyzer", analyzer);
|
||||
}
|
||||
if (failOnUnsupportedField != null) {
|
||||
builder.field("fail_on_unsupported_field", failOnUnsupportedField);
|
||||
}
|
||||
if (queryName != null) {
|
||||
builder.field("_name", queryName);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
}
|
|
@ -1,162 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.sandbox.queries.FuzzyLikeThisQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.ElasticsearchIllegalArgumentException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* <pre>
|
||||
* {
|
||||
* fuzzy_like_this : {
|
||||
* maxNumTerms : 12,
|
||||
* boost : 1.1,
|
||||
* fields : ["field1", "field2"]
|
||||
* likeText : "..."
|
||||
* }
|
||||
* }
|
||||
* </pre>
|
||||
*/
|
||||
public class FuzzyLikeThisQueryParser implements QueryParser {
|
||||
|
||||
public static final String NAME = "flt";
|
||||
private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("min_similarity");
|
||||
|
||||
@Inject
|
||||
public FuzzyLikeThisQueryParser() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] names() {
|
||||
return new String[]{NAME, "fuzzy_like_this", "fuzzyLikeThis"};
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
int maxNumTerms = 25;
|
||||
float boost = 1.0f;
|
||||
List<String> fields = null;
|
||||
String likeText = null;
|
||||
Fuzziness fuzziness = Fuzziness.TWO;
|
||||
int prefixLength = 0;
|
||||
boolean ignoreTF = false;
|
||||
Analyzer analyzer = null;
|
||||
boolean failOnUnsupportedField = true;
|
||||
String queryName = null;
|
||||
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if ("like_text".equals(currentFieldName) || "likeText".equals(currentFieldName)) {
|
||||
likeText = parser.text();
|
||||
} else if ("max_query_terms".equals(currentFieldName) || "maxQueryTerms".equals(currentFieldName)) {
|
||||
maxNumTerms = parser.intValue();
|
||||
} else if ("boost".equals(currentFieldName)) {
|
||||
boost = parser.floatValue();
|
||||
} else if ("ignore_tf".equals(currentFieldName) || "ignoreTF".equals(currentFieldName)) {
|
||||
ignoreTF = parser.booleanValue();
|
||||
} else if (FUZZINESS.match(currentFieldName, parseContext.parseFlags())) {
|
||||
fuzziness = Fuzziness.parse(parser);
|
||||
} else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) {
|
||||
prefixLength = parser.intValue();
|
||||
} else if ("analyzer".equals(currentFieldName)) {
|
||||
analyzer = parseContext.analysisService().analyzer(parser.text());
|
||||
} else if ("fail_on_unsupported_field".equals(currentFieldName) || "failOnUnsupportedField".equals(currentFieldName)) {
|
||||
failOnUnsupportedField = parser.booleanValue();
|
||||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext.index(), "[flt] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if ("fields".equals(currentFieldName)) {
|
||||
fields = Lists.newLinkedList();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
fields.add(parseContext.indexName(parser.text()));
|
||||
}
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext.index(), "[flt] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (likeText == null) {
|
||||
throw new QueryParsingException(parseContext.index(), "fuzzy_like_this requires 'like_text' to be specified");
|
||||
}
|
||||
|
||||
if (analyzer == null) {
|
||||
analyzer = parseContext.mapperService().searchAnalyzer();
|
||||
}
|
||||
|
||||
FuzzyLikeThisQuery query = new FuzzyLikeThisQuery(maxNumTerms, analyzer);
|
||||
if (fields == null) {
|
||||
fields = Lists.newArrayList(parseContext.defaultField());
|
||||
} else if (fields.isEmpty()) {
|
||||
throw new QueryParsingException(parseContext.index(), "fuzzy_like_this requires 'fields' to be non-empty");
|
||||
}
|
||||
for (Iterator<String> it = fields.iterator(); it.hasNext(); ) {
|
||||
final String fieldName = it.next();
|
||||
if (!Analysis.generatesCharacterTokenStream(analyzer, fieldName)) {
|
||||
if (failOnUnsupportedField) {
|
||||
throw new ElasticsearchIllegalArgumentException("more_like_this doesn't support binary/numeric fields: [" + fieldName + "]");
|
||||
} else {
|
||||
it.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (fields.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
float minSimilarity = fuzziness.asFloat();
|
||||
if (minSimilarity >= 1.0f && minSimilarity != (int)minSimilarity) {
|
||||
throw new ElasticsearchIllegalArgumentException("fractional edit distances are not allowed");
|
||||
}
|
||||
if (minSimilarity < 0.0f) {
|
||||
throw new ElasticsearchIllegalArgumentException("minimumSimilarity cannot be less than 0");
|
||||
}
|
||||
for (String field : fields) {
|
||||
query.addTerms(likeText, field, minSimilarity, prefixLength);
|
||||
}
|
||||
query.setBoost(boost);
|
||||
query.setIgnoreTF(ignoreTF);
|
||||
|
||||
if (queryName != null) {
|
||||
parseContext.addNamedQuery(queryName, query);
|
||||
}
|
||||
return query;
|
||||
}
|
||||
}
|
|
@@ -19,10 +19,15 @@

package org.elasticsearch.index.query;

+import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;

+/**
+ * @deprecated Use {@link SearchRequestBuilder#setTerminateAfter(int)} instead.
+ */
+@Deprecated
public class LimitFilterBuilder extends BaseFilterBuilder {

    private final int limit;
@ -21,7 +21,7 @@ package org.elasticsearch.index.query;

import org.apache.lucene.search.Filter;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.LimitFilter;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
@ -62,6 +62,7 @@ public class LimitFilterParser implements FilterParser {
throw new QueryParsingException(parseContext.index(), "No value specified for limit filter");
}

return new LimitFilter(limit);
// this filter is deprecated and parses to a filter that matches everything
return Queries.MATCH_ALL_FILTER;
}
}
|
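Because the limit filter now parses to a match-all filter, the value is still read for backwards compatibility but then ignored. A hedged sketch of that "accept the setting, ignore it" pattern (illustrative names, not the actual Elasticsearch classes):

// Illustrative only: the limit is consumed but has no effect on matching.
interface DocFilter {
    boolean accept(int docId);
}

final class NoOpLimitFilterSketch {
    static DocFilter parseLimit(final int limit) {
        return new DocFilter() {
            @Override
            public boolean accept(int docId) {
                return true; // matches everything, like Queries.MATCH_ALL_FILTER
            }
        };
    }
}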
||||
|
|
|
@ -458,31 +458,6 @@ public abstract class QueryBuilders {
return new MoreLikeThisQueryBuilder();
}

/**
* A fuzzy like this query that finds documents that are "like" the provided {@link FuzzyLikeThisQueryBuilder#likeText(String)}
* which is checked against the fields the query is constructed with.
*
* @param fields The fields to run the query against
*/
public static FuzzyLikeThisQueryBuilder fuzzyLikeThisQuery(String... fields) {
return new FuzzyLikeThisQueryBuilder(fields);
}

/**
* A fuzzy like this query that finds documents that are "like" the provided {@link FuzzyLikeThisQueryBuilder#likeText(String)}
* which is checked against the "_all" field.
*/
public static FuzzyLikeThisQueryBuilder fuzzyLikeThisQuery() {
return new FuzzyLikeThisQueryBuilder();
}

/**
* A fuzzy like this query that finds documents that are "like" the provided {@link FuzzyLikeThisFieldQueryBuilder#likeText(String)}.
*/
public static FuzzyLikeThisFieldQueryBuilder fuzzyLikeThisFieldQuery(String name) {
return new FuzzyLikeThisFieldQueryBuilder(name);
}

/**
* Constructs a new scoring child query, with the child type and the query to run on the child documents. The
* results of this query are the parent docs that those child docs matched.
|
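With the fuzzy-like-this builders removed from QueryBuilders, callers need a different entry point. A hedged example of plausible substitutes (the diff itself prescribes no migration path; matchQuery with fuzziness, fuzzyQuery, and moreLikeThisQuery are assumed to remain available):

// Assumption: these QueryBuilders factory methods still exist; field names are illustrative.
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class FltMigrationSketch {
    public static void main(String[] args) {
        // fuzzy full-text matching on a single field
        QueryBuilder fuzzyMatch = QueryBuilders.matchQuery("name.first", "something").fuzziness("2");

        // per-term fuzzy matching on a single field
        QueryBuilder fuzzy = QueryBuilders.fuzzyQuery("name.last", "something");

        // more_like_this still covers document-similarity use cases over several fields
        QueryBuilder mlt = QueryBuilders.moreLikeThisQuery("name.first", "name.last");

        System.out.println(fuzzyMatch);
        System.out.println(fuzzy);
        System.out.println(mlt);
    }
}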
||||
|
|
|
@ -94,8 +94,6 @@ public class IndicesQueriesModule extends AbstractModule {
qpBinders.addBinding().to(SpanNearQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(SpanOrQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(MoreLikeThisQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(FuzzyLikeThisQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(FuzzyLikeThisFieldQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(WrapperQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(IndicesQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(CommonTermsQueryParser.class).asEagerSingleton();
|
||||
|
|
|
@ -358,9 +358,9 @@ public class RestNodesAction extends AbstractCatAction {
* Calculate the percentage of {@code used} from the {@code max} number.
* @param used The currently used number.
* @param max The maximum number.
* @return 0 if {@code max} is 0. Otherwise 100 * {@code used} / {@code max}.
* @return 0 if {@code max} is <= 0. Otherwise 100 * {@code used} / {@code max}.
*/
private short calculatePercentage(long used, long max) {
return max == 0 ? 0 : (short)((100d * used) / max);
return max <= 0 ? 0 : (short)((100d * used) / max);
}
}
|
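The changed guard avoids dividing by zero and also treats a negative maximum as "unknown". A small self-contained check of that behaviour (hypothetical class name):

// Hypothetical demo mirroring the method above.
public final class PercentageSketch {
    static short calculatePercentage(long used, long max) {
        return max <= 0 ? 0 : (short) ((100d * used) / max);
    }

    public static void main(String[] args) {
        System.out.println(calculatePercentage(512, 1024)); // 50
        System.out.println(calculatePercentage(512, 0));    // 0 instead of dividing by zero
        System.out.println(calculatePercentage(512, -1));   // 0 for a negative (unknown) maximum
    }
}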
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.bwcompat;
|
|||
|
||||
import com.carrotsearch.randomizedtesting.LifecycleScope;
|
||||
import com.google.common.util.concurrent.ListenableFuture;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
import org.elasticsearch.Version;
|
||||
|
@ -55,13 +56,18 @@ import org.hamcrest.Matchers;
|
|||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.net.URL;
|
||||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.FileVisitResult;
|
||||
import java.nio.file.FileVisitor;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.SimpleFileVisitor;
|
||||
import java.nio.file.attribute.BasicFileAttributes;
|
||||
import java.util.*;
|
||||
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
|
@ -75,7 +81,8 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio
|
|||
// We have a 0.20.6.zip etc for this.
|
||||
|
||||
static List<String> indexes;
|
||||
static Path indicesDir;
|
||||
static Path singleDataPath;
|
||||
static Path[] multiDataPath;
|
||||
|
||||
@BeforeClass
|
||||
public static void initIndexesList() throws Exception {
|
||||
|
@ -93,7 +100,8 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio
|
|||
@AfterClass
|
||||
public static void tearDownStatics() {
|
||||
indexes = null;
|
||||
indicesDir = null;
|
||||
singleDataPath = null;
|
||||
multiDataPath = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -108,17 +116,37 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio
|
|||
void setupCluster() throws Exception {
|
||||
ListenableFuture<List<String>> replicas = internalCluster().startNodesAsync(1); // for replicas
|
||||
|
||||
Path dataDir = newTempDirPath(LifecycleScope.SUITE);
|
||||
Path baseTempDir = newTempDirPath(LifecycleScope.SUITE);
|
||||
// start single data path node
|
||||
ImmutableSettings.Builder nodeSettings = ImmutableSettings.builder()
|
||||
.put("path.data", dataDir.toAbsolutePath())
|
||||
.put("path.data", baseTempDir.resolve("single-path").toAbsolutePath())
|
||||
.put("node.master", false); // workaround for dangling index loading issue when node is master
|
||||
String loadingNode = internalCluster().startNode(nodeSettings.build());
|
||||
ListenableFuture<String> singleDataPathNode = internalCluster().startNodeAsync(nodeSettings.build());
|
||||
|
||||
Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, loadingNode).nodeDataPaths();
|
||||
// start multi data path node
|
||||
nodeSettings = ImmutableSettings.builder()
|
||||
.put("path.data", baseTempDir.resolve("multi-path1").toAbsolutePath() + "," + baseTempDir.resolve("multi-path2").toAbsolutePath())
|
||||
.put("node.master", false); // workaround for dangling index loading issue when node is master
|
||||
ListenableFuture<String> multiDataPathNode = internalCluster().startNodeAsync(nodeSettings.build());
|
||||
|
||||
// find single data path dir
|
||||
Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, singleDataPathNode.get()).nodeDataPaths();
|
||||
assertEquals(1, nodePaths.length);
|
||||
indicesDir = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER);
|
||||
assertFalse(Files.exists(indicesDir));
|
||||
Files.createDirectories(indicesDir);
|
||||
singleDataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER);
|
||||
assertFalse(Files.exists(singleDataPath));
|
||||
Files.createDirectories(singleDataPath);
|
||||
logger.info("--> Single data path: " + singleDataPath.toString());
|
||||
|
||||
// find multi data path dirs
|
||||
nodePaths = internalCluster().getInstance(NodeEnvironment.class, multiDataPathNode.get()).nodeDataPaths();
|
||||
assertEquals(2, nodePaths.length);
|
||||
multiDataPath = new Path[] {nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER),
|
||||
nodePaths[1].resolve(NodeEnvironment.INDICES_FOLDER)};
|
||||
assertFalse(Files.exists(multiDataPath[0]));
|
||||
assertFalse(Files.exists(multiDataPath[1]));
|
||||
Files.createDirectories(multiDataPath[0]);
|
||||
Files.createDirectories(multiDataPath[1]);
|
||||
logger.info("--> Multi data paths: " + multiDataPath[0].toString() + ", " + multiDataPath[1].toString());
|
||||
|
||||
replicas.get(); // wait for replicas
|
||||
}
|
||||
|
@ -143,13 +171,15 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio
|
|||
|
||||
// the bwc scripts packs the indices under this path
|
||||
Path src = list[0].resolve("nodes/0/indices/" + indexName);
|
||||
Path dest = indicesDir.resolve(indexName);
|
||||
assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src));
|
||||
|
||||
logger.info("--> injecting index [{}] into path [{}]", indexName, dest);
|
||||
Files.move(src, dest);
|
||||
assertFalse(Files.exists(src));
|
||||
assertTrue(Files.exists(dest));
|
||||
if (randomBoolean()) {
|
||||
logger.info("--> injecting index [{}] into single data path", indexName);
|
||||
copyIndex(src, indexName, singleDataPath);
|
||||
} else {
|
||||
logger.info("--> injecting index [{}] into multi data path", indexName);
|
||||
copyIndex(src, indexName, multiDataPath);
|
||||
}
|
||||
|
||||
// force reloading dangling indices with a cluster state republish
|
||||
client().admin().cluster().prepareReroute().get();
|
||||
|
@ -157,8 +187,43 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio
|
|||
return indexName;
|
||||
}
|
||||
|
||||
// randomly distribute the files from src over dests paths
|
||||
void copyIndex(final Path src, final String indexName, final Path... dests) throws IOException {
|
||||
for (Path dest : dests) {
|
||||
Path indexDir = dest.resolve(indexName);
|
||||
assertFalse(Files.exists(indexDir));
|
||||
Files.createDirectories(indexDir);
|
||||
}
|
||||
Files.walkFileTree(src, new SimpleFileVisitor<Path>() {
|
||||
@Override
|
||||
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
|
||||
Path relativeDir = src.relativize(dir);
|
||||
for (Path dest : dests) {
|
||||
Path destDir = dest.resolve(indexName).resolve(relativeDir);
|
||||
Files.createDirectories(destDir);
|
||||
}
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
@Override
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
if (file.getFileName().toString().equals(IndexWriter.WRITE_LOCK_NAME)) {
|
||||
// skip lock file, we don't need it
|
||||
logger.trace("Skipping lock file: " + file.toString());
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
|
||||
Path relativeFile = src.relativize(file);
|
||||
Path destFile = dests[randomInt(dests.length - 1)].resolve(indexName).resolve(relativeFile);
|
||||
logger.trace("--> Moving " + relativeFile.toString() + " to " + destFile.toString());
|
||||
Files.move(file, destFile);
|
||||
assertFalse(Files.exists(file));
|
||||
assertTrue(Files.exists(destFile));
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
});
|
||||
}
|
||||
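The copyIndex helper above mirrors the source directory tree under every destination and then moves each file to one randomly chosen destination, so a single-path index ends up spread over several data paths. A hedged standalone sketch of the same idea using plain java.nio (illustrative names, not the test code itself):

// Illustrative sketch only: distributes files from src across several dest roots.
import java.io.IOException;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Random;

public class RandomPathDistributor {
    public static void distribute(final Path src, final Path[] dests, final Random random) throws IOException {
        Files.walkFileTree(src, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                // mirror the directory structure under every destination
                for (Path dest : dests) {
                    Files.createDirectories(dest.resolve(src.relativize(dir).toString()));
                }
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                // move each file to one randomly picked destination
                Path target = dests[random.nextInt(dests.length)].resolve(src.relativize(file).toString());
                Files.move(file, target);
                return FileVisitResult.CONTINUE;
            }
        });
    }
}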
|
||||
void unloadIndex(String indexName) throws Exception {
|
||||
client().admin().indices().prepareFlush(indexName).setWaitIfOngoing(true).setForce(true).get(); // temporary for debugging
|
||||
ElasticsearchAssertions.assertAcked(client().admin().indices().prepareDelete(indexName).get());
|
||||
ElasticsearchAssertions.assertAllFilesClosed();
|
||||
}
|
||||
|
@ -201,20 +266,6 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio
|
|||
|
||||
Collections.shuffle(indexes, getRandom());
|
||||
for (String index : indexes) {
|
||||
if (index.equals("index-0.90.13.zip") == false) {
|
||||
long startTime = System.currentTimeMillis();
|
||||
logger.info("--> Testing old index " + index);
|
||||
assertOldIndexWorks(index);
|
||||
logger.info("--> Done testing " + index + ", took " + ((System.currentTimeMillis() - startTime) / 1000.0) + " seconds");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@TestLogging("test.engine:TRACE,index.engine:TRACE,test.engine.lucene:TRACE,index.engine.lucene:TRACE")
|
||||
public void testShitSlowIndex() throws Exception {
|
||||
setupCluster();
|
||||
for (int i = 0; i < 5; i++) {
|
||||
String index = "index-0.90.13.zip";
|
||||
long startTime = System.currentTimeMillis();
|
||||
logger.info("--> Testing old index " + index);
|
||||
assertOldIndexWorks(index);
|
||||
|
@ -320,7 +371,7 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio
|
|||
assertAcked(client().admin().indices().prepareUpdateSettings(indexName).setSettings(ImmutableSettings.builder()
|
||||
.put("number_of_replicas", numReplicas)
|
||||
).execute().actionGet());
|
||||
ensureGreen(TimeValue.timeValueMinutes(1), indexName);
|
||||
ensureGreen(TimeValue.timeValueMinutes(2), indexName);
|
||||
logger.debug("--> index [{}] is green, took [{}]", indexName, TimeValue.timeValueMillis(System.currentTimeMillis() - startTime));
|
||||
logger.debug("--> recovery status:\n{}", XContentHelper.toString(client().admin().indices().prepareRecoveries(indexName).get()));
|
||||
|
||||
|
|
|
@ -115,6 +115,32 @@ public class GeoJSONShapeParserTests extends ElasticsearchTestCase {
|
|||
assertGeometryEquals(expected, multilinesGeoJson);
|
||||
}
|
||||
|
||||
public void testParse_multiDimensionShapes() throws IOException {
|
||||
// multi dimension point
|
||||
String pointGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Point")
|
||||
.startArray("coordinates").value(100.0).value(0.0).value(15.0).value(18.0).endArray()
|
||||
.endObject().string();
|
||||
|
||||
Point expectedPt = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0));
|
||||
assertGeometryEquals(new JtsPoint(expectedPt, SPATIAL_CONTEXT), pointGeoJson);
|
||||
|
||||
// multi dimension linestring
|
||||
String lineGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "LineString")
|
||||
.startArray("coordinates")
|
||||
.startArray().value(100.0).value(0.0).value(15.0).endArray()
|
||||
.startArray().value(101.0).value(1.0).value(18.0).value(19.0).endArray()
|
||||
.endArray()
|
||||
.endObject().string();
|
||||
|
||||
List<Coordinate> lineCoordinates = new ArrayList<>();
|
||||
lineCoordinates.add(new Coordinate(100, 0));
|
||||
lineCoordinates.add(new Coordinate(101, 1));
|
||||
|
||||
LineString expectedLS = GEOMETRY_FACTORY.createLineString(
|
||||
lineCoordinates.toArray(new Coordinate[lineCoordinates.size()]));
|
||||
assertGeometryEquals(jtsGeom(expectedLS), lineGeoJson);
|
||||
}
|
||||
|
||||
public void testParse_envelope() throws IOException {
|
||||
// test #1: envelope with expected coordinate order (TopLeft, BottomRight)
|
||||
String multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope")
|
||||
|
@ -267,7 +293,7 @@ public class GeoJSONShapeParserTests extends ElasticsearchTestCase {
|
|||
|
||||
XContentParser parser = JsonXContent.jsonXContent.createParser(multiPolygonGeoJson);
|
||||
parser.nextToken();
|
||||
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
|
||||
ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class);
|
||||
}
|
||||
|
||||
public void testParse_OGCPolygonWithoutHoles() throws IOException {
|
||||
|
@ -567,11 +593,12 @@ public class GeoJSONShapeParserTests extends ElasticsearchTestCase {
|
|||
.endArray()
|
||||
.endObject().string();
|
||||
|
||||
// add 3d point to test ISSUE #10501
|
||||
List<Coordinate> shellCoordinates = new ArrayList<>();
|
||||
shellCoordinates.add(new Coordinate(100, 0));
|
||||
shellCoordinates.add(new Coordinate(100, 0, 15.0));
|
||||
shellCoordinates.add(new Coordinate(101, 0));
|
||||
shellCoordinates.add(new Coordinate(101, 1));
|
||||
shellCoordinates.add(new Coordinate(100, 1));
|
||||
shellCoordinates.add(new Coordinate(100, 1, 10.0));
|
||||
shellCoordinates.add(new Coordinate(100, 0));
|
||||
|
||||
List<Coordinate> holeCoordinates = new ArrayList<>();
|
||||
|
|
|
@ -28,6 +28,7 @@ import com.spatial4j.core.shape.impl.PointImpl;
|
|||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
import com.vividsolutions.jts.geom.LineString;
|
||||
import com.vividsolutions.jts.geom.Polygon;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.geo.builders.PolygonBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.test.ElasticsearchTestCase;
|
||||
|
@ -39,14 +40,12 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.*;
|
|||
*/
|
||||
public class ShapeBuilderTests extends ElasticsearchTestCase {
|
||||
|
||||
@Test
|
||||
public void testNewPoint() {
|
||||
Point point = ShapeBuilder.newPoint(-100, 45).build();
|
||||
assertEquals(-100D, point.getX(), 0.0d);
|
||||
assertEquals(45D, point.getY(), 0.0d);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNewRectangle() {
|
||||
Rectangle rectangle = ShapeBuilder.newEnvelope().topLeft(-45, 30).bottomRight(45, -30).build();
|
||||
assertEquals(-45D, rectangle.getMinX(), 0.0d);
|
||||
|
@ -55,7 +54,6 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
assertEquals(30D, rectangle.getMaxY(), 0.0d);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNewPolygon() {
|
||||
Polygon polygon = ShapeBuilder.newPolygon()
|
||||
.point(-45, 30)
|
||||
|
@ -71,7 +69,6 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
assertEquals(exterior.getCoordinateN(3), new Coordinate(-45, -30));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNewPolygon_coordinate() {
|
||||
Polygon polygon = ShapeBuilder.newPolygon()
|
||||
.point(new Coordinate(-45, 30))
|
||||
|
@ -87,7 +84,6 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
assertEquals(exterior.getCoordinateN(3), new Coordinate(-45, -30));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNewPolygon_coordinates() {
|
||||
Polygon polygon = ShapeBuilder.newPolygon()
|
||||
.points(new Coordinate(-45, 30), new Coordinate(45, 30), new Coordinate(45, -30), new Coordinate(-45, -30), new Coordinate(-45, 30)).toPolygon();
|
||||
|
@ -99,7 +95,6 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
assertEquals(exterior.getCoordinateN(3), new Coordinate(-45, -30));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLineStringBuilder() {
|
||||
// Building a simple LineString
|
||||
ShapeBuilder.newLineString()
|
||||
|
@ -141,7 +136,6 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
.build();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMultiLineString() {
|
||||
ShapeBuilder.newMultiLinestring()
|
||||
.linestring()
|
||||
|
@ -186,7 +180,6 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
.close().build();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGeoCircle() {
|
||||
double earthCircumference = 40075016.69;
|
||||
Circle circle = ShapeBuilder.newCircleBuilder().center(0, 0).radius("100m").build();
|
||||
|
@ -212,7 +205,6 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
assertEquals(new PointImpl(randomLon, randomLat, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPolygonWrapping() {
|
||||
Shape shape = ShapeBuilder.newPolygon()
|
||||
.point(-150.0, 65.0)
|
||||
|
@ -224,7 +216,6 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
assertMultiPolygon(shape);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLineStringWrapping() {
|
||||
Shape shape = ShapeBuilder.newLineString()
|
||||
.point(-150.0, 65.0)
|
||||
|
@ -232,11 +223,9 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
.point(-250.0, -65.0)
|
||||
.point(-150.0, -65.0)
|
||||
.build();
|
||||
|
||||
assertMultiLineString(shape);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDatelineOGC() {
|
||||
// tests that the following shape (defined in counterclockwise OGC order)
|
||||
// https://gist.github.com/anonymous/7f1bb6d7e9cd72f5977c crosses the dateline
|
||||
|
@ -275,11 +264,9 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
.point(-179,1);
|
||||
|
||||
Shape shape = builder.close().build();
|
||||
|
||||
assertMultiPolygon(shape);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDateline() {
|
||||
// tests that the following shape (defined in clockwise non-OGC order)
|
||||
// https://gist.github.com/anonymous/7f1bb6d7e9cd72f5977c crosses the dateline
|
||||
|
@ -318,11 +305,9 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
.point(-179,1);
|
||||
|
||||
Shape shape = builder.close().build();
|
||||
|
||||
assertMultiPolygon(shape);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testComplexShapeWithHole() {
|
||||
PolygonBuilder builder = ShapeBuilder.newPolygon()
|
||||
.point(-85.0018514,37.1311314)
|
||||
|
@ -393,11 +378,9 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
.point(-85.0000002,37.1317672);
|
||||
|
||||
Shape shape = builder.close().build();
|
||||
|
||||
assertPolygon(shape);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testShapeWithHoleAtEdgeEndPoints() {
|
||||
PolygonBuilder builder = ShapeBuilder.newPolygon()
|
||||
.point(-4, 2)
|
||||
|
@ -416,11 +399,9 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
.point(4, 1);
|
||||
|
||||
Shape shape = builder.close().build();
|
||||
|
||||
assertPolygon(shape);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testShapeWithPointOnDateline() {
|
||||
PolygonBuilder builder = ShapeBuilder.newPolygon()
|
||||
.point(180, 0)
|
||||
|
@ -429,11 +410,9 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
.point(180, 0);
|
||||
|
||||
Shape shape = builder.close().build();
|
||||
|
||||
assertPolygon(shape);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testShapeWithEdgeAlongDateline() {
|
||||
// test case 1: test the positive side of the dateline
|
||||
PolygonBuilder builder = ShapeBuilder.newPolygon()
|
||||
|
@ -456,7 +435,6 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
assertPolygon(shape);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testShapeWithBoundaryHoles() {
|
||||
// test case 1: test the positive side of the dateline
|
||||
PolygonBuilder builder = ShapeBuilder.newPolygon()
|
||||
|
@ -481,7 +459,7 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
.point(179, 10)
|
||||
.point(179, -10)
|
||||
.point(-176, -15)
|
||||
.point(-172,0);
|
||||
.point(-172, 0);
|
||||
builder.hole()
|
||||
.point(-176, 10)
|
||||
.point(-176, -10)
|
||||
|
@ -492,6 +470,89 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
assertMultiPolygon(shape);
|
||||
}
|
||||
|
||||
public void testShapeWithTangentialHole() {
|
||||
// test a shape with one tangential (shared) vertex (should pass)
|
||||
PolygonBuilder builder = ShapeBuilder.newPolygon()
|
||||
.point(179, 10)
|
||||
.point(168, 15)
|
||||
.point(164, 0)
|
||||
.point(166, -15)
|
||||
.point(179, -10)
|
||||
.point(179, 10);
|
||||
builder.hole()
|
||||
.point(-177, 10)
|
||||
.point(-178, -10)
|
||||
.point(-180, -5)
|
||||
.point(-180, 5)
|
||||
.point(-177, 10);
|
||||
Shape shape = builder.close().build();
|
||||
assertMultiPolygon(shape);
|
||||
}
|
||||
|
||||
@Test(expected = InvalidShapeException.class)
|
||||
public void testShapeWithInvalidTangentialHole() {
|
||||
// test a shape with one invalid tangential (shared) vertex (should throw exception)
|
||||
PolygonBuilder builder = ShapeBuilder.newPolygon()
|
||||
.point(179, 10)
|
||||
.point(168, 15)
|
||||
.point(164, 0)
|
||||
.point(166, -15)
|
||||
.point(179, -10)
|
||||
.point(179, 10);
|
||||
builder.hole()
|
||||
.point(164, 0)
|
||||
.point(175, 10)
|
||||
.point(175, 5)
|
||||
.point(179, -10)
|
||||
.point(164, 0);
|
||||
Shape shape = builder.close().build();
|
||||
assertMultiPolygon(shape);
|
||||
}
|
||||
|
||||
public void testBoundaryShapeWithTangentialHole() {
|
||||
// test a shape with one tangential (shared) vertex for each hole (should pass)
|
||||
PolygonBuilder builder = ShapeBuilder.newPolygon()
|
||||
.point(-177, 10)
|
||||
.point(176, 15)
|
||||
.point(172, 0)
|
||||
.point(176, -15)
|
||||
.point(-177, -10)
|
||||
.point(-177, 10);
|
||||
builder.hole()
|
||||
.point(-177, 10)
|
||||
.point(-178, -10)
|
||||
.point(-180, -5)
|
||||
.point(-180, 5)
|
||||
.point(-177, 10);
|
||||
builder.hole()
|
||||
.point(172, 0)
|
||||
.point(176, 10)
|
||||
.point(176, -5)
|
||||
.point(172, 0);
|
||||
Shape shape = builder.close().build();
|
||||
assertMultiPolygon(shape);
|
||||
}
|
||||
|
||||
@Test(expected = InvalidShapeException.class)
|
||||
public void testBoundaryShapeWithInvalidTangentialHole() {
|
||||
// test shape with two tangential (shared) vertices (should throw exception)
|
||||
PolygonBuilder builder = ShapeBuilder.newPolygon()
|
||||
.point(-177, 10)
|
||||
.point(176, 15)
|
||||
.point(172, 0)
|
||||
.point(176, -15)
|
||||
.point(-177, -10)
|
||||
.point(-177, 10);
|
||||
builder.hole()
|
||||
.point(-177, 10)
|
||||
.point(172, 0)
|
||||
.point(180, -5)
|
||||
.point(176, -10)
|
||||
.point(-177, 10);
|
||||
Shape shape = builder.close().build();
|
||||
assertMultiPolygon(shape);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test an enveloping polygon around the max mercator bounds
|
||||
*/
|
||||
|
@ -510,7 +571,7 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
|
||||
@Test
|
||||
public void testShapeWithAlternateOrientation() {
|
||||
// ccw: should produce a single polygon spanning hemispheres
|
||||
// cw: should produce a multi polygon spanning hemispheres
|
||||
PolygonBuilder builder = ShapeBuilder.newPolygon()
|
||||
.point(180, 0)
|
||||
.point(176, 4)
|
||||
|
@ -531,4 +592,16 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
|
|||
|
||||
assertMultiPolygon(shape);
|
||||
}
|
||||
|
||||
@Test(expected = InvalidShapeException.class)
|
||||
public void testInvalidShapeWithConsecutiveDuplicatePoints() {
|
||||
PolygonBuilder builder = ShapeBuilder.newPolygon()
|
||||
.point(180, 0)
|
||||
.point(176, 4)
|
||||
.point(176, 4)
|
||||
.point(-176, 4)
|
||||
.point(180, 0);
|
||||
Shape shape = builder.close().build();
|
||||
assertPolygon(shape);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -303,7 +303,7 @@ public class CountQueryTests extends ElasticsearchIntegrationTest {
client().prepareIndex("test", "type1", "4").setSource("field3", "value3_4"));

CountResponse countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), limitFilter(2))).get();
assertHitCount(countResponse, 2l);
assertHitCount(countResponse, 4l); // limit is a no-op
}

@Test
|
||||
|
|
|
@ -1,88 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.flt;
|
||||
|
||||
import org.elasticsearch.action.search.SearchPhaseExecutionException;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.test.ElasticsearchIntegrationTest;
|
||||
import org.junit.Test;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.fuzzyLikeThisFieldQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.fuzzyLikeThisQuery;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class FuzzyLikeThisActionTests extends ElasticsearchIntegrationTest {
|
||||
|
||||
@Test
|
||||
// See issue https://github.com/elasticsearch/elasticsearch/issues/3252
|
||||
public void testNumericField() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type", "int_value", "type=integer"));
|
||||
ensureGreen();
|
||||
client().prepareIndex("test", "type", "1")
|
||||
.setSource(jsonBuilder().startObject().field("string_value", "lucene index").field("int_value", 1).endObject())
|
||||
.execute().actionGet();
|
||||
client().prepareIndex("test", "type", "2")
|
||||
.setSource(jsonBuilder().startObject().field("string_value", "elasticsearch index").field("int_value", 42).endObject())
|
||||
.execute().actionGet();
|
||||
|
||||
refresh();
|
||||
|
||||
// flt query with no field -> OK
|
||||
SearchResponse searchResponse = client().prepareSearch().setQuery(fuzzyLikeThisQuery().likeText("index")).execute().actionGet();
|
||||
assertThat(searchResponse.getFailedShards(), equalTo(0));
|
||||
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
|
||||
|
||||
// flt query with string fields
|
||||
searchResponse = client().prepareSearch().setQuery(fuzzyLikeThisQuery("string_value").likeText("index")).execute().actionGet();
|
||||
assertThat(searchResponse.getFailedShards(), equalTo(0));
|
||||
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
|
||||
|
||||
// flt query with at least a numeric field -> fail by default
|
||||
assertThrows(client().prepareSearch().setQuery(fuzzyLikeThisQuery("string_value", "int_value").likeText("index")), SearchPhaseExecutionException.class);
|
||||
|
||||
// flt query with at least a numeric field -> fail by command
|
||||
assertThrows(client().prepareSearch().setQuery(fuzzyLikeThisQuery("string_value", "int_value").likeText("index").failOnUnsupportedField(true)), SearchPhaseExecutionException.class);
|
||||
|
||||
|
||||
// flt query with at least a numeric field but fail_on_unsupported_field set to false
|
||||
searchResponse = client().prepareSearch().setQuery(fuzzyLikeThisQuery("string_value", "int_value").likeText("index").failOnUnsupportedField(false)).execute().actionGet();
|
||||
assertThat(searchResponse.getFailedShards(), equalTo(0));
|
||||
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
|
||||
|
||||
// flt field query on a numeric field -> failure by default
|
||||
assertThrows(client().prepareSearch().setQuery(fuzzyLikeThisFieldQuery("int_value").likeText("42")), SearchPhaseExecutionException.class);
|
||||
|
||||
// flt field query on a numeric field -> failure by command
|
||||
assertThrows(client().prepareSearch().setQuery(fuzzyLikeThisFieldQuery("int_value").likeText("42").failOnUnsupportedField(true)), SearchPhaseExecutionException.class);
|
||||
|
||||
// flt field query on a numeric field but fail_on_unsupported_field set to false
|
||||
searchResponse = client().prepareSearch().setQuery(fuzzyLikeThisFieldQuery("int_value").likeText("42").failOnUnsupportedField(false)).execute().actionGet();
|
||||
assertThat(searchResponse.getFailedShards(), equalTo(0));
|
||||
assertThat(searchResponse.getHits().getTotalHits(), equalTo(0L));
|
||||
}
|
||||
|
||||
}
|
|
@ -19,6 +19,7 @@
|
|||
package org.elasticsearch.index.mapper.geo;
|
||||
|
||||
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
|
||||
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
|
||||
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
|
||||
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
|
@ -31,9 +32,13 @@ import org.elasticsearch.test.ElasticsearchSingleNodeTest;
|
|||
import org.junit.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
|
||||
import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.isIn;
|
||||
|
||||
public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest {
|
||||
|
||||
|
@ -291,4 +296,63 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest {
|
|||
assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(50d)));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGeoShapeMapperMerge() throws Exception {
|
||||
String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("shape").field("type", "geo_shape").field("tree", "geohash").field("strategy", "recursive")
|
||||
.field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01).field("orientation", "ccw")
|
||||
.endObject().endObject().endObject().endObject().string();
|
||||
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
DocumentMapper stage1 = parser.parse(stage1Mapping);
|
||||
String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("shape").field("type", "geo_shape").field("tree", "quadtree")
|
||||
.field("strategy", "term").field("precision", "1km").field("tree_levels", 26).field("distance_error_pct", 26)
|
||||
.field("orientation", "cw").endObject().endObject().endObject().endObject().string();
|
||||
DocumentMapper stage2 = parser.parse(stage2Mapping);
|
||||
|
||||
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
|
||||
// check correct conflicts
|
||||
assertThat(mergeResult.hasConflicts(), equalTo(true));
|
||||
assertThat(mergeResult.conflicts().length, equalTo(3));
|
||||
ArrayList conflicts = new ArrayList<>(Arrays.asList(mergeResult.conflicts()));
|
||||
assertThat("mapper [shape] has different strategy", isIn(conflicts));
|
||||
assertThat("mapper [shape] has different tree", isIn(conflicts));
|
||||
assertThat("mapper [shape] has different tree_levels or precision", isIn(conflicts));
|
||||
|
||||
// verify nothing changed
|
||||
FieldMapper fieldMapper = stage1.mappers().name("shape").mapper();
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
|
||||
|
||||
GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
|
||||
PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy();
|
||||
|
||||
assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class));
|
||||
assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
|
||||
assertThat(strategy.getDistErrPct(), equalTo(0.01));
|
||||
assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(1d)));
|
||||
assertThat(geoShapeFieldMapper.orientation(), equalTo(ShapeBuilder.Orientation.CCW));
|
||||
|
||||
// correct mapping
|
||||
stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m")
|
||||
.field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string();
|
||||
stage2 = parser.parse(stage2Mapping);
|
||||
mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
|
||||
|
||||
// verify mapping changes, and ensure no failures
|
||||
assertThat(mergeResult.hasConflicts(), equalTo(false));
|
||||
|
||||
fieldMapper = stage1.mappers().name("shape").mapper();
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
|
||||
|
||||
geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
|
||||
strategy = geoShapeFieldMapper.defaultStrategy();
|
||||
|
||||
assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class));
|
||||
assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
|
||||
assertThat(strategy.getDistErrPct(), equalTo(0.001));
|
||||
assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(1d)));
|
||||
assertThat(geoShapeFieldMapper.orientation(), equalTo(ShapeBuilder.Orientation.CW));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -25,7 +25,6 @@ import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
|
|||
import org.apache.lucene.index.*;
|
||||
import org.apache.lucene.index.memory.MemoryIndex;
|
||||
import org.apache.lucene.queries.*;
|
||||
import org.apache.lucene.sandbox.queries.FuzzyLikeThisQuery;
|
||||
import org.apache.lucene.search.*;
|
||||
import org.apache.lucene.search.spans.*;
|
||||
import org.apache.lucene.spatial.prefix.IntersectsPrefixTreeFilter;
|
||||
|
@ -1281,20 +1280,6 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest {
|
|||
assertThat(getTerm(filteredQuery.getFilter()), equalTo(new Term("name.last", "banon")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLimitFilter() throws Exception {
|
||||
IndexQueryParserService queryParser = queryParser();
|
||||
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/limit-filter.json");
|
||||
Query parsedQuery = queryParser.parse(query).query();
|
||||
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
|
||||
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
|
||||
assertThat(filteredQuery.getFilter(), instanceOf(LimitFilter.class));
|
||||
assertThat(((LimitFilter) filteredQuery.getFilter()).getLimit(), equalTo(2));
|
||||
|
||||
assertThat(filteredQuery.getQuery(), instanceOf(TermQuery.class));
|
||||
assertThat(((TermQuery) filteredQuery.getQuery()).getTerm(), equalTo(new Term("name.first", "shay")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTermFilterQuery() throws Exception {
|
||||
IndexQueryParserService queryParser = queryParser();
|
||||
|
@ -1795,74 +1780,6 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest {
|
|||
return strings;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFuzzyLikeThisBuilder() throws Exception {
|
||||
IndexQueryParserService queryParser = queryParser();
|
||||
Query parsedQuery = queryParser.parse(fuzzyLikeThisQuery("name.first", "name.last").likeText("something").maxQueryTerms(12)).query();
|
||||
assertThat(parsedQuery, instanceOf(FuzzyLikeThisQuery.class));
|
||||
parsedQuery = queryParser.parse(fuzzyLikeThisQuery("name.first", "name.last").likeText("something").maxQueryTerms(12).fuzziness(Fuzziness.build("4"))).query();
|
||||
assertThat(parsedQuery, instanceOf(FuzzyLikeThisQuery.class));
|
||||
|
||||
Query parsedQuery1 = queryParser.parse(fuzzyLikeThisQuery("name.first", "name.last").likeText("something").maxQueryTerms(12).fuzziness(Fuzziness.build("4.0"))).query();
|
||||
assertThat(parsedQuery1, instanceOf(FuzzyLikeThisQuery.class));
|
||||
assertThat(parsedQuery, equalTo(parsedQuery1));
|
||||
|
||||
try {
|
||||
queryParser.parse(fuzzyLikeThisQuery("name.first", "name.last").likeText("something").maxQueryTerms(12).fuzziness(Fuzziness.build("4.1"))).query();
|
||||
fail("exception expected - fractional edit distance");
|
||||
} catch (ElasticsearchException ex) {
|
||||
//
|
||||
}
|
||||
|
||||
try {
|
||||
queryParser.parse(fuzzyLikeThisQuery("name.first", "name.last").likeText("something").maxQueryTerms(12).fuzziness(Fuzziness.build("-" + between(1, 100)))).query();
|
||||
fail("exception expected - negative edit distance");
|
||||
} catch (ElasticsearchException ex) {
|
||||
//
|
||||
}
|
||||
String[] queries = new String[] {
|
||||
"{\"flt\": {\"fields\": [\"comment\"], \"like_text\": \"FFFdfds\",\"fuzziness\": \"4\"}}",
|
||||
"{\"flt\": {\"fields\": [\"comment\"], \"like_text\": \"FFFdfds\",\"fuzziness\": \"4.00000000\"}}",
|
||||
"{\"flt\": {\"fields\": [\"comment\"], \"like_text\": \"FFFdfds\",\"fuzziness\": \"4.\"}}",
|
||||
"{\"flt\": {\"fields\": [\"comment\"], \"like_text\": \"FFFdfds\",\"fuzziness\": 4}}",
|
||||
"{\"flt\": {\"fields\": [\"comment\"], \"like_text\": \"FFFdfds\",\"fuzziness\": 4.0}}"
|
||||
};
|
||||
int iters = scaledRandomIntBetween(5, 100);
|
||||
for (int i = 0; i < iters; i++) {
|
||||
parsedQuery = queryParser.parse(new BytesArray((String) randomFrom(queries))).query();
|
||||
parsedQuery1 = queryParser.parse(new BytesArray((String) randomFrom(queries))).query();
|
||||
assertThat(parsedQuery1, instanceOf(FuzzyLikeThisQuery.class));
|
||||
assertThat(parsedQuery, instanceOf(FuzzyLikeThisQuery.class));
|
||||
assertThat(parsedQuery, equalTo(parsedQuery1));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFuzzyLikeThis() throws Exception {
|
||||
IndexQueryParserService queryParser = queryParser();
|
||||
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/fuzzyLikeThis.json");
|
||||
Query parsedQuery = queryParser.parse(query).query();
|
||||
assertThat(parsedQuery, instanceOf(FuzzyLikeThisQuery.class));
|
||||
// FuzzyLikeThisQuery fuzzyLikeThisQuery = (FuzzyLikeThisQuery) parsedQuery;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFuzzyLikeFieldThisBuilder() throws Exception {
|
||||
IndexQueryParserService queryParser = queryParser();
|
||||
Query parsedQuery = queryParser.parse(fuzzyLikeThisFieldQuery("name.first").likeText("something").maxQueryTerms(12)).query();
|
||||
assertThat(parsedQuery, instanceOf(FuzzyLikeThisQuery.class));
|
||||
// FuzzyLikeThisQuery fuzzyLikeThisQuery = (FuzzyLikeThisQuery) parsedQuery;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFuzzyLikeThisField() throws Exception {
|
||||
IndexQueryParserService queryParser = queryParser();
|
||||
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/fuzzyLikeThisField.json");
|
||||
Query parsedQuery = queryParser.parse(query).query();
|
||||
assertThat(parsedQuery, instanceOf(FuzzyLikeThisQuery.class));
|
||||
// FuzzyLikeThisQuery fuzzyLikeThisQuery = (FuzzyLikeThisQuery) parsedQuery;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGeoDistanceFilterNamed() throws IOException {
|
||||
IndexQueryParserService queryParser = queryParser();
|
||||
|
|
|
@ -1,7 +0,0 @@
{
fuzzy_like_this:{
fields:["name.first", "name.last"],
like_text:"something",
max_query_terms:12
}
}

@ -1,8 +0,0 @@
{
fuzzy_like_this_field:{
"name.first":{
like_text:"something",
max_query_terms:12
}
}
}

@ -1,14 +0,0 @@
{
"filtered":{
"filter":{
"limit":{
"value":2
}
},
"query":{
"term":{
"name.first":"shay"
}
}
}
}
|
|
@ -677,7 +677,7 @@ public class SearchQueryTests extends ElasticsearchIntegrationTest {
client().prepareIndex("test", "type1", "3").setSource("field2", "value2_3"),
client().prepareIndex("test", "type1", "4").setSource("field3", "value3_4"));

assertHitCount(client().prepareSearch().setQuery(filteredQuery(matchAllQuery(), limitFilter(2))).get(), 2l);
assertHitCount(client().prepareSearch().setQuery(filteredQuery(matchAllQuery(), limitFilter(2))).get(), 4l); // no-op
}

@Test
|
||||
|
|
|
@ -290,7 +290,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase
|
|||
cluster().beforeTest(getRandom(), getPerTestTransportClientRatio());
|
||||
cluster().wipe();
|
||||
randomIndexTemplate();
|
||||
logger.info("[{}#{}]: before test", getTestClass().getSimpleName(), getTestName());
|
||||
printTestMessage("before");
|
||||
} catch (OutOfMemoryError e) {
|
||||
if (e.getMessage().contains("unable to create new native thread")) {
|
||||
ElasticsearchTestCase.printStackDump(logger);
|
||||
|
@ -299,6 +299,14 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase
|
|||
}
|
||||
}
|
||||
|
||||
private void printTestMessage(String message) {
|
||||
if (isSuiteScopedTest(getClass())) {
|
||||
logger.info("[{}]: {} suite", getTestClass().getSimpleName(), message);
|
||||
} else {
|
||||
logger.info("[{}#{}]: {} test", getTestClass().getSimpleName(), getTestName(), message);
|
||||
}
|
||||
}
|
||||
|
||||
private Loading randomLoadingValues() {
|
||||
return randomFrom(Loading.values());
|
||||
}
|
||||
|
@ -590,9 +598,9 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase
|
|||
protected final void afterInternal(boolean afterClass) throws Exception {
|
||||
boolean success = false;
|
||||
try {
|
||||
logger.info("[{}#{}]: cleaning up after test", getTestClass().getSimpleName(), getTestName());
|
||||
clearDisruptionScheme();
|
||||
final Scope currentClusterScope = getCurrentClusterScope();
|
||||
printTestMessage("cleaning up after");
|
||||
clearDisruptionScheme();
|
||||
try {
|
||||
if (cluster() != null) {
|
||||
if (currentClusterScope != Scope.TEST) {
|
||||
|
@ -614,7 +622,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase
|
|||
clearClusters(); // it is ok to leave persistent / transient cluster state behind if scope is TEST
|
||||
}
|
||||
}
|
||||
logger.info("[{}#{}]: cleaned up after test", getTestClass().getSimpleName(), getTestName());
|
||||
printTestMessage("cleaned up after");
|
||||
success = true;
|
||||
} finally {
|
||||
if (!success) {
|
||||
|
|
|
@ -87,6 +87,7 @@ public abstract class ElasticsearchSingleNodeTest extends ElasticsearchTestCase

@After
public void tearDown() throws Exception {
logger.info("[{}#{}]: cleaning up after test", getTestClass().getSimpleName(), getTestName());
super.tearDown();
cleanup(resetNodeAfterTest());
}
|
||||
|
|
|
@ -19,12 +19,14 @@
|
|||
|
||||
package org.elasticsearch.test.store;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.SeedUtils;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.FilterDirectory;
|
||||
import org.apache.lucene.store.MMapDirectory;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper.Throttling;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.apache.lucene.store.NRTCachingDirectory;
|
||||
import org.apache.lucene.util.Constants;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
|
||||
|
@ -32,8 +34,11 @@ import org.elasticsearch.index.shard.ShardId;
|
|||
import org.elasticsearch.index.store.DirectoryService;
|
||||
import org.elasticsearch.index.store.IndexStore;
|
||||
import org.elasticsearch.index.store.fs.*;
|
||||
import com.carrotsearch.randomizedtesting.SeedUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.Collection;
|
||||
import java.util.Random;
|
||||
import java.util.Set;
|
||||
|
||||
|
@ -122,11 +127,26 @@ public class MockDirectoryHelper {
|
|||
private final boolean crash;
|
||||
private volatile RuntimeException closeException;
|
||||
private final Object lock = new Object();
|
||||
private final Set<String> superUnSyncedFiles;
|
||||
private final Random superRandomState;
|
||||
|
||||
public ElasticsearchMockDirectoryWrapper(Random random, Directory delegate, ESLogger logger, boolean crash) {
|
||||
super(random, delegate);
|
||||
this.crash = crash;
|
||||
this.logger = logger;
|
||||
|
||||
// TODO: remove all this and cutover to MockFS (DisableFsyncFS) instead
|
||||
try {
|
||||
Field field = MockDirectoryWrapper.class.getDeclaredField("unSyncedFiles");
|
||||
field.setAccessible(true);
|
||||
superUnSyncedFiles = (Set<String>) field.get(this);
|
||||
|
||||
field = MockDirectoryWrapper.class.getDeclaredField("randomState");
|
||||
field.setAccessible(true);
|
||||
superRandomState = (Random) field.get(this);
|
||||
} catch (ReflectiveOperationException roe) {
|
||||
throw new RuntimeException(roe);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -144,7 +164,32 @@ public class MockDirectoryHelper {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if {@link #in} must sync its files.
|
||||
* Currently, only {@link NRTCachingDirectory} requires sync'ing its files
|
||||
* because otherwise they are cached in an internal {@link RAMDirectory}. If
|
||||
* other directories require that too, they should be added to this method.
|
||||
*/
|
||||
private boolean mustSync() {
|
||||
Directory delegate = in;
|
||||
while (delegate instanceof FilterDirectory) {
|
||||
if (delegate instanceof NRTCachingDirectory) {
|
||||
return true;
|
||||
}
|
||||
delegate = ((FilterDirectory) delegate).getDelegate();
|
||||
}
|
||||
return delegate instanceof NRTCachingDirectory;
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void sync(Collection<String> names) throws IOException {
|
||||
// don't wear out our hardware so much in tests.
|
||||
if (LuceneTestCase.rarely(superRandomState) || mustSync()) {
|
||||
super.sync(names);
|
||||
} else {
|
||||
superUnSyncedFiles.removeAll(names);
|
||||
}
|
||||
}
|
||||
|
||||
public void awaitClosed(long timeout) throws InterruptedException {
|
||||
synchronized (lock) {
|
||||
|
|
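The sync override in the wrapper above throttles fsync during tests: most requests are dropped and only occasionally forwarded, unless the wrapped directory (such as NRTCachingDirectory) genuinely needs the sync. A hedged sketch of the same idea against Lucene's FilterDirectory (class name and the 10% probability are illustrative):

// Sketch only: a test directory wrapper that skips most fsync calls.
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FilterDirectory;

import java.io.IOException;
import java.util.Collection;
import java.util.Random;

public class RarelySyncingDirectory extends FilterDirectory {
    private final Random random;

    public RarelySyncingDirectory(Directory delegate, Random random) {
        super(delegate);
        this.random = random;
    }

    @Override
    public void sync(Collection<String> names) throws IOException {
        // only fsync about 10% of the time so tests don't wear out the disk
        if (random.nextInt(10) == 0) {
            super.sync(names);
        }
        // otherwise drop the request; losing unsynced data is acceptable in tests
    }
}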
|
@ -26,17 +26,13 @@ import org.apache.lucene.store.Directory;
|
|||
import org.apache.lucene.store.LockFactory;
|
||||
import org.apache.lucene.store.StoreRateLimiting;
|
||||
import org.apache.lucene.util.AbstractRandomizedTest;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.lease.Releasable;
|
||||
import org.elasticsearch.common.lease.Releasables;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.engine.Engine;
|
||||
import org.elasticsearch.index.engine.InternalEngine;
|
||||
import org.elasticsearch.index.settings.IndexSettings;
|
||||
import org.elasticsearch.index.shard.IndexShardException;
|
||||
import org.elasticsearch.index.shard.IndexShardState;
|
||||
|
@ -91,8 +87,10 @@ public class MockFSDirectoryService extends FsDirectoryService {
|
|||
// When the the internal engine closes we do a rollback, which removes uncommitted segments
|
||||
// By doing a commit flush we perform a Lucene commit, but don't clear the translog,
|
||||
// so that even in tests where don't flush we can check the integrity of the Lucene index
|
||||
if (indexShard.engine().hasUncommittedChanges()) { // only if we have any changes
|
||||
logger.info("{} flushing in order to run checkindex", indexShard.shardId());
|
||||
Releasables.close(indexShard.engine().snapshotIndex()); // Keep translog for tests that rely on replaying it
|
||||
}
|
||||
logger.info("{} flush finished in beforeIndexShardClosed", indexShard.shardId());
|
||||
canRun = true;
|
||||
}
|
||||
|
|
Binary file not shown.
Binary file not shown.