Merge remote-tracking branch 'es/master' into ccr
* es/master: (24 commits)
  Reduce synchronization on field data cache
  add json-processor support for non-map json types (#27335)
  Properly format IndexGraveyard deletion date as date (#27362)
  Upgrade AWS SDK Jackson Databind to 2.6.7.1
  Stop responding to ping requests before master abdication (#27329)
  [Test] Fix POI version in packaging tests
  Allow affix settings to specify dependencies (#27161)
  Tests: Improve size regex in documentation test (#26879)
  reword comment
  Remove unnecessary logger creation for doc values field data
  [Geo] Decouple geojson parse logic from ShapeBuilders
  [DOCS] Fixed link to docker content
  Plugins: Add versionless alias to all security policy codebase properties (#26756)
  [Test] #27342 Fix SearchRequests#testValidate
  [DOCS] Move X-Pack-specific Docker content (#27333)
  Fail queries with scroll that explicitely set request_cache (#27342)
  [Test] Fix S3BlobStoreContainerTests.testNumberOfMultiparts()
  Set minimum_master_nodes to all nodes for REST tests (#27344)
  [Tests] Relax allowed delta in extended_stats aggregation (#27171)
  Remove S3 output stream (#27280)
  ...
commit ba0b7079f9
@@ -64,10 +64,10 @@ class ClusterConfiguration {

     /**
      * Configuration of the setting <tt>discovery.zen.minimum_master_nodes</tt> on the nodes.
-     * In case of more than one node, this defaults to (number of nodes / 2) + 1
+     * In case of more than one node, this defaults to the number of nodes
      */
     @Input
-    Closure<Integer> minimumMasterNodes = { getNumNodes() > 1 ? getNumNodes().intdiv(2) + 1 : -1 }
+    Closure<Integer> minimumMasterNodes = { getNumNodes() > 1 ? getNumNodes() : -1 }

     @Input
     String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') +
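The hunk above swaps the quorum-based default for the full node count. A standalone sketch of the two formulas (hypothetical helper, not part of this commit):

```java
// Hypothetical helper contrasting the old quorum default with the new all-nodes default.
static int defaultMinimumMasterNodes(int numNodes, boolean useQuorum) {
    if (numNodes <= 1) {
        return -1;                                    // single-node clusters leave the setting unset
    }
    return useQuorum ? numNodes / 2 + 1 : numNodes;   // old behaviour vs. new behaviour
}
```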
@@ -23,7 +23,7 @@ import org.apache.lucene.search.join.ScoreMode;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.ShapeRelation;
 import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
-import org.elasticsearch.common.geo.builders.ShapeBuilders;
+import org.elasticsearch.common.geo.builders.MultiPointBuilder;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.index.query.GeoShapeQueryBuilder;
 import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
@@ -189,7 +189,7 @@ public class QueryDSLDocumentationTests extends ESTestCase {
         // tag::geo_shape
         GeoShapeQueryBuilder qb = geoShapeQuery(
                 "pin.location",                          // <1>
-                ShapeBuilders.newMultiPoint(             // <2>
+                new MultiPointBuilder(                   // <2>
                         new CoordinatesBuilder()
                     .coordinate(0, 0)
                     .coordinate(0, 10)
@@ -54,15 +54,23 @@ final class SettingsUpdater {
         transientSettings.put(currentState.metaData().transientSettings());
         changed |= clusterSettings.updateDynamicSettings(transientToApply, transientSettings, transientUpdates, "transient");

         Settings.Builder persistentSettings = Settings.builder();
         persistentSettings.put(currentState.metaData().persistentSettings());
         changed |= clusterSettings.updateDynamicSettings(persistentToApply, persistentSettings, persistentUpdates, "persistent");

         final ClusterState clusterState;
         if (changed) {
+            Settings transientFinalSettings = transientSettings.build();
+            Settings persistentFinalSettings = persistentSettings.build();
+            // both transient and persistent settings must be consistent by itself we can't allow dependencies to be
+            // in either of them otherwise a full cluster restart will break the settings validation
+            clusterSettings.validate(transientFinalSettings, true);
+            clusterSettings.validate(persistentFinalSettings, true);
+
             MetaData.Builder metaData = MetaData.builder(currentState.metaData())
-                    .persistentSettings(persistentSettings.build())
-                    .transientSettings(transientSettings.build());
+                    .persistentSettings(persistentFinalSettings)
+                    .transientSettings(transientFinalSettings);

             ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
             boolean updatedReadOnly = MetaData.SETTING_READ_ONLY_SETTING.get(metaData.persistentSettings())
@@ -77,7 +77,7 @@ public class TransportPutIndexTemplateAction extends TransportMasterNodeAction<P
         }
         final Settings.Builder templateSettingsBuilder = Settings.builder();
         templateSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX);
-        indexScopedSettings.validate(templateSettingsBuilder);
+        indexScopedSettings.validate(templateSettingsBuilder.build(), true); // templates must be consistent with regards to dependencies
         indexTemplateService.putTemplate(new MetaDataIndexTemplateService.PutRequest(cause, request.name())
                 .patterns(request.patterns())
                 .order(request.order())
@@ -169,6 +169,10 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest
             validationException =
                 addValidationError("using [from] is not allowed in a scroll context", validationException);
         }
+        if (requestCache != null && requestCache && scroll() != null) {
+            validationException =
+                addValidationError("[request_cache] cannot be used in a a scroll context", validationException);
+        }
         return validationException;
     }

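A usage sketch of the new validation rule (hypothetical test-style snippet; assumes the standard SearchRequest setters):

```java
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.unit.TimeValue;

class ScrollRequestCacheExample {
    static void example() {
        SearchRequest request = new SearchRequest("my-index");      // hypothetical index name
        request.scroll(TimeValue.timeValueMinutes(1));
        request.requestCache(true);
        ActionRequestValidationException e = request.validate();
        assert e != null;                                            // request_cache + scroll is now rejected
    }
}
```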
@@ -199,28 +199,28 @@ final class Security {
         try {
             // set codebase properties
             for (URL url : codebases) {
-                String shortName = PathUtils.get(url.toURI()).getFileName().toString();
-                if (shortName.endsWith(".jar") == false) {
+                String fileName = PathUtils.get(url.toURI()).getFileName().toString();
+                if (fileName.endsWith(".jar") == false) {
                     continue; // tests :(
                 }
-                String property = "codebase." + shortName;
-                if (shortName.startsWith("elasticsearch-rest-client")) {
-                    // The rest client is currently the only example where we have an elasticsearch built artifact
-                    // which needs special permissions in policy files when used. This temporary solution is to
-                    // pass in an extra system property that omits the -version.jar suffix the other properties have.
-                    // That allows the snapshots to reference snapshot builds of the client, and release builds to
-                    // referenced release builds of the client, all with the same grant statements.
-                    final String esVersion = Version.CURRENT + (Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : "");
-                    final int index = property.indexOf("-" + esVersion + ".jar");
-                    assert index >= 0;
-                    String restClientAlias = property.substring(0, index);
-                    propertiesSet.add(restClientAlias);
-                    System.setProperty(restClientAlias, url.toString());
+                // We attempt to use a versionless identifier for each codebase. This assumes a specific version
+                // format in the jar filename. While we cannot ensure all jars in all plugins use this format, nonconformity
+                // only means policy grants would need to include the entire jar filename as they always have before.
+                String property = "codebase." + fileName;
+                String aliasProperty = "codebase." + fileName.replaceFirst("-\\d+\\.\\d+.*\\.jar", "");
+                if (aliasProperty.equals(property) == false) {
+                    propertiesSet.add(aliasProperty);
+                    String previous = System.setProperty(aliasProperty, url.toString());
+                    if (previous != null) {
+                        throw new IllegalStateException("codebase property already set: " + aliasProperty + " -> " + previous +
+                            ", cannot set to " + url.toString());
+                    }
                 }
                 propertiesSet.add(property);
                 String previous = System.setProperty(property, url.toString());
                 if (previous != null) {
-                    throw new IllegalStateException("codebase property already set: " + shortName + "->" + previous);
+                    throw new IllegalStateException("codebase property already set: " + property + " -> " + previous +
+                        ", cannot set to " + url.toString());
                 }
             }
             return Policy.getInstance("JavaPolicy", new URIParameter(policyFile.toURI()));
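The alias derivation above can be illustrated in isolation (hypothetical jar name, not taken from the commit):

```java
// Hypothetical example of the versionless codebase alias computed above.
String fileName = "elasticsearch-rest-client-6.0.0.jar";                      // assumed jar file name
String property = "codebase." + fileName;                                     // codebase.elasticsearch-rest-client-6.0.0.jar
String aliasProperty = "codebase." + fileName.replaceFirst("-\\d+\\.\\d+.*\\.jar", "");
// aliasProperty == "codebase.elasticsearch-rest-client": policy files can grant permissions
// against the versionless alias instead of hard-coding the jar version.
```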
@@ -34,6 +34,8 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.Index;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -432,7 +434,7 @@ public final class IndexGraveyard implements MetaData.Custom {
         builder.startObject();
         builder.field(INDEX_KEY);
         index.toXContent(builder, params);
-        builder.timeValueField(DELETE_DATE_IN_MILLIS_KEY, DELETE_DATE_KEY, deleteDateInMillis, TimeUnit.MILLISECONDS);
+        builder.dateField(DELETE_DATE_IN_MILLIS_KEY, DELETE_DATE_KEY, deleteDateInMillis);
         return builder.endObject();
     }

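A sketch of the behavioural difference (field names from the hunk above; the rendered JSON is illustrative, not captured output):

```java
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

class GraveyardDateExample {
    static void example() throws Exception {
        long deleteDateInMillis = 1510675200000L;                    // assumed sample timestamp
        XContentBuilder builder = XContentFactory.jsonBuilder().humanReadable(true).startObject();
        // dateField renders the readable field as a formatted date, e.g. "delete_date": "2017-11-14T16:00:00.000Z";
        // the previous timeValueField rendered the same millis as an elapsed-time value instead.
        builder.dateField("delete_date_in_millis", "delete_date", deleteDateInMillis);
        builder.endObject();
    }
}
```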
@@ -220,10 +220,9 @@ public class MetaDataCreateIndexService extends AbstractComponent {
     private void onlyCreateIndex(final CreateIndexClusterStateUpdateRequest request,
                                  final ActionListener<ClusterStateUpdateResponse> listener) {
         Settings.Builder updatedSettingsBuilder = Settings.builder();
-        updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX);
-        indexScopedSettings.validate(updatedSettingsBuilder);
-        request.settings(updatedSettingsBuilder.build());
-
+        Settings build = updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX).build();
+        indexScopedSettings.validate(build, true); // we do validate here - index setting must be consistent
+        request.settings(build);
         clusterService.submitStateUpdateTask("create-index [" + request.index() + "], cause [" + request.cause() + "]",
             new IndexCreationTask(logger, allocationService, request, listener, indicesService, aliasValidator, xContentRegistry, settings,
                 this::validate));
@@ -420,7 +419,6 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                 tmpImdBuilder.primaryTerm(shardId, primaryTerm);
             }
         }
-
         // Set up everything, now locally create the index to see that things are ok, and apply
         final IndexMetaData tmpImd = tmpImdBuilder.build();
         ActiveShardCount waitForActiveShards = request.waitForActiveShards();
@@ -276,7 +276,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
         }

         try {
-            indexScopedSettings.validate(request.settings);
+            indexScopedSettings.validate(request.settings, true); // templates must be consistent with regards to dependencies
         } catch (IllegalArgumentException iae) {
             validationErrors.add(iae.getMessage());
             for (Throwable t : iae.getSuppressed()) {
@@ -54,6 +54,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
+import java.util.function.Predicate;

 import static org.elasticsearch.action.support.ContextPreservingActionListener.wrapPreservingContext;

@@ -163,7 +164,7 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
         Settings.Builder settingsForOpenIndices = Settings.builder();
         final Set<String> skippedSettings = new HashSet<>();

-        indexScopedSettings.validate(normalizedSettings);
+        indexScopedSettings.validate(normalizedSettings, false); // don't validate dependencies here we check it below
         // never allow to change the number of shards
         for (String key : normalizedSettings.keySet()) {
             Setting setting = indexScopedSettings.get(key);
@@ -240,7 +241,9 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
                         if (preserveExisting) {
                             indexSettings.put(indexMetaData.getSettings());
                         }
-                        metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings));
+                        Settings finalSettings = indexSettings.build();
+                        indexScopedSettings.validate(finalSettings.filter(k -> indexScopedSettings.isPrivateSetting(k) == false), true);
+                        metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(finalSettings));
                     }
                 }
             }
@@ -254,7 +257,9 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
                         if (preserveExisting) {
                             indexSettings.put(indexMetaData.getSettings());
                         }
-                        metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings));
+                        Settings finalSettings = indexSettings.build();
+                        indexScopedSettings.validate(finalSettings.filter(k -> indexScopedSettings.isPrivateSetting(k) == false), true);
+                        metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(finalSettings));
                     }
                 }
             }
@@ -0,0 +1,316 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.common.geo;
|
||||
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.geo.builders.CircleBuilder;
|
||||
import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
|
||||
import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
|
||||
import org.elasticsearch.common.geo.builders.LineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.MultiLineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.MultiPointBuilder;
|
||||
import org.elasticsearch.common.geo.builders.MultiPolygonBuilder;
|
||||
import org.elasticsearch.common.geo.builders.PointBuilder;
|
||||
import org.elasticsearch.common.geo.builders.PolygonBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation;
|
||||
import org.elasticsearch.common.geo.parsers.CoordinateNode;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Enumeration that lists all {@link GeoShapeType}s that can be parsed and indexed
|
||||
*/
|
||||
public enum GeoShapeType {
|
||||
POINT("point") {
|
||||
@Override
|
||||
public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
|
||||
Orientation orientation, boolean coerce) {
|
||||
return new PointBuilder().coordinate(validate(coordinates, coerce).coordinate);
|
||||
}
|
||||
|
||||
@Override
|
||||
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
|
||||
if (coordinates.isEmpty()) {
|
||||
throw new ElasticsearchParseException(
|
||||
"invalid number of points (0) provided when expecting a single coordinate ([lat, lng])");
|
||||
} else if (coordinates.children != null) {
|
||||
throw new ElasticsearchParseException("multipoint data provided when single point data expected.");
|
||||
}
|
||||
return coordinates;
|
||||
}
|
||||
},
|
||||
MULTIPOINT("multipoint") {
|
||||
@Override
|
||||
public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
|
||||
Orientation orientation, boolean coerce) {
|
||||
validate(coordinates, coerce);
|
||||
CoordinatesBuilder coordinatesBuilder = new CoordinatesBuilder();
|
||||
for (CoordinateNode node : coordinates.children) {
|
||||
coordinatesBuilder.coordinate(node.coordinate);
|
||||
}
|
||||
return new MultiPointBuilder(coordinatesBuilder.build());
|
||||
}
|
||||
|
||||
@Override
|
||||
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
|
||||
if (coordinates.children == null || coordinates.children.isEmpty()) {
|
||||
if (coordinates.coordinate != null) {
|
||||
throw new ElasticsearchParseException("single coordinate found when expecting an array of " +
|
||||
"coordinates. change type to point or change data to an array of >0 coordinates");
|
||||
}
|
||||
throw new ElasticsearchParseException("no data provided for multipoint object when expecting " +
|
||||
">0 points (e.g., [[lat, lng]] or [[lat, lng], ...])");
|
||||
} else {
|
||||
for (CoordinateNode point : coordinates.children) {
|
||||
POINT.validate(point, coerce);
|
||||
}
|
||||
}
|
||||
return coordinates;
|
||||
}
|
||||
|
||||
},
|
||||
LINESTRING("linestring") {
|
||||
@Override
|
||||
public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
|
||||
Orientation orientation, boolean coerce) {
|
||||
validate(coordinates, coerce);
|
||||
CoordinatesBuilder line = new CoordinatesBuilder();
|
||||
for (CoordinateNode node : coordinates.children) {
|
||||
line.coordinate(node.coordinate);
|
||||
}
|
||||
return new LineStringBuilder(line);
|
||||
}
|
||||
|
||||
@Override
|
||||
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
|
||||
if (coordinates.children.size() < 2) {
|
||||
throw new ElasticsearchParseException("invalid number of points in LineString (found [{}] - must be >= 2)",
|
||||
coordinates.children.size());
|
||||
}
|
||||
return coordinates;
|
||||
}
|
||||
},
|
||||
MULTILINESTRING("multilinestring") {
|
||||
@Override
|
||||
public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
|
||||
Orientation orientation, boolean coerce) {
|
||||
validate(coordinates, coerce);
|
||||
MultiLineStringBuilder multiline = new MultiLineStringBuilder();
|
||||
for (CoordinateNode node : coordinates.children) {
|
||||
multiline.linestring(LineStringBuilder.class.cast(LINESTRING.getBuilder(node, radius, orientation, coerce)));
|
||||
}
|
||||
return multiline;
|
||||
}
|
||||
|
||||
@Override
|
||||
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
|
||||
if (coordinates.children.size() < 1) {
|
||||
throw new ElasticsearchParseException("invalid number of lines in MultiLineString (found [{}] - must be >= 1)",
|
||||
coordinates.children.size());
|
||||
}
|
||||
return coordinates;
|
||||
}
|
||||
},
|
||||
POLYGON("polygon") {
|
||||
@Override
|
||||
public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
|
||||
Orientation orientation, boolean coerce) {
|
||||
validate(coordinates, coerce);
|
||||
// build shell
|
||||
LineStringBuilder shell = LineStringBuilder.class.cast(LINESTRING.getBuilder(coordinates.children.get(0),
|
||||
radius, orientation, coerce));
|
||||
// build polygon with shell and holes
|
||||
PolygonBuilder polygon = new PolygonBuilder(shell, orientation);
|
||||
for (int i = 1; i < coordinates.children.size(); ++i) {
|
||||
CoordinateNode child = coordinates.children.get(i);
|
||||
LineStringBuilder hole = LineStringBuilder.class.cast(LINESTRING.getBuilder(child, radius, orientation, coerce));
|
||||
polygon.hole(hole);
|
||||
}
|
||||
return polygon;
|
||||
}
|
||||
|
||||
void validateLinearRing(CoordinateNode coordinates, boolean coerce) {
|
||||
if (coordinates.children == null || coordinates.children.isEmpty()) {
|
||||
String error = "Invalid LinearRing found.";
|
||||
error += (coordinates.coordinate == null) ?
|
||||
" No coordinate array provided" : " Found a single coordinate when expecting a coordinate array";
|
||||
throw new ElasticsearchParseException(error);
|
||||
}
|
||||
|
||||
int numValidPts = coerce ? 3 : 4;
|
||||
if (coordinates.children.size() < numValidPts) {
|
||||
throw new ElasticsearchParseException("invalid number of points in LinearRing (found [{}] - must be >= [{}])",
|
||||
coordinates.children.size(), numValidPts);
|
||||
}
|
||||
// close linear ring iff coerce is set and ring is open, otherwise throw parse exception
|
||||
if (!coordinates.children.get(0).coordinate.equals(
|
||||
coordinates.children.get(coordinates.children.size() - 1).coordinate)) {
|
||||
if (coerce == true) {
|
||||
coordinates.children.add(coordinates.children.get(0));
|
||||
} else {
|
||||
throw new ElasticsearchParseException("invalid LinearRing found (coordinates are not closed)");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
|
||||
/**
|
||||
* Per GeoJSON spec (http://geojson.org/geojson-spec.html#linestring)
|
||||
* A LinearRing is closed LineString with 4 or more positions. The first and last positions
|
||||
* are equivalent (they represent equivalent points). Though a LinearRing is not explicitly
|
||||
* represented as a GeoJSON geometry type, it is referred to in the Polygon geometry type definition.
|
||||
*/
|
||||
if (coordinates.children == null || coordinates.children.isEmpty()) {
|
||||
throw new ElasticsearchParseException(
|
||||
"invalid LinearRing provided for type polygon. Linear ring must be an array of coordinates");
|
||||
}
|
||||
for (CoordinateNode ring : coordinates.children) {
|
||||
validateLinearRing(ring, coerce);
|
||||
}
|
||||
|
||||
return coordinates;
|
||||
}
|
||||
},
|
||||
MULTIPOLYGON("multipolygon") {
|
||||
@Override
|
||||
public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
|
||||
Orientation orientation, boolean coerce) {
|
||||
validate(coordinates, coerce);
|
||||
MultiPolygonBuilder polygons = new MultiPolygonBuilder(orientation);
|
||||
for (CoordinateNode node : coordinates.children) {
|
||||
polygons.polygon(PolygonBuilder.class.cast(POLYGON.getBuilder(node, radius, orientation, coerce)));
|
||||
}
|
||||
return polygons;
|
||||
}
|
||||
|
||||
@Override
|
||||
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
|
||||
// noop; todo validate at least 1 polygon to ensure valid multipolygon
|
||||
return coordinates;
|
||||
}
|
||||
},
|
||||
ENVELOPE("envelope") {
|
||||
@Override
|
||||
public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
|
||||
Orientation orientation, boolean coerce) {
|
||||
validate(coordinates, coerce);
|
||||
// verify coordinate bounds, correct if necessary
|
||||
Coordinate uL = coordinates.children.get(0).coordinate;
|
||||
Coordinate lR = coordinates.children.get(1).coordinate;
|
||||
if (((lR.x < uL.x) || (uL.y < lR.y))) {
|
||||
Coordinate uLtmp = uL;
|
||||
uL = new Coordinate(Math.min(uL.x, lR.x), Math.max(uL.y, lR.y));
|
||||
lR = new Coordinate(Math.max(uLtmp.x, lR.x), Math.min(uLtmp.y, lR.y));
|
||||
}
|
||||
return new EnvelopeBuilder(uL, lR);
|
||||
}
|
||||
|
||||
@Override
|
||||
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
|
||||
// validate the coordinate array for envelope type
|
||||
if (coordinates.children.size() != 2) {
|
||||
throw new ElasticsearchParseException(
|
||||
"invalid number of points [{}] provided for geo_shape [{}] when expecting an array of 2 coordinates",
|
||||
coordinates.children.size(), GeoShapeType.ENVELOPE.shapename);
|
||||
}
|
||||
return coordinates;
|
||||
}
|
||||
},
|
||||
CIRCLE("circle") {
|
||||
@Override
|
||||
public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
|
||||
Orientation orientation, boolean coerce) {
|
||||
return new CircleBuilder().center(coordinates.coordinate).radius(radius);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
|
||||
// noop
|
||||
return coordinates;
|
||||
}
|
||||
},
|
||||
GEOMETRYCOLLECTION("geometrycollection") {
|
||||
@Override
|
||||
public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
|
||||
Orientation orientation, boolean coerce) {
|
||||
// noop, handled in parser
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
CoordinateNode validate(CoordinateNode coordinates, boolean coerce) {
|
||||
// noop
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
private final String shapename;
|
||||
private static Map<String, GeoShapeType> shapeTypeMap = new HashMap<>();
|
||||
|
||||
static {
|
||||
for (GeoShapeType type : values()) {
|
||||
shapeTypeMap.put(type.shapename, type);
|
||||
}
|
||||
}
|
||||
|
||||
GeoShapeType(String shapename) {
|
||||
this.shapename = shapename;
|
||||
}
|
||||
|
||||
public String shapeName() {
|
||||
return shapename;
|
||||
}
|
||||
|
||||
public static GeoShapeType forName(String geoshapename) {
|
||||
String typename = geoshapename.toLowerCase(Locale.ROOT);
|
||||
if (shapeTypeMap.containsKey(typename)) {
|
||||
return shapeTypeMap.get(typename);
|
||||
}
|
||||
throw new IllegalArgumentException("unknown geo_shape ["+geoshapename+"]");
|
||||
}
|
||||
|
||||
public abstract ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
|
||||
ShapeBuilder.Orientation orientation, boolean coerce);
|
||||
abstract CoordinateNode validate(CoordinateNode coordinates, boolean coerce);
|
||||
|
||||
public static List<Entry> getShapeWriteables() {
|
||||
List<Entry> namedWriteables = new ArrayList<>();
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, PointBuilder.TYPE.shapeName(), PointBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, CircleBuilder.TYPE.shapeName(), CircleBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, EnvelopeBuilder.TYPE.shapeName(), EnvelopeBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, MultiPointBuilder.TYPE.shapeName(), MultiPointBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, LineStringBuilder.TYPE.shapeName(), LineStringBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, MultiLineStringBuilder.TYPE.shapeName(), MultiLineStringBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, PolygonBuilder.TYPE.shapeName(), PolygonBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, MultiPolygonBuilder.TYPE.shapeName(), MultiPolygonBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, GeometryCollectionBuilder.TYPE.shapeName(), GeometryCollectionBuilder::new));
|
||||
return namedWriteables;
|
||||
}
|
||||
}
|
|
@@ -19,6 +19,9 @@

 package org.elasticsearch.common.geo.builders;

+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.geo.GeoShapeType;
+import org.elasticsearch.common.geo.parsers.ShapeParser;
 import org.locationtech.spatial4j.shape.Circle;
 import com.vividsolutions.jts.geom.Coordinate;

@@ -31,9 +34,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import java.io.IOException;
 import java.util.Objects;

-public class CircleBuilder extends ShapeBuilder {
+public class CircleBuilder extends ShapeBuilder<Circle, CircleBuilder> {

-    public static final String FIELD_RADIUS = "radius";
+    public static final ParseField FIELD_RADIUS = new ParseField("radius");
     public static final GeoShapeType TYPE = GeoShapeType.CIRCLE;

     private DistanceUnit unit = DistanceUnit.DEFAULT;

@@ -148,9 +151,9 @@ public class CircleBuilder extends ShapeBuilder {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
-        builder.field(FIELD_TYPE, TYPE.shapeName());
-        builder.field(FIELD_RADIUS, unit.toString(radius));
-        builder.field(FIELD_COORDINATES);
+        builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
+        builder.field(FIELD_RADIUS.getPreferredName(), unit.toString(radius));
+        builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName());
         toXContent(builder, center);
         return builder.endObject();
     }

@@ -1,155 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.geo.builders;
|
||||
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* The {@link CoordinateCollection} is an abstract base implementation for {@link LineStringBuilder} and {@link MultiPointBuilder}.
|
||||
* It holds a common list of {@link Coordinate}, provides setters for adding elements to the list and can render this to XContent.
|
||||
*/
|
||||
public abstract class CoordinateCollection<E extends CoordinateCollection<E>> extends ShapeBuilder {
|
||||
|
||||
protected final List<Coordinate> coordinates;
|
||||
|
||||
/**
|
||||
* Construct a new collection of coordinates.
|
||||
* @param coordinates an initial list of coordinates
|
||||
* @throws IllegalArgumentException if coordinates is <tt>null</tt> or empty
|
||||
*/
|
||||
protected CoordinateCollection(List<Coordinate> coordinates) {
|
||||
if (coordinates == null || coordinates.size() == 0) {
|
||||
throw new IllegalArgumentException("cannot create point collection with empty set of points");
|
||||
}
|
||||
this.coordinates = coordinates;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read from a stream.
|
||||
*/
|
||||
protected CoordinateCollection(StreamInput in) throws IOException {
|
||||
int size = in.readVInt();
|
||||
coordinates = new ArrayList<>(size);
|
||||
for (int i=0; i < size; i++) {
|
||||
coordinates.add(readFromStream(in));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVInt(coordinates.size());
|
||||
for (Coordinate point : coordinates) {
|
||||
writeCoordinateTo(point, out);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private E thisRef() {
|
||||
return (E)this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new coordinate to the collection
|
||||
* @param longitude longitude of the coordinate
|
||||
* @param latitude latitude of the coordinate
|
||||
* @return this
|
||||
*/
|
||||
public E coordinate(double longitude, double latitude) {
|
||||
return this.coordinate(new Coordinate(longitude, latitude));
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new coordinate to the collection
|
||||
* @param coordinate coordinate of the point
|
||||
* @return this
|
||||
*/
|
||||
public E coordinate(Coordinate coordinate) {
|
||||
this.coordinates.add(coordinate);
|
||||
return thisRef();
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a array of coordinates to the collection
|
||||
*
|
||||
* @param coordinates array of {@link Coordinate}s to add
|
||||
* @return this
|
||||
*/
|
||||
public E coordinates(Coordinate...coordinates) {
|
||||
return this.coordinates(Arrays.asList(coordinates));
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a collection of coordinates to the collection
|
||||
*
|
||||
* @param coordinates array of {@link Coordinate}s to add
|
||||
* @return this
|
||||
*/
|
||||
public E coordinates(Collection<? extends Coordinate> coordinates) {
|
||||
this.coordinates.addAll(coordinates);
|
||||
return thisRef();
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy all coordinate to a new Array
|
||||
*
|
||||
* @param closed if set to true the first point of the array is repeated as last element
|
||||
* @return Array of coordinates
|
||||
*/
|
||||
protected Coordinate[] coordinates(boolean closed) {
|
||||
Coordinate[] result = coordinates.toArray(new Coordinate[coordinates.size() + (closed?1:0)]);
|
||||
if(closed) {
|
||||
result[result.length-1] = result[0];
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* builds an array of coordinates to a {@link XContentBuilder}
|
||||
*
|
||||
* @param builder builder to use
|
||||
* @param closed repeat the first point at the end of the array if it's not already defines as last element of the array
|
||||
* @return the builder
|
||||
*/
|
||||
protected XContentBuilder coordinatesToXcontent(XContentBuilder builder, boolean closed) throws IOException {
|
||||
builder.startArray();
|
||||
for(Coordinate coord : coordinates) {
|
||||
toXContent(builder, coord);
|
||||
}
|
||||
if(closed) {
|
||||
Coordinate start = coordinates.get(0);
|
||||
Coordinate end = coordinates.get(coordinates.size()-1);
|
||||
if(start.x != end.x || start.y != end.y) {
|
||||
toXContent(builder, coordinates.get(0));
|
||||
}
|
||||
}
|
||||
builder.endArray();
|
||||
return builder;
|
||||
}
|
||||
}
|
|
@ -19,6 +19,8 @@
|
|||
|
||||
package org.elasticsearch.common.geo.builders;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.locationtech.spatial4j.shape.Rectangle;
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
|
||||
|
@ -29,7 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class EnvelopeBuilder extends ShapeBuilder {
|
||||
public class EnvelopeBuilder extends ShapeBuilder<Rectangle, EnvelopeBuilder> {
|
||||
|
||||
public static final GeoShapeType TYPE = GeoShapeType.ENVELOPE;
|
||||
|
||||
|
@ -71,8 +73,8 @@ public class EnvelopeBuilder extends ShapeBuilder {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(FIELD_TYPE, TYPE.shapeName());
|
||||
builder.startArray(FIELD_COORDINATES);
|
||||
builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
|
||||
builder.startArray(ShapeParser.FIELD_COORDINATES.getPreferredName());
|
||||
toXContent(builder, topLeft);
|
||||
toXContent(builder, bottomRight);
|
||||
builder.endArray();
|
||||
|
|
|
@ -19,6 +19,8 @@
|
|||
|
||||
package org.elasticsearch.common.geo.builders;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
|
@ -125,8 +127,8 @@ public class GeometryCollectionBuilder extends ShapeBuilder {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(FIELD_TYPE, TYPE.shapeName());
|
||||
builder.startArray(FIELD_GEOMETRIES);
|
||||
builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
|
||||
builder.startArray(ShapeParser.FIELD_GEOMETRIES.getPreferredName());
|
||||
for (ShapeBuilder shape : shapes) {
|
||||
shape.toXContent(builder, params);
|
||||
}
|
||||
|
|
|
@ -24,17 +24,18 @@ import com.vividsolutions.jts.geom.Geometry;
|
|||
import com.vividsolutions.jts.geom.GeometryFactory;
|
||||
import com.vividsolutions.jts.geom.LineString;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
import org.locationtech.spatial4j.shape.jts.JtsGeometry;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class LineStringBuilder extends CoordinateCollection<LineStringBuilder> {
|
||||
public class LineStringBuilder extends ShapeBuilder<JtsGeometry, LineStringBuilder> {
|
||||
public static final GeoShapeType TYPE = GeoShapeType.LINESTRING;
|
||||
|
||||
/**
|
||||
|
@ -65,8 +66,8 @@ public class LineStringBuilder extends CoordinateCollection<LineStringBuilder> {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(FIELD_TYPE, TYPE.shapeName());
|
||||
builder.field(FIELD_COORDINATES);
|
||||
builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
|
||||
builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName());
|
||||
coordinatesToXcontent(builder, false);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
|
@ -91,7 +92,7 @@ public class LineStringBuilder extends CoordinateCollection<LineStringBuilder> {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Shape build() {
|
||||
public JtsGeometry build() {
|
||||
Coordinate[] coordinates = this.coordinates.toArray(new Coordinate[this.coordinates.size()]);
|
||||
Geometry geometry;
|
||||
if(wrapdateline) {
|
||||
|
@ -168,21 +169,4 @@ public class LineStringBuilder extends CoordinateCollection<LineStringBuilder> {
|
|||
}
|
||||
return coordinates;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(coordinates);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
LineStringBuilder other = (LineStringBuilder) obj;
|
||||
return Objects.equals(coordinates, other.coordinates);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,6 +19,8 @@
|
|||
|
||||
package org.elasticsearch.common.geo.builders;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
import com.vividsolutions.jts.geom.Geometry;
|
||||
|
@ -27,21 +29,19 @@ import com.vividsolutions.jts.geom.LineString;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.locationtech.spatial4j.shape.jts.JtsGeometry;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.Objects;
|
||||
|
||||
public class MultiLineStringBuilder extends ShapeBuilder {
|
||||
public class MultiLineStringBuilder extends ShapeBuilder<JtsGeometry, MultiLineStringBuilder> {
|
||||
|
||||
public static final GeoShapeType TYPE = GeoShapeType.MULTILINESTRING;
|
||||
|
||||
private final ArrayList<LineStringBuilder> lines = new ArrayList<>();
|
||||
|
||||
public MultiLineStringBuilder() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Read from a stream.
|
||||
*/
|
||||
|
@ -52,6 +52,10 @@ public class MultiLineStringBuilder extends ShapeBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
public MultiLineStringBuilder() {
|
||||
super();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVInt(lines.size());
|
||||
|
@ -81,8 +85,8 @@ public class MultiLineStringBuilder extends ShapeBuilder {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(FIELD_TYPE, TYPE.shapeName());
|
||||
builder.field(FIELD_COORDINATES);
|
||||
builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
|
||||
builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName());
|
||||
builder.startArray();
|
||||
for(LineStringBuilder line : lines) {
|
||||
line.coordinatesToXcontent(builder, false);
|
||||
|
@ -93,7 +97,7 @@ public class MultiLineStringBuilder extends ShapeBuilder {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Shape build() {
|
||||
public JtsGeometry build() {
|
||||
final Geometry geometry;
|
||||
if(wrapdateline) {
|
||||
ArrayList<LineString> parts = new ArrayList<>();
|
||||
|
|
|
@ -21,7 +21,9 @@ package org.elasticsearch.common.geo.builders;
|
|||
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.XShapeCollection;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
|
@ -32,7 +34,7 @@ import java.util.ArrayList;
|
|||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class MultiPointBuilder extends CoordinateCollection<MultiPointBuilder> {
|
||||
public class MultiPointBuilder extends ShapeBuilder<XShapeCollection<Point>, MultiPointBuilder> {
|
||||
|
||||
public static final GeoShapeType TYPE = GeoShapeType.MULTIPOINT;
|
||||
|
||||
|
@ -54,15 +56,15 @@ public class MultiPointBuilder extends CoordinateCollection<MultiPointBuilder> {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(FIELD_TYPE, TYPE.shapeName());
|
||||
builder.field(FIELD_COORDINATES);
|
||||
builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
|
||||
builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName());
|
||||
super.coordinatesToXcontent(builder, false);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Shape build() {
|
||||
public XShapeCollection<Point> build() {
|
||||
//Could wrap JtsGeometry but probably slower due to conversions to/from JTS in relate()
|
||||
//MultiPoint geometry = FACTORY.createMultiPoint(points.toArray(new Coordinate[points.size()]));
|
||||
List<Point> shapes = new ArrayList<>(coordinates.size());
|
||||
|
@ -78,21 +80,4 @@ public class MultiPointBuilder extends CoordinateCollection<MultiPointBuilder> {
|
|||
public GeoShapeType type() {
|
||||
return TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(coordinates);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
MultiPointBuilder other = (MultiPointBuilder) obj;
|
||||
return Objects.equals(coordinates, other.coordinates);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,6 +19,8 @@
|
|||
|
||||
package org.elasticsearch.common.geo.builders;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
|
||||
|
@ -102,9 +104,9 @@ public class MultiPolygonBuilder extends ShapeBuilder {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(FIELD_TYPE, TYPE.shapeName());
|
||||
builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT));
|
||||
builder.startArray(FIELD_COORDINATES);
|
||||
builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
|
||||
builder.field(ShapeParser.FIELD_ORIENTATION.getPreferredName(), orientation.name().toLowerCase(Locale.ROOT));
|
||||
builder.startArray(ShapeParser.FIELD_COORDINATES.getPreferredName());
|
||||
for(PolygonBuilder polygon : polygons) {
|
||||
builder.startArray();
|
||||
polygon.coordinatesArray(builder, params);
|
||||
|
|
|
@@ -19,86 +19,78 @@
|
|||
|
||||
package org.elasticsearch.common.geo.builders;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
import java.util.ArrayList;
|
||||
|
||||
public class PointBuilder extends ShapeBuilder {
|
||||
public class PointBuilder extends ShapeBuilder<Point, PointBuilder> {
|
||||
public static final GeoShapeType TYPE = GeoShapeType.POINT;
|
||||
|
||||
private Coordinate coordinate;
|
||||
|
||||
/**
|
||||
* Create a point at [0.0,0.0]
|
||||
*/
|
||||
public PointBuilder() {
|
||||
this.coordinate = ZERO_ZERO;
|
||||
super();
|
||||
this.coordinates.add(ZERO_ZERO);
|
||||
}
|
||||
|
||||
public PointBuilder(double lon, double lat) {
|
||||
//super(new ArrayList<>(1));
|
||||
super();
|
||||
this.coordinates.add(new Coordinate(lon, lat));
|
||||
}
|
||||
|
||||
/**
|
||||
* Read from a stream.
|
||||
*/
|
||||
public PointBuilder(StreamInput in) throws IOException {
|
||||
coordinate = readFromStream(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
writeCoordinateTo(coordinate, out);
|
||||
super(in);
|
||||
}
|
||||
|
||||
public PointBuilder coordinate(Coordinate coordinate) {
|
||||
this.coordinate = coordinate;
|
||||
this.coordinates.set(0, coordinate);
|
||||
return this;
|
||||
}
|
||||
|
||||
public double longitude() {
|
||||
return coordinate.x;
|
||||
return coordinates.get(0).x;
|
||||
}
|
||||
|
||||
public double latitude() {
|
||||
return coordinate.y;
|
||||
return coordinates.get(0).y;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new point
|
||||
*
|
||||
* @param longitude longitude of the point
|
||||
* @param latitude latitude of the point
|
||||
* @return a new {@link PointBuilder}
|
||||
*/
|
||||
public static PointBuilder newPoint(double longitude, double latitude) {
|
||||
return new PointBuilder().coordinate(new Coordinate(longitude, latitude));
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(FIELD_TYPE, TYPE.shapeName());
|
||||
builder.field(FIELD_COORDINATES);
|
||||
toXContent(builder, coordinate);
|
||||
builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
|
||||
builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName());
|
||||
toXContent(builder, coordinates.get(0));
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Point build() {
|
||||
return SPATIAL_CONTEXT.makePoint(coordinate.x, coordinate.y);
|
||||
return SPATIAL_CONTEXT.makePoint(coordinates.get(0).x, coordinates.get(0).y);
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoShapeType type() {
|
||||
return TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(coordinate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
PointBuilder other = (PointBuilder) obj;
|
||||
return Objects.equals(coordinate, other.coordinate);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,12 +26,15 @@ import com.vividsolutions.jts.geom.LinearRing;
|
|||
import com.vividsolutions.jts.geom.MultiPolygon;
|
||||
import com.vividsolutions.jts.geom.Polygon;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.util.set.Sets;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.locationtech.spatial4j.exception.InvalidShapeException;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
import org.locationtech.spatial4j.shape.jts.JtsGeometry;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -49,7 +52,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
|||
* Methods to wrap polygons at the dateline and building shapes from the data held by the
|
||||
* builder.
|
||||
*/
|
||||
public class PolygonBuilder extends ShapeBuilder {
|
||||
public class PolygonBuilder extends ShapeBuilder<JtsGeometry, PolygonBuilder> {
|
||||
|
||||
public static final GeoShapeType TYPE = GeoShapeType.POLYGON;
|
||||
|
||||
|
@ -222,7 +225,7 @@ public class PolygonBuilder extends ShapeBuilder {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Shape build() {
|
||||
public JtsGeometry build() {
|
||||
return jtsGeometry(buildGeometry(FACTORY, wrapdateline));
|
||||
}
|
||||
|
||||
|
@ -237,9 +240,9 @@ public class PolygonBuilder extends ShapeBuilder {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(FIELD_TYPE, TYPE.shapeName());
|
||||
builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT));
|
||||
builder.startArray(FIELD_COORDINATES);
|
||||
builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
|
||||
builder.field(ShapeParser.FIELD_ORIENTATION.getPreferredName(), orientation.name().toLowerCase(Locale.ROOT));
|
||||
builder.startArray(ShapeParser.FIELD_COORDINATES.getPreferredName());
|
||||
coordinatesArray(builder, params);
|
||||
builder.endArray();
|
||||
builder.endObject();
|
||||
|
|
|
@@ -25,18 +25,14 @@ import com.vividsolutions.jts.geom.GeometryFactory;
|
|||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.Assertions;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.logging.ESLoggerFactory;
|
||||
import org.elasticsearch.common.unit.DistanceUnit.Distance;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
|
||||
import org.locationtech.spatial4j.context.jts.JtsSpatialContext;
|
||||
import org.locationtech.spatial4j.exception.InvalidShapeException;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
|
@@ -45,14 +41,16 @@ import org.locationtech.spatial4j.shape.jts.JtsGeometry;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Locale;
 import java.util.Objects;

 /**
  * Basic class for building GeoJSON shapes like Polygons, Linestrings, etc
  */
-public abstract class ShapeBuilder implements NamedWriteable, ToXContentObject {
+public abstract class ShapeBuilder<T extends Shape, E extends ShapeBuilder<T,E>> implements NamedWriteable, ToXContentObject {

     protected static final Logger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName());

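The new recursive type parameter (`E extends ShapeBuilder<T,E>`) is the usual self-typed builder idiom; a minimal standalone sketch with hypothetical classes, unrelated to the Elasticsearch types:

```java
// Minimal illustration of the self-typed builder idiom adopted by ShapeBuilder.
abstract class Builder<T, E extends Builder<T, E>> {
    @SuppressWarnings("unchecked")
    E self() { return (E) this; }                     // mirrors the thisRef() helper in the new base class
    abstract T build();
}

final class GreetingBuilder extends Builder<String, GreetingBuilder> {
    private String name = "world";
    GreetingBuilder name(String name) { this.name = name; return self(); }   // fluent calls keep the concrete type
    @Override
    String build() { return "hello " + name; }
}
```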
@ -63,6 +61,8 @@ public abstract class ShapeBuilder implements NamedWriteable, ToXContentObject {
|
|||
DEBUG = Assertions.ENABLED;
|
||||
}
|
||||
|
||||
protected final List<Coordinate> coordinates;
|
||||
|
||||
public static final double DATELINE = 180;
|
||||
|
||||
/**
|
||||
|
@@ -85,7 +85,103 @@ public abstract class ShapeBuilder implements NamedWriteable, ToXContentObject {
|
|||
/** @see org.locationtech.spatial4j.shape.jts.JtsGeometry#index() */
|
||||
protected static final boolean AUTO_INDEX_JTS_GEOMETRY = true;//may want to turn off once SpatialStrategy impls do it.
|
||||
|
||||
/** default ctor */
|
||||
protected ShapeBuilder() {
|
||||
coordinates = new ArrayList<>();
|
||||
}
|
||||
|
||||
/** ctor from list of coordinates */
|
||||
protected ShapeBuilder(List<Coordinate> coordinates) {
|
||||
if (coordinates == null || coordinates.size() == 0) {
|
||||
throw new IllegalArgumentException("cannot create point collection with empty set of points");
|
||||
}
|
||||
this.coordinates = coordinates;
|
||||
}
|
||||
|
||||
/** ctor from serialized stream input */
|
||||
protected ShapeBuilder(StreamInput in) throws IOException {
|
||||
int size = in.readVInt();
|
||||
coordinates = new ArrayList<>(size);
|
||||
for (int i=0; i < size; i++) {
|
||||
coordinates.add(readFromStream(in));
|
||||
}
|
||||
}
|
||||
|
||||
protected static Coordinate readFromStream(StreamInput in) throws IOException {
|
||||
return new Coordinate(in.readDouble(), in.readDouble());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVInt(coordinates.size());
|
||||
for (Coordinate point : coordinates) {
|
||||
writeCoordinateTo(point, out);
|
||||
}
|
||||
}
|
||||
|
||||
protected static void writeCoordinateTo(Coordinate coordinate, StreamOutput out) throws IOException {
|
||||
out.writeDouble(coordinate.x);
|
||||
out.writeDouble(coordinate.y);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private E thisRef() {
|
||||
return (E)this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new coordinate to the collection
|
||||
* @param longitude longitude of the coordinate
|
||||
* @param latitude latitude of the coordinate
|
||||
* @return this
|
||||
*/
|
||||
public E coordinate(double longitude, double latitude) {
|
||||
return this.coordinate(new Coordinate(longitude, latitude));
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new coordinate to the collection
|
||||
* @param coordinate coordinate of the point
|
||||
* @return this
|
||||
*/
|
||||
public E coordinate(Coordinate coordinate) {
|
||||
this.coordinates.add(coordinate);
|
||||
return thisRef();
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a array of coordinates to the collection
|
||||
*
|
||||
* @param coordinates array of {@link Coordinate}s to add
|
||||
* @return this
|
||||
*/
|
||||
public E coordinates(Coordinate...coordinates) {
|
||||
return this.coordinates(Arrays.asList(coordinates));
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a collection of coordinates to the collection
|
||||
*
|
||||
* @param coordinates array of {@link Coordinate}s to add
|
||||
* @return this
|
||||
*/
|
||||
public E coordinates(Collection<? extends Coordinate> coordinates) {
|
||||
this.coordinates.addAll(coordinates);
|
||||
return thisRef();
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy all coordinate to a new Array
|
||||
*
|
||||
* @param closed if set to true the first point of the array is repeated as last element
|
||||
* @return Array of coordinates
|
||||
*/
|
||||
protected Coordinate[] coordinates(boolean closed) {
|
||||
Coordinate[] result = coordinates.toArray(new Coordinate[coordinates.size() + (closed?1:0)]);
|
||||
if(closed) {
|
||||
result[result.length-1] = result[0];
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
protected JtsGeometry jtsGeometry(Geometry geom) {
|
||||
|
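A hedged illustration of what the generified builder enables (not part of this commit; it assumes LineStringBuilder extends ShapeBuilder<JtsGeometry, LineStringBuilder>, as the builder classes referenced later in this diff do): the E type parameter lets the shared coordinate helpers return the concrete subclass, so calls chain without casts.

    LineStringBuilder line = new LineStringBuilder(new CoordinatesBuilder().coordinate(0, 0).coordinate(5, 5))
            .coordinate(10, 0);              // coordinate(...) now returns LineStringBuilder, not a raw ShapeBuilder
    JtsGeometry geometry = line.build();     // build() returns the concrete shape type T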
@@ -104,84 +200,7 @@ public abstract class ShapeBuilder implements NamedWriteable, ToXContentObject {
* the builder looses its validity. So this method should only be called once on a builder
|
||||
* @return new {@link Shape} defined by the builder
|
||||
*/
|
||||
public abstract Shape build();
|
||||
|
||||
/**
|
||||
* Recursive method which parses the arrays of coordinates used to define
|
||||
* Shapes
|
||||
*
|
||||
* @param parser
|
||||
* Parser that will be read from
|
||||
* @return CoordinateNode representing the start of the coordinate tree
|
||||
* @throws IOException
|
||||
* Thrown if an error occurs while reading from the
|
||||
* XContentParser
|
||||
*/
|
||||
private static CoordinateNode parseCoordinates(XContentParser parser) throws IOException {
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
|
||||
// Base cases
|
||||
if (token != XContentParser.Token.START_ARRAY &&
|
||||
token != XContentParser.Token.END_ARRAY &&
|
||||
token != XContentParser.Token.VALUE_NULL) {
|
||||
double lon = parser.doubleValue();
|
||||
token = parser.nextToken();
|
||||
double lat = parser.doubleValue();
|
||||
token = parser.nextToken();
|
||||
while (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
token = parser.nextToken();
|
||||
}
|
||||
return new CoordinateNode(new Coordinate(lon, lat));
|
||||
} else if (token == XContentParser.Token.VALUE_NULL) {
|
||||
throw new IllegalArgumentException("coordinates cannot contain NULL values)");
|
||||
}
|
||||
|
||||
List<CoordinateNode> nodes = new ArrayList<>();
|
||||
while (token != XContentParser.Token.END_ARRAY) {
|
||||
nodes.add(parseCoordinates(parser));
|
||||
token = parser.nextToken();
|
||||
}
|
||||
|
||||
return new CoordinateNode(nodes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ShapeBuilder} from {@link XContent}
|
||||
* @param parser parser to read the GeoShape from
|
||||
* @return {@link ShapeBuilder} read from the parser or null
|
||||
* if the parsers current token has been <code>null</code>
|
||||
* @throws IOException if the input could not be read
|
||||
*/
|
||||
public static ShapeBuilder parse(XContentParser parser) throws IOException {
|
||||
return GeoShapeType.parse(parser, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ShapeBuilder} from {@link XContent}
|
||||
* @param parser parser to read the GeoShape from
|
||||
* @param geoDocMapper document field mapper reference required for spatial parameters relevant
|
||||
* to the shape construction process (e.g., orientation)
|
||||
* todo: refactor to place build specific parameters in the SpatialContext
|
||||
* @return {@link ShapeBuilder} read from the parser or null
|
||||
* if the parsers current token has been <code>null</code>
|
||||
* @throws IOException if the input could not be read
|
||||
*/
|
||||
public static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper geoDocMapper) throws IOException {
|
||||
return GeoShapeType.parse(parser, geoDocMapper);
|
||||
}
|
||||
|
||||
protected static XContentBuilder toXContent(XContentBuilder builder, Coordinate coordinate) throws IOException {
|
||||
return builder.startArray().value(coordinate.x).value(coordinate.y).endArray();
|
||||
}
|
||||
|
||||
protected static void writeCoordinateTo(Coordinate coordinate, StreamOutput out) throws IOException {
|
||||
out.writeDouble(coordinate.x);
|
||||
out.writeDouble(coordinate.y);
|
||||
}
|
||||
|
||||
protected static Coordinate readFromStream(StreamInput in) throws IOException {
|
||||
return new Coordinate(in.readDouble(), in.readDouble());
|
||||
}
|
||||
public abstract T build();
|
||||
|
||||
protected static Coordinate shift(Coordinate coordinate, double dateline) {
|
||||
if (dateline == 0) {
|
||||
|
@@ -255,58 +274,6 @@ public abstract class ShapeBuilder implements NamedWriteable, ToXContentObject {
return numIntersections;
|
||||
}
|
||||
|
||||
/**
|
||||
* Node used to represent a tree of coordinates.
|
||||
* <p>
|
||||
* Can either be a leaf node consisting of a Coordinate, or a parent with
|
||||
* children
|
||||
*/
|
||||
protected static class CoordinateNode implements ToXContentObject {
|
||||
|
||||
protected final Coordinate coordinate;
|
||||
protected final List<CoordinateNode> children;
|
||||
|
||||
/**
|
||||
* Creates a new leaf CoordinateNode
|
||||
*
|
||||
* @param coordinate
|
||||
* Coordinate for the Node
|
||||
*/
|
||||
protected CoordinateNode(Coordinate coordinate) {
|
||||
this.coordinate = coordinate;
|
||||
this.children = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new parent CoordinateNode
|
||||
*
|
||||
* @param children
|
||||
* Children of the Node
|
||||
*/
|
||||
protected CoordinateNode(List<CoordinateNode> children) {
|
||||
this.children = children;
|
||||
this.coordinate = null;
|
||||
}
|
||||
|
||||
protected boolean isEmpty() {
|
||||
return (coordinate == null && (children == null || children.isEmpty()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (children == null) {
|
||||
builder.startArray().value(coordinate.x).value(coordinate.y).endArray();
|
||||
} else {
|
||||
builder.startArray();
|
||||
for (CoordinateNode child : children) {
|
||||
child.toXContent(builder, params);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This helper class implements a linked list for {@link Coordinate}. It contains
|
||||
* fields for a dateline intersection and component id
|
||||
|
@@ -415,293 +382,50 @@ public abstract class ShapeBuilder implements NamedWriteable, ToXContentObject {
}
|
||||
}
|
||||
|
||||
public static final String FIELD_TYPE = "type";
|
||||
public static final String FIELD_COORDINATES = "coordinates";
|
||||
public static final String FIELD_GEOMETRIES = "geometries";
|
||||
public static final String FIELD_ORIENTATION = "orientation";
|
||||
|
||||
protected static final boolean debugEnabled() {
|
||||
return LOGGER.isDebugEnabled() || DEBUG;
|
||||
}
|
||||
|
||||
protected static XContentBuilder toXContent(XContentBuilder builder, Coordinate coordinate) throws IOException {
|
||||
return builder.startArray().value(coordinate.x).value(coordinate.y).endArray();
|
||||
}
|
||||
|
||||
/**
|
||||
* Enumeration that lists all {@link GeoShapeType}s that can be handled
|
||||
* builds an array of coordinates to a {@link XContentBuilder}
|
||||
*
|
||||
* @param builder builder to use
|
||||
* @param closed repeat the first point at the end of the array if it's not already defines as last element of the array
|
||||
* @return the builder
|
||||
*/
|
||||
public enum GeoShapeType {
|
||||
POINT("point"),
|
||||
MULTIPOINT("multipoint"),
|
||||
LINESTRING("linestring"),
|
||||
MULTILINESTRING("multilinestring"),
|
||||
POLYGON("polygon"),
|
||||
MULTIPOLYGON("multipolygon"),
|
||||
GEOMETRYCOLLECTION("geometrycollection"),
|
||||
ENVELOPE("envelope"),
|
||||
CIRCLE("circle");
|
||||
|
||||
private final String shapename;
|
||||
|
||||
GeoShapeType(String shapename) {
|
||||
this.shapename = shapename;
|
||||
protected XContentBuilder coordinatesToXcontent(XContentBuilder builder, boolean closed) throws IOException {
|
||||
builder.startArray();
|
||||
for(Coordinate coord : coordinates) {
|
||||
toXContent(builder, coord);
|
||||
}
|
||||
|
||||
protected String shapeName() {
|
||||
return shapename;
|
||||
}
|
||||
|
||||
public static GeoShapeType forName(String geoshapename) {
|
||||
String typename = geoshapename.toLowerCase(Locale.ROOT);
|
||||
for (GeoShapeType type : values()) {
|
||||
if(type.shapename.equals(typename)) {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException("unknown geo_shape ["+geoshapename+"]");
|
||||
}
|
||||
|
||||
public static ShapeBuilder parse(XContentParser parser) throws IOException {
|
||||
return parse(parser, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the geometry specified by the source document and return a ShapeBuilder instance used to
|
||||
* build the actual geometry
|
||||
* @param parser - parse utility object including source document
|
||||
* @param shapeMapper - field mapper needed for index specific parameters
|
||||
* @return ShapeBuilder - a builder instance used to create the geometry
|
||||
*/
|
||||
public static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper shapeMapper) throws IOException {
|
||||
if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
|
||||
return null;
|
||||
} else if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
|
||||
throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates");
|
||||
}
|
||||
|
||||
GeoShapeType shapeType = null;
|
||||
Distance radius = null;
|
||||
CoordinateNode node = null;
|
||||
GeometryCollectionBuilder geometryCollections = null;
|
||||
|
||||
Orientation requestedOrientation = (shapeMapper == null) ? Orientation.RIGHT : shapeMapper.fieldType().orientation();
|
||||
boolean coerce = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.COERCE.value() : shapeMapper.coerce().value();
|
||||
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
String fieldName = parser.currentName();
|
||||
|
||||
if (FIELD_TYPE.equals(fieldName)) {
|
||||
parser.nextToken();
|
||||
shapeType = GeoShapeType.forName(parser.text());
|
||||
} else if (FIELD_COORDINATES.equals(fieldName)) {
|
||||
parser.nextToken();
|
||||
node = parseCoordinates(parser);
|
||||
} else if (FIELD_GEOMETRIES.equals(fieldName)) {
|
||||
parser.nextToken();
|
||||
geometryCollections = parseGeometries(parser, shapeMapper);
|
||||
} else if (CircleBuilder.FIELD_RADIUS.equals(fieldName)) {
|
||||
parser.nextToken();
|
||||
radius = Distance.parseDistance(parser.text());
|
||||
} else if (FIELD_ORIENTATION.equals(fieldName)) {
|
||||
parser.nextToken();
|
||||
requestedOrientation = Orientation.fromString(parser.text());
|
||||
} else {
|
||||
parser.nextToken();
|
||||
parser.skipChildren();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (shapeType == null) {
|
||||
throw new ElasticsearchParseException("shape type not included");
|
||||
} else if (node == null && GeoShapeType.GEOMETRYCOLLECTION != shapeType) {
|
||||
throw new ElasticsearchParseException("coordinates not included");
|
||||
} else if (geometryCollections == null && GeoShapeType.GEOMETRYCOLLECTION == shapeType) {
|
||||
throw new ElasticsearchParseException("geometries not included");
|
||||
} else if (radius != null && GeoShapeType.CIRCLE != shapeType) {
|
||||
throw new ElasticsearchParseException("field [{}] is supported for [{}] only", CircleBuilder.FIELD_RADIUS,
|
||||
CircleBuilder.TYPE);
|
||||
}
|
||||
|
||||
switch (shapeType) {
|
||||
case POINT: return parsePoint(node);
|
||||
case MULTIPOINT: return parseMultiPoint(node);
|
||||
case LINESTRING: return parseLineString(node);
|
||||
case MULTILINESTRING: return parseMultiLine(node);
|
||||
case POLYGON: return parsePolygon(node, requestedOrientation, coerce);
|
||||
case MULTIPOLYGON: return parseMultiPolygon(node, requestedOrientation, coerce);
|
||||
case CIRCLE: return parseCircle(node, radius);
|
||||
case ENVELOPE: return parseEnvelope(node);
|
||||
case GEOMETRYCOLLECTION: return geometryCollections;
|
||||
default:
|
||||
throw new ElasticsearchParseException("shape type [{}] not included", shapeType);
|
||||
if(closed) {
|
||||
Coordinate start = coordinates.get(0);
|
||||
Coordinate end = coordinates.get(coordinates.size()-1);
|
||||
if(start.x != end.x || start.y != end.y) {
|
||||
toXContent(builder, coordinates.get(0));
|
||||
}
|
||||
}
|
||||
builder.endArray();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected static void validatePointNode(CoordinateNode node) {
|
||||
if (node.isEmpty()) {
|
||||
throw new ElasticsearchParseException(
|
||||
"invalid number of points (0) provided when expecting a single coordinate ([lat, lng])");
|
||||
} else if (node.coordinate == null) {
|
||||
if (node.children.isEmpty() == false) {
|
||||
throw new ElasticsearchParseException("multipoint data provided when single point data expected.");
|
||||
}
|
||||
}
|
||||
}
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (!(o instanceof ShapeBuilder)) return false;
|
||||
|
||||
protected static PointBuilder parsePoint(CoordinateNode node) {
|
||||
validatePointNode(node);
|
||||
return ShapeBuilders.newPoint(node.coordinate);
|
||||
}
|
||||
ShapeBuilder<?,?> that = (ShapeBuilder<?,?>) o;
|
||||
|
||||
protected static CircleBuilder parseCircle(CoordinateNode coordinates, Distance radius) {
|
||||
return ShapeBuilders.newCircleBuilder().center(coordinates.coordinate).radius(radius);
|
||||
}
|
||||
return Objects.equals(coordinates, that.coordinates);
|
||||
}
|
||||
|
||||
protected static EnvelopeBuilder parseEnvelope(CoordinateNode coordinates) {
|
||||
// validate the coordinate array for envelope type
|
||||
if (coordinates.children.size() != 2) {
|
||||
throw new ElasticsearchParseException(
|
||||
"invalid number of points [{}] provided for geo_shape [{}] when expecting an array of 2 coordinates",
|
||||
coordinates.children.size(), GeoShapeType.ENVELOPE.shapename);
|
||||
}
|
||||
// verify coordinate bounds, correct if necessary
|
||||
Coordinate uL = coordinates.children.get(0).coordinate;
|
||||
Coordinate lR = coordinates.children.get(1).coordinate;
|
||||
if (((lR.x < uL.x) || (uL.y < lR.y))) {
|
||||
Coordinate uLtmp = uL;
|
||||
uL = new Coordinate(Math.min(uL.x, lR.x), Math.max(uL.y, lR.y));
|
||||
lR = new Coordinate(Math.max(uLtmp.x, lR.x), Math.min(uLtmp.y, lR.y));
|
||||
}
|
||||
return ShapeBuilders.newEnvelope(uL, lR);
|
||||
}
|
||||
|
||||
protected static void validateMultiPointNode(CoordinateNode coordinates) {
|
||||
if (coordinates.children == null || coordinates.children.isEmpty()) {
|
||||
if (coordinates.coordinate != null) {
|
||||
throw new ElasticsearchParseException("single coordinate found when expecting an array of " +
|
||||
"coordinates. change type to point or change data to an array of >0 coordinates");
|
||||
}
|
||||
throw new ElasticsearchParseException("no data provided for multipoint object when expecting " +
|
||||
">0 points (e.g., [[lat, lng]] or [[lat, lng], ...])");
|
||||
} else {
|
||||
for (CoordinateNode point : coordinates.children) {
|
||||
validatePointNode(point);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected static MultiPointBuilder parseMultiPoint(CoordinateNode coordinates) {
|
||||
validateMultiPointNode(coordinates);
|
||||
CoordinatesBuilder points = new CoordinatesBuilder();
|
||||
for (CoordinateNode node : coordinates.children) {
|
||||
points.coordinate(node.coordinate);
|
||||
}
|
||||
return new MultiPointBuilder(points.build());
|
||||
}
|
||||
|
||||
protected static LineStringBuilder parseLineString(CoordinateNode coordinates) {
|
||||
/**
|
||||
* Per GeoJSON spec (http://geojson.org/geojson-spec.html#linestring)
|
||||
* "coordinates" member must be an array of two or more positions
|
||||
* LineStringBuilder should throw a graceful exception if < 2 coordinates/points are provided
|
||||
*/
|
||||
if (coordinates.children.size() < 2) {
|
||||
throw new ElasticsearchParseException("invalid number of points in LineString (found [{}] - must be >= 2)",
|
||||
coordinates.children.size());
|
||||
}
|
||||
|
||||
CoordinatesBuilder line = new CoordinatesBuilder();
|
||||
for (CoordinateNode node : coordinates.children) {
|
||||
line.coordinate(node.coordinate);
|
||||
}
|
||||
return ShapeBuilders.newLineString(line);
|
||||
}
|
||||
|
||||
protected static MultiLineStringBuilder parseMultiLine(CoordinateNode coordinates) {
|
||||
MultiLineStringBuilder multiline = ShapeBuilders.newMultiLinestring();
|
||||
for (CoordinateNode node : coordinates.children) {
|
||||
multiline.linestring(parseLineString(node));
|
||||
}
|
||||
return multiline;
|
||||
}
|
||||
|
||||
protected static LineStringBuilder parseLinearRing(CoordinateNode coordinates, boolean coerce) {
|
||||
/**
|
||||
* Per GeoJSON spec (http://geojson.org/geojson-spec.html#linestring)
|
||||
* A LinearRing is closed LineString with 4 or more positions. The first and last positions
|
||||
* are equivalent (they represent equivalent points). Though a LinearRing is not explicitly
|
||||
* represented as a GeoJSON geometry type, it is referred to in the Polygon geometry type definition.
|
||||
*/
|
||||
if (coordinates.children == null) {
|
||||
String error = "Invalid LinearRing found.";
|
||||
error += (coordinates.coordinate == null) ?
|
||||
" No coordinate array provided" : " Found a single coordinate when expecting a coordinate array";
|
||||
throw new ElasticsearchParseException(error);
|
||||
}
|
||||
|
||||
int numValidPts = coerce ? 3 : 4;
|
||||
if (coordinates.children.size() < numValidPts) {
|
||||
throw new ElasticsearchParseException("invalid number of points in LinearRing (found [{}] - must be >= [{}])",
|
||||
coordinates.children.size(), numValidPts);
|
||||
}
|
||||
|
||||
if (!coordinates.children.get(0).coordinate.equals(
|
||||
coordinates.children.get(coordinates.children.size() - 1).coordinate)) {
|
||||
if (coerce) {
|
||||
coordinates.children.add(coordinates.children.get(0));
|
||||
} else {
|
||||
throw new ElasticsearchParseException("invalid LinearRing found (coordinates are not closed)");
|
||||
}
|
||||
}
|
||||
return parseLineString(coordinates);
|
||||
}
|
||||
|
||||
protected static PolygonBuilder parsePolygon(CoordinateNode coordinates, final Orientation orientation, final boolean coerce) {
|
||||
if (coordinates.children == null || coordinates.children.isEmpty()) {
|
||||
throw new ElasticsearchParseException(
|
||||
"invalid LinearRing provided for type polygon. Linear ring must be an array of coordinates");
|
||||
}
|
||||
|
||||
LineStringBuilder shell = parseLinearRing(coordinates.children.get(0), coerce);
|
||||
PolygonBuilder polygon = new PolygonBuilder(shell, orientation);
|
||||
for (int i = 1; i < coordinates.children.size(); i++) {
|
||||
polygon.hole(parseLinearRing(coordinates.children.get(i), coerce));
|
||||
}
|
||||
return polygon;
|
||||
}
|
||||
|
||||
protected static MultiPolygonBuilder parseMultiPolygon(CoordinateNode coordinates, final Orientation orientation,
|
||||
final boolean coerce) {
|
||||
MultiPolygonBuilder polygons = ShapeBuilders.newMultiPolygon(orientation);
|
||||
for (CoordinateNode node : coordinates.children) {
|
||||
polygons.polygon(parsePolygon(node, orientation, coerce));
|
||||
}
|
||||
return polygons;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the geometries array of a GeometryCollection
|
||||
*
|
||||
* @param parser Parser that will be read from
|
||||
* @return Geometry[] geometries of the GeometryCollection
|
||||
* @throws IOException Thrown if an error occurs while reading from the XContentParser
|
||||
*/
|
||||
protected static GeometryCollectionBuilder parseGeometries(XContentParser parser, GeoShapeFieldMapper mapper) throws
|
||||
IOException {
|
||||
if (parser.currentToken() != XContentParser.Token.START_ARRAY) {
|
||||
throw new ElasticsearchParseException("geometries must be an array of geojson objects");
|
||||
}
|
||||
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
GeometryCollectionBuilder geometryCollection = ShapeBuilders.newGeometryCollection();
|
||||
while (token != XContentParser.Token.END_ARRAY) {
|
||||
ShapeBuilder shapeBuilder = GeoShapeType.parse(parser);
|
||||
geometryCollection.shape(shapeBuilder);
|
||||
token = parser.nextToken();
|
||||
}
|
||||
|
||||
return geometryCollection;
|
||||
}
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(coordinates);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -1,153 +0,0 @@
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.geo.builders;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
|
||||
|
||||
/**
|
||||
* A collection of static methods for creating ShapeBuilders.
|
||||
*/
|
||||
public class ShapeBuilders {
|
||||
|
||||
/**
|
||||
* Create a new point
|
||||
*
|
||||
* @param longitude longitude of the point
|
||||
* @param latitude latitude of the point
|
||||
* @return a new {@link PointBuilder}
|
||||
*/
|
||||
public static PointBuilder newPoint(double longitude, double latitude) {
|
||||
return ShapeBuilders.newPoint(new Coordinate(longitude, latitude));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link PointBuilder} from a {@link Coordinate}
|
||||
* @param coordinate coordinate defining the position of the point
|
||||
* @return a new {@link PointBuilder}
|
||||
*/
|
||||
public static PointBuilder newPoint(Coordinate coordinate) {
|
||||
return new PointBuilder().coordinate(coordinate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new set of points
|
||||
* @return new {@link MultiPointBuilder}
|
||||
*/
|
||||
public static MultiPointBuilder newMultiPoint(List<Coordinate> points) {
|
||||
return new MultiPointBuilder(points);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new lineString
|
||||
* @return a new {@link LineStringBuilder}
|
||||
*/
|
||||
public static LineStringBuilder newLineString(List<Coordinate> list) {
|
||||
return new LineStringBuilder(list);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new lineString
|
||||
* @return a new {@link LineStringBuilder}
|
||||
*/
|
||||
public static LineStringBuilder newLineString(CoordinatesBuilder coordinates) {
|
||||
return new LineStringBuilder(coordinates);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Collection of lineStrings
|
||||
* @return a new {@link MultiLineStringBuilder}
|
||||
*/
|
||||
public static MultiLineStringBuilder newMultiLinestring() {
|
||||
return new MultiLineStringBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new PolygonBuilder
|
||||
* @return a new {@link PolygonBuilder}
|
||||
*/
|
||||
public static PolygonBuilder newPolygon(List<Coordinate> shell) {
|
||||
return new PolygonBuilder(new CoordinatesBuilder().coordinates(shell));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new PolygonBuilder
|
||||
* @return a new {@link PolygonBuilder}
|
||||
*/
|
||||
public static PolygonBuilder newPolygon(CoordinatesBuilder shell) {
|
||||
return new PolygonBuilder(shell);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Collection of polygons
|
||||
* @return a new {@link MultiPolygonBuilder}
|
||||
*/
|
||||
public static MultiPolygonBuilder newMultiPolygon() {
|
||||
return new MultiPolygonBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Collection of polygons
|
||||
* @return a new {@link MultiPolygonBuilder}
|
||||
*/
|
||||
public static MultiPolygonBuilder newMultiPolygon(ShapeBuilder.Orientation orientation) {
|
||||
return new MultiPolygonBuilder(orientation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new GeometryCollection
|
||||
* @return a new {@link GeometryCollectionBuilder}
|
||||
*/
|
||||
public static GeometryCollectionBuilder newGeometryCollection() {
|
||||
return new GeometryCollectionBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* create a new Circle
|
||||
*
|
||||
* @return a new {@link CircleBuilder}
|
||||
*/
|
||||
public static CircleBuilder newCircleBuilder() {
|
||||
return new CircleBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* create a new rectangle
|
||||
*
|
||||
* @return a new {@link EnvelopeBuilder}
|
||||
*/
|
||||
public static EnvelopeBuilder newEnvelope(Coordinate topLeft, Coordinate bottomRight) {
|
||||
return new EnvelopeBuilder(topLeft, bottomRight);
|
||||
}
|
||||
|
||||
public static void register(List<Entry> namedWriteables) {
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, PointBuilder.TYPE.shapeName(), PointBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, CircleBuilder.TYPE.shapeName(), CircleBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, EnvelopeBuilder.TYPE.shapeName(), EnvelopeBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, MultiPointBuilder.TYPE.shapeName(), MultiPointBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, LineStringBuilder.TYPE.shapeName(), LineStringBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, MultiLineStringBuilder.TYPE.shapeName(), MultiLineStringBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, PolygonBuilder.TYPE.shapeName(), PolygonBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, MultiPolygonBuilder.TYPE.shapeName(), MultiPolygonBuilder::new));
|
||||
namedWriteables.add(new Entry(ShapeBuilder.class, GeometryCollectionBuilder.TYPE.shapeName(), GeometryCollectionBuilder::new));
|
||||
}
|
||||
}
|
|
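With the ShapeBuilders helper class deleted, callers construct the concrete builders directly. A minimal sketch of the replacement pattern (illustrative only, coordinates invented for the example):

    // was: ShapeBuilders.newMultiPoint(points)
    MultiPointBuilder multiPoint = new MultiPointBuilder(
            new CoordinatesBuilder().coordinate(0, 0).coordinate(10, 10).build());
    // was: ShapeBuilders.newEnvelope(topLeft, bottomRight)
    EnvelopeBuilder envelope = new EnvelopeBuilder(new Coordinate(-10, 10), new Coordinate(10, -10));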
@@ -0,0 +1,81 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.common.geo.parsers;

import com.vividsolutions.jts.geom.Coordinate;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.List;

/**
 * Node used to represent a tree of coordinates.
 * <p>
 * Can either be a leaf node consisting of a Coordinate, or a parent with
 * children
 */
public class CoordinateNode implements ToXContentObject {
    public final Coordinate coordinate;
    public final List<CoordinateNode> children;

    /**
     * Creates a new leaf CoordinateNode
     *
     * @param coordinate
     *            Coordinate for the Node
     */
    protected CoordinateNode(Coordinate coordinate) {
        this.coordinate = coordinate;
        this.children = null;
    }

    /**
     * Creates a new parent CoordinateNode
     *
     * @param children
     *            Children of the Node
     */
    protected CoordinateNode(List<CoordinateNode> children) {
        this.children = children;
        this.coordinate = null;
    }

    public boolean isEmpty() {
        return (coordinate == null && (children == null || children.isEmpty()));
    }

    public boolean isMultiPoint() {
        return children != null && children.size() > 1;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (children == null) {
            builder.startArray().value(coordinate.x).value(coordinate.y).endArray();
        } else {
            builder.startArray();
            for (CoordinateNode child : children) {
                child.toXContent(builder, params);
            }
            builder.endArray();
        }
        return builder;
    }
}
@@ -0,0 +1,194 @@
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.common.geo.parsers;
|
||||
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.builders.CircleBuilder;
|
||||
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Parses shape geometry represented in geojson
|
||||
*
|
||||
* complies with geojson specification: https://tools.ietf.org/html/rfc7946
|
||||
*/
|
||||
abstract class GeoJsonParser {
|
||||
protected static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper shapeMapper)
|
||||
throws IOException {
|
||||
GeoShapeType shapeType = null;
|
||||
DistanceUnit.Distance radius = null;
|
||||
CoordinateNode coordinateNode = null;
|
||||
GeometryCollectionBuilder geometryCollections = null;
|
||||
|
||||
ShapeBuilder.Orientation requestedOrientation =
|
||||
(shapeMapper == null) ? ShapeBuilder.Orientation.RIGHT : shapeMapper.fieldType().orientation();
|
||||
Explicit<Boolean> coerce = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.COERCE : shapeMapper.coerce();
|
||||
|
||||
String malformedException = null;
|
||||
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
String fieldName = parser.currentName();
|
||||
|
||||
if (ShapeParser.FIELD_TYPE.match(fieldName)) {
|
||||
parser.nextToken();
|
||||
final GeoShapeType type = GeoShapeType.forName(parser.text());
|
||||
if (shapeType != null && shapeType.equals(type) == false) {
|
||||
malformedException = ShapeParser.FIELD_TYPE + " already parsed as ["
|
||||
+ shapeType + "] cannot redefine as [" + type + "]";
|
||||
} else {
|
||||
shapeType = type;
|
||||
}
|
||||
} else if (ShapeParser.FIELD_COORDINATES.match(fieldName)) {
|
||||
parser.nextToken();
|
||||
coordinateNode = parseCoordinates(parser);
|
||||
} else if (ShapeParser.FIELD_GEOMETRIES.match(fieldName)) {
|
||||
if (shapeType == null) {
|
||||
shapeType = GeoShapeType.GEOMETRYCOLLECTION;
|
||||
} else if (shapeType.equals(GeoShapeType.GEOMETRYCOLLECTION) == false) {
|
||||
malformedException = "cannot have [" + ShapeParser.FIELD_GEOMETRIES + "] with type set to ["
|
||||
+ shapeType + "]";
|
||||
}
|
||||
parser.nextToken();
|
||||
geometryCollections = parseGeometries(parser, shapeMapper);
|
||||
} else if (CircleBuilder.FIELD_RADIUS.match(fieldName)) {
|
||||
if (shapeType == null) {
|
||||
shapeType = GeoShapeType.CIRCLE;
|
||||
} else if (shapeType != null && shapeType.equals(GeoShapeType.CIRCLE) == false) {
|
||||
malformedException = "cannot have [" + CircleBuilder.FIELD_RADIUS + "] with type set to ["
|
||||
+ shapeType + "]";
|
||||
}
|
||||
parser.nextToken();
|
||||
radius = DistanceUnit.Distance.parseDistance(parser.text());
|
||||
} else if (ShapeParser.FIELD_ORIENTATION.match(fieldName)) {
|
||||
if (shapeType != null
|
||||
&& (shapeType.equals(GeoShapeType.POLYGON) || shapeType.equals(GeoShapeType.MULTIPOLYGON)) == false) {
|
||||
malformedException = "cannot have [" + ShapeParser.FIELD_ORIENTATION + "] with type set to [" + shapeType + "]";
|
||||
}
|
||||
parser.nextToken();
|
||||
requestedOrientation = ShapeBuilder.Orientation.fromString(parser.text());
|
||||
} else {
|
||||
parser.nextToken();
|
||||
parser.skipChildren();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (malformedException != null) {
|
||||
throw new ElasticsearchParseException(malformedException);
|
||||
} else if (shapeType == null) {
|
||||
throw new ElasticsearchParseException("shape type not included");
|
||||
} else if (coordinateNode == null && GeoShapeType.GEOMETRYCOLLECTION != shapeType) {
|
||||
throw new ElasticsearchParseException("coordinates not included");
|
||||
} else if (geometryCollections == null && GeoShapeType.GEOMETRYCOLLECTION == shapeType) {
|
||||
throw new ElasticsearchParseException("geometries not included");
|
||||
} else if (radius != null && GeoShapeType.CIRCLE != shapeType) {
|
||||
throw new ElasticsearchParseException("field [{}] is supported for [{}] only", CircleBuilder.FIELD_RADIUS,
|
||||
CircleBuilder.TYPE);
|
||||
}
|
||||
|
||||
if (shapeType == null) {
|
||||
throw new ElasticsearchParseException("shape type [{}] not included", shapeType);
|
||||
}
|
||||
|
||||
if (shapeType.equals(GeoShapeType.GEOMETRYCOLLECTION)) {
|
||||
return geometryCollections;
|
||||
}
|
||||
|
||||
return shapeType.getBuilder(coordinateNode, radius, requestedOrientation, coerce.value());
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursive method which parses the arrays of coordinates used to define
|
||||
* Shapes
|
||||
*
|
||||
* @param parser
|
||||
* Parser that will be read from
|
||||
* @return CoordinateNode representing the start of the coordinate tree
|
||||
* @throws IOException
|
||||
* Thrown if an error occurs while reading from the
|
||||
* XContentParser
|
||||
*/
|
||||
private static CoordinateNode parseCoordinates(XContentParser parser) throws IOException {
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
// Base cases
|
||||
if (token != XContentParser.Token.START_ARRAY &&
|
||||
token != XContentParser.Token.END_ARRAY &&
|
||||
token != XContentParser.Token.VALUE_NULL) {
|
||||
return new CoordinateNode(parseCoordinate(parser));
|
||||
} else if (token == XContentParser.Token.VALUE_NULL) {
|
||||
throw new IllegalArgumentException("coordinates cannot contain NULL values)");
|
||||
}
|
||||
|
||||
List<CoordinateNode> nodes = new ArrayList<>();
|
||||
while (token != XContentParser.Token.END_ARRAY) {
|
||||
nodes.add(parseCoordinates(parser));
|
||||
token = parser.nextToken();
|
||||
}
|
||||
|
||||
return new CoordinateNode(nodes);
|
||||
}
|
||||
|
||||
private static Coordinate parseCoordinate(XContentParser parser) throws IOException {
|
||||
double lon = parser.doubleValue();
|
||||
parser.nextToken();
|
||||
double lat = parser.doubleValue();
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
while (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
token = parser.nextToken();
|
||||
}
|
||||
// todo support z/alt
|
||||
return new Coordinate(lon, lat);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the geometries array of a GeometryCollection
|
||||
*
|
||||
* @param parser Parser that will be read from
|
||||
* @return Geometry[] geometries of the GeometryCollection
|
||||
* @throws IOException Thrown if an error occurs while reading from the XContentParser
|
||||
*/
|
||||
static GeometryCollectionBuilder parseGeometries(XContentParser parser, GeoShapeFieldMapper mapper) throws
|
||||
IOException {
|
||||
if (parser.currentToken() != XContentParser.Token.START_ARRAY) {
|
||||
throw new ElasticsearchParseException("geometries must be an array of geojson objects");
|
||||
}
|
||||
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
GeometryCollectionBuilder geometryCollection = new GeometryCollectionBuilder();
|
||||
while (token != XContentParser.Token.END_ARRAY) {
|
||||
ShapeBuilder shapeBuilder = ShapeParser.parse(parser);
|
||||
geometryCollection.shape(shapeBuilder);
|
||||
token = parser.nextToken();
|
||||
}
|
||||
|
||||
return geometryCollection;
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,68 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.common.geo.parsers;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;

import java.io.IOException;

/**
 * first point of entry for a shape parser
 */
public interface ShapeParser {
    ParseField FIELD_TYPE = new ParseField("type");
    ParseField FIELD_COORDINATES = new ParseField("coordinates");
    ParseField FIELD_GEOMETRIES = new ParseField("geometries");
    ParseField FIELD_ORIENTATION = new ParseField("orientation");

    /**
     * Create a new {@link ShapeBuilder} from {@link XContent}
     * @param parser parser to read the GeoShape from
     * @param shapeMapper document field mapper reference required for spatial parameters relevant
     *                    to the shape construction process (e.g., orientation)
     *                    todo: refactor to place build specific parameters in the SpatialContext
     * @return {@link ShapeBuilder} read from the parser or null
     *          if the parsers current token has been <code>null</code>
     * @throws IOException if the input could not be read
     */
    static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper shapeMapper) throws IOException {
        if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
            return null;
        } if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
            return GeoJsonParser.parse(parser, shapeMapper);
        }
        throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates");
    }

    /**
     * Create a new {@link ShapeBuilder} from {@link XContent}
     * @param parser parser to read the GeoShape from
     * @return {@link ShapeBuilder} read from the parser or null
     *          if the parsers current token has been <code>null</code>
     * @throws IOException if the input could not be read
     */
    static ShapeBuilder parse(XContentParser parser) throws IOException {
        return parse(parser, null);
    }
}
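A hedged usage sketch of the new entry point (not part of the diff; the createParser call reflects the XContent API of this period and may differ):

    String json = "{\"type\": \"point\", \"coordinates\": [13.4, 52.5]}";
    try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, json)) {
        parser.nextToken();                              // position the parser on START_OBJECT
        ShapeBuilder shape = ShapeParser.parse(parser);  // delegates to GeoJsonParser
    }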
@@ -264,20 +264,16 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
    }

    /**
     * Validates that all settings in the builder are registered and valid
     * Validates that all given settings are registered and valid
     * @param settings the settings to validate
     * @param validateDependencies if <code>true</code> settings dependencies are validated as well.
     * @see Setting#getSettingsDependencies(String)
     */
    public final void validate(Settings.Builder settingsBuilder) {
        validate(settingsBuilder.build());
    }

    /**
     * * Validates that all given settings are registered and valid
     */
    public final void validate(Settings settings) {
    public final void validate(Settings settings, boolean validateDependencies) {
        List<RuntimeException> exceptions = new ArrayList<>();
        for (String key : settings.keySet()) { // settings iterate in deterministic fashion
            try {
                validate(key, settings);
                validate(key, settings, validateDependencies);
            } catch (RuntimeException ex) {
                exceptions.add(ex);
            }
@@ -285,12 +281,11 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
        ExceptionsHelper.rethrowAndSuppress(exceptions);
    }

    /**
     * Validates that the setting is valid
     */
    public final void validate(String key, Settings settings) {
        Setting setting = get(key);
    void validate(String key, Settings settings, boolean validateDependencies) {
        Setting setting = getRaw(key);
        if (setting == null) {
            LevensteinDistance ld = new LevensteinDistance();
            List<Tuple<Float, String>> scoredKeys = new ArrayList<>();
@@ -315,6 +310,20 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
                "settings";
            }
            throw new IllegalArgumentException(msg);
        } else {
            Set<String> settingsDependencies = setting.getSettingsDependencies(key);
            if (setting.hasComplexMatcher()) {
                setting = setting.getConcreteSetting(key);
            }
            if (validateDependencies && settingsDependencies.isEmpty() == false) {
                Set<String> settingKeys = settings.keySet();
                for (String requiredSetting : settingsDependencies) {
                    if (settingKeys.contains(requiredSetting) == false) {
                        throw new IllegalArgumentException("Missing required setting ["
                            + requiredSetting + "] for setting [" + setting.getKey() + "]");
                    }
                }
            }
        }
        setting.get(settings);
    }

@@ -375,7 +384,18 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
/**
|
||||
* Returns the {@link Setting} for the given key or <code>null</code> if the setting can not be found.
|
||||
*/
|
||||
public Setting<?> get(String key) {
|
||||
public final Setting<?> get(String key) {
|
||||
Setting<?> raw = getRaw(key);
|
||||
if (raw == null) {
|
||||
return null;
|
||||
} if (raw.hasComplexMatcher()) {
|
||||
return raw.getConcreteSetting(key);
|
||||
} else {
|
||||
return raw;
|
||||
}
|
||||
}
|
||||
|
||||
private Setting<?> getRaw(String key) {
|
||||
Setting<?> setting = keySettings.get(key);
|
||||
if (setting != null) {
|
||||
return setting;
|
||||
|
@@ -383,7 +403,8 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
for (Map.Entry<String, Setting<?>> entry : complexMatchers.entrySet()) {
|
||||
if (entry.getValue().match(key)) {
|
||||
assert assertMatcher(key, 1);
|
||||
return entry.getValue().getConcreteSetting(key);
|
||||
assert entry.getValue().hasComplexMatcher();
|
||||
return entry.getValue();
|
||||
}
|
||||
}
|
||||
return null;
|
||||
|
@@ -513,7 +534,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
} else if (get(key) == null) {
|
||||
throw new IllegalArgumentException(type + " setting [" + key + "], not recognized");
|
||||
} else if (isNull == false && canUpdate.test(key)) {
|
||||
validate(key, toApply);
|
||||
validate(key, toApply, false); // we might not have a full picture here do to a dependency validation
|
||||
settingsBuilder.copy(key, toApply);
|
||||
updates.copy(key, toApply);
|
||||
changed = true;
|
||||
|
@@ -654,7 +675,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
* representation. Otherwise <code>false</code>
|
||||
*/
|
||||
// TODO this should be replaced by Setting.Property.HIDDEN or something like this.
|
||||
protected boolean isPrivateSetting(String key) {
|
||||
public boolean isPrivateSetting(String key) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -191,7 +191,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
}
|
||||
|
||||
@Override
|
||||
protected boolean isPrivateSetting(String key) {
|
||||
public boolean isPrivateSetting(String key) {
|
||||
switch (key) {
|
||||
case IndexMetaData.SETTING_CREATION_DATE:
|
||||
case IndexMetaData.SETTING_INDEX_UUID:
|
||||
|
|
|
@@ -42,6 +42,7 @@ import java.util.Arrays;
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.IdentityHashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
@@ -126,7 +127,7 @@ public class Setting<T> implements ToXContentObject {
private static final EnumSet<Property> EMPTY_PROPERTIES = EnumSet.noneOf(Property.class);
|
||||
|
||||
private Setting(Key key, @Nullable Setting<T> fallbackSetting, Function<Settings, String> defaultValue, Function<String, T> parser,
|
||||
Validator<T> validator, Property... properties) {
|
||||
Validator<T> validator, Property... properties) {
|
||||
assert this instanceof SecureSetting || this.isGroupSetting() || parser.apply(defaultValue.apply(Settings.EMPTY)) != null
|
||||
: "parser returned null";
|
||||
this.key = key;
|
||||
|
@@ -457,6 +458,14 @@ public class Setting<T> implements ToXContentObject {
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a set of settings that are required at validation time. Unless all of the dependencies are present in the settings
|
||||
* object validation of setting must fail.
|
||||
*/
|
||||
public Set<String> getSettingsDependencies(String key) {
|
||||
return Collections.emptySet();
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a new updater with a noop validator.
|
||||
*/
|
||||
|
@@ -519,11 +528,13 @@ public class Setting<T> implements ToXContentObject {
public static class AffixSetting<T> extends Setting<T> {
|
||||
private final AffixKey key;
|
||||
private final Function<String, Setting<T>> delegateFactory;
|
||||
private final Set<AffixSetting> dependencies;
|
||||
|
||||
public AffixSetting(AffixKey key, Setting<T> delegate, Function<String, Setting<T>> delegateFactory) {
|
||||
public AffixSetting(AffixKey key, Setting<T> delegate, Function<String, Setting<T>> delegateFactory, AffixSetting... dependencies) {
|
||||
super(key, delegate.defaultValue, delegate.parser, delegate.properties.toArray(new Property[0]));
|
||||
this.key = key;
|
||||
this.delegateFactory = delegateFactory;
|
||||
this.dependencies = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(dependencies)));
|
||||
}
|
||||
|
||||
boolean isGroupSetting() {
|
||||
|
@@ -534,6 +545,15 @@ public class Setting<T> implements ToXContentObject {
return settings.keySet().stream().filter((key) -> match(key)).map(settingKey -> key.getConcreteString(settingKey));
|
||||
}
|
||||
|
||||
public Set<String> getSettingsDependencies(String settingsKey) {
|
||||
if (dependencies.isEmpty()) {
|
||||
return Collections.emptySet();
|
||||
} else {
|
||||
String namespace = key.getNamespace(settingsKey);
|
||||
return dependencies.stream().map(s -> s.key.toConcreteKey(namespace).key).collect(Collectors.toSet());
|
||||
}
|
||||
}
|
||||
|
||||
AbstractScopedSettings.SettingUpdater<Map<AbstractScopedSettings.SettingUpdater<T>, T>> newAffixUpdater(
|
||||
BiConsumer<String, T> consumer, Logger logger, BiConsumer<String, T> validator) {
|
||||
return new AbstractScopedSettings.SettingUpdater<Map<AbstractScopedSettings.SettingUpdater<T>, T>>() {
|
||||
|
@@ -659,6 +679,13 @@ public class Setting<T> implements ToXContentObject {
return matchStream(settings).distinct().map(this::getConcreteSetting);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns distinct namespaces for the given settings
|
||||
*/
|
||||
public Set<String> getNamespaces(Settings settings) {
|
||||
return settings.keySet().stream().filter(this::match).map(key::getNamespace).collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a map of all namespaces to it's values give the provided settings
|
||||
*/
|
||||
|
@@ -1184,13 +1211,15 @@ public class Setting<T> implements ToXContentObject {
     * storage.${backend}.enable=[true|false] can easily be added with this setting. Yet, affix key settings don't support updaters
     * out of the box unless {@link #getConcreteSetting(String)} is used to pull the updater.
     */
    public static <T> AffixSetting<T> affixKeySetting(String prefix, String suffix, Function<String, Setting<T>> delegateFactory) {
        return affixKeySetting(new AffixKey(prefix, suffix), delegateFactory);
    public static <T> AffixSetting<T> affixKeySetting(String prefix, String suffix, Function<String, Setting<T>> delegateFactory,
                                                      AffixSetting... dependencies) {
        return affixKeySetting(new AffixKey(prefix, suffix), delegateFactory, dependencies);
    }

    private static <T> AffixSetting<T> affixKeySetting(AffixKey key, Function<String, Setting<T>> delegateFactory) {
    private static <T> AffixSetting<T> affixKeySetting(AffixKey key, Function<String, Setting<T>> delegateFactory,
                                                       AffixSetting... dependencies) {
        Setting<T> delegate = delegateFactory.apply("_na_");
        return new AffixSetting<>(key, delegate, delegateFactory);
        return new AffixSetting<>(key, delegate, delegateFactory, dependencies);
    };

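A minimal sketch of what the new dependencies parameter enables (illustrative only, the setting names are hypothetical): a per-namespace "user" setting that is only valid when the matching "password" setting is also present.

    AffixSetting<String> password = Setting.affixKeySetting("xpack.notification.account.", "password",
            key -> Setting.simpleString(key, Setting.Property.NodeScope));
    AffixSetting<String> user = Setting.affixKeySetting("xpack.notification.account.", "user",
            key -> Setting.simpleString(key, Setting.Property.NodeScope), password);
    // validate(settings, true) now rejects ...account.foo.user when ...account.foo.password is missing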
@@ -132,7 +132,7 @@ public class SettingsModule implements Module {
}
|
||||
}
|
||||
// by now we are fully configured, lets check node level settings for unregistered index settings
|
||||
clusterSettings.validate(settings);
|
||||
clusterSettings.validate(settings, true);
|
||||
this.settingsFilter = new SettingsFilter(settings, settingsFilterPattern);
|
||||
}
|
||||
|
||||
|
|
|
@@ -575,7 +575,8 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing {
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
if (exp instanceof ConnectTransportException || exp.getCause() instanceof ConnectTransportException) {
|
||||
if (exp instanceof ConnectTransportException || exp.getCause() instanceof ConnectTransportException ||
|
||||
exp.getCause() instanceof AlreadyClosedException) {
|
||||
// ok, not connected...
|
||||
logger.trace((Supplier<?>) () -> new ParameterizedMessage("failed to connect to {}", node), exp);
|
||||
} else if (closed == false) {
|
||||
|
@@ -608,6 +609,9 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing {
|
||||
@Override
|
||||
public void messageReceived(UnicastPingRequest request, TransportChannel channel) throws Exception {
|
||||
if (closed) {
|
||||
throw new AlreadyClosedException("node is shutting down");
|
||||
}
|
||||
if (request.pingResponse.clusterName().equals(clusterName)) {
|
||||
channel.sendResponse(handlePingRequest(request));
|
||||
} else {
|
||||
|
|
|
@@ -114,19 +114,24 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
        final String fieldName = fieldType.name();
        IndexFieldData.Builder builder = fieldType.fielddataBuilder(fullyQualifiedIndexName);

        IndexFieldDataCache cache;
        synchronized (this) {
            cache = fieldDataCaches.get(fieldName);
            if (cache == null) {
                String cacheType = indexSettings.getValue(INDEX_FIELDDATA_CACHE_KEY);
                if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) {
                    cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName);
                } else if ("none".equals(cacheType)){
                    cache = new IndexFieldDataCache.None();
                } else {
                    throw new IllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldName + "]");
        IndexFieldDataCache cache = fieldDataCaches.get(fieldName);
        if (cache == null) {
            //for perf reason, only synchronize when cache is null
            synchronized (this) {
                cache = fieldDataCaches.get(fieldName);
                //double checked locking to make sure it is thread safe
                //especially when other threads calling clear() or clearField()
                if (cache == null) {
                    String cacheType = indexSettings.getValue(INDEX_FIELDDATA_CACHE_KEY);
                    if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) {
                        cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName);
                    } else if ("none".equals(cacheType)){
                        cache = new IndexFieldDataCache.None();
                    } else {
                        throw new IllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldName + "]");
                    }
                    fieldDataCaches.put(fieldName, cache);
                }
                fieldDataCaches.put(fieldName, cache);
            }
        }

@@ -46,13 +46,11 @@ public abstract class DocValuesIndexFieldData {
|
||||
protected final Index index;
|
||||
protected final String fieldName;
|
||||
protected final Logger logger;
|
||||
|
||||
public DocValuesIndexFieldData(Index index, String fieldName) {
|
||||
super();
|
||||
this.index = index;
|
||||
this.fieldName = fieldName;
|
||||
this.logger = Loggers.getLogger(getClass());
|
||||
}
|
||||
|
||||
public final String getFieldName() {
|
||||
|
|
|
@@ -19,6 +19,7 @@
|
||||
package org.elasticsearch.index.fielddata.plain;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.OrdinalMap;
|
||||
|
@@ -28,6 +29,7 @@ import org.apache.lucene.search.SortedSetSelector;
import org.apache.lucene.search.SortedSetSortField;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
|
||||
|
@@ -48,6 +50,7 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i
private final IndexFieldDataCache cache;
|
||||
private final CircuitBreakerService breakerService;
|
||||
private final Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction;
|
||||
private static final Logger logger = Loggers.getLogger(SortedSetDVOrdinalsIndexFieldData.class);
|
||||
|
||||
public SortedSetDVOrdinalsIndexFieldData(IndexSettings indexSettings, IndexFieldDataCache cache, String fieldName,
|
||||
CircuitBreakerService breakerService, Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction) {
|
||||
|
|
|
@@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.DocValuesFieldExistsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
|
||||
|
@ -37,6 +36,7 @@ import org.elasticsearch.common.geo.GeoUtils;
|
|||
import org.elasticsearch.common.geo.SpatialStrategy;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -54,6 +54,8 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_MALFORMED;
|
||||
|
||||
/**
|
||||
* FieldMapper for indexing {@link org.locationtech.spatial4j.shape.Shape}s.
|
||||
* <p>
|
||||
|
@ -96,6 +98,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
public static final Orientation ORIENTATION = Orientation.RIGHT;
|
||||
public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d;
|
||||
public static final Explicit<Boolean> COERCE = new Explicit<>(false, false);
|
||||
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false);
|
||||
|
||||
public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType();
|
||||
|
||||
|
@ -115,6 +118,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
public static class Builder extends FieldMapper.Builder<Builder, GeoShapeFieldMapper> {
|
||||
|
||||
private Boolean coerce;
|
||||
private Boolean ignoreMalformed;
|
||||
|
||||
public Builder(String name) {
|
||||
super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
|
||||
|
@ -145,6 +149,21 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
return Defaults.COERCE;
|
||||
}
|
||||
|
||||
public Builder ignoreMalformed(boolean ignoreMalformed) {
|
||||
this.ignoreMalformed = ignoreMalformed;
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
|
||||
if (ignoreMalformed != null) {
|
||||
return new Explicit<>(ignoreMalformed, true);
|
||||
}
|
||||
if (context.indexSettings() != null) {
|
||||
return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false);
|
||||
}
|
||||
return Defaults.IGNORE_MALFORMED;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoShapeFieldMapper build(BuilderContext context) {
|
||||
GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType;
|
||||
|
@ -154,8 +173,8 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
}
|
||||
setupFieldType(context);
|
||||
|
||||
return new GeoShapeFieldMapper(name, fieldType, coerce(context), context.indexSettings(), multiFieldsBuilder.build(this,
|
||||
context), copyTo);
|
||||
return new GeoShapeFieldMapper(name, fieldType, ignoreMalformed(context), coerce(context), context.indexSettings(),
|
||||
multiFieldsBuilder.build(this, context), copyTo);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -186,6 +205,9 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
} else if (Names.STRATEGY.equals(fieldName)) {
|
||||
builder.fieldType().setStrategyName(fieldNode.toString());
|
||||
iterator.remove();
|
||||
} else if (IGNORE_MALFORMED.equals(fieldName)) {
|
||||
builder.ignoreMalformed(TypeParsers.nodeBooleanValue(fieldName, "ignore_malformed", fieldNode, parserContext));
|
||||
iterator.remove();
|
||||
} else if (Names.COERCE.equals(fieldName)) {
|
||||
builder.coerce(TypeParsers.nodeBooleanValue(fieldName, Names.COERCE, fieldNode, parserContext));
|
||||
iterator.remove();
|
||||
|
@ -428,11 +450,13 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
protected Explicit<Boolean> coerce;
|
||||
protected Explicit<Boolean> ignoreMalformed;
|
||||
|
||||
public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, Explicit<Boolean> coerce, Settings indexSettings,
|
||||
MultiFields multiFields, CopyTo copyTo) {
|
||||
public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, Explicit<Boolean> ignoreMalformed,
|
||||
Explicit<Boolean> coerce, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
|
||||
super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo);
|
||||
this.coerce = coerce;
|
||||
this.ignoreMalformed = ignoreMalformed;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -445,7 +469,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
try {
|
||||
Shape shape = context.parseExternalValue(Shape.class);
|
||||
if (shape == null) {
|
||||
ShapeBuilder shapeBuilder = ShapeBuilder.parse(context.parser(), this);
|
||||
ShapeBuilder shapeBuilder = ShapeParser.parse(context.parser(), this);
|
||||
if (shapeBuilder == null) {
|
||||
return null;
|
||||
}
|
||||
|
@ -453,7 +477,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
}
|
||||
if (fieldType().pointsOnly() && !(shape instanceof Point)) {
|
||||
throw new MapperParsingException("[{" + fieldType().name() + "}] is configured for points only but a " +
|
||||
((shape instanceof JtsGeometry) ? ((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass()) + " was found");
|
||||
((shape instanceof JtsGeometry) ? ((JtsGeometry) shape).getGeom().getGeometryType() : shape.getClass()) + " was found");
|
||||
}
|
||||
List<IndexableField> fields = new ArrayList<>(Arrays.asList(fieldType().defaultStrategy().createIndexableFields(shape)));
|
||||
createFieldNamesField(context, fields);
|
||||
|
@@ -461,7 +485,9 @@ public class GeoShapeFieldMapper extends FieldMapper {
                context.doc().add(field);
            }
        } catch (Exception e) {
            throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e);
            if (ignoreMalformed.value() == false) {
                throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e);
            }
        }
        return null;
    }
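The hunk above wires the new ignore_malformed flag into geo_shape parsing: instead of always rethrowing, the mapper now swallows the parse failure when the flag is set. A rough sketch of that guard with hypothetical names (not the mapper's real types), assuming the flag is carried as a simple boolean:

    // Hypothetical sketch of the ignore_malformed guard shown in the diff above.
    final class MalformedGuard {
        private final boolean ignoreMalformed;   // stands in for Explicit<Boolean> ignoreMalformed

        MalformedGuard(boolean ignoreMalformed) {
            this.ignoreMalformed = ignoreMalformed;
        }

        /** Runs the parser; failures are rethrown only when ignore_malformed is false. */
        void parse(Runnable parser) {
            try {
                parser.run();
            } catch (Exception e) {
                if (ignoreMalformed == false) {
                    // the real mapper throws MapperParsingException here
                    throw new IllegalArgumentException("failed to parse", e);
                }
                // otherwise the malformed value is silently skipped
            }
        }
    }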
|
@ -478,6 +504,9 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
if (gsfm.coerce.explicit()) {
|
||||
this.coerce = gsfm.coerce;
|
||||
}
|
||||
if (gsfm.ignoreMalformed.explicit()) {
|
||||
this.ignoreMalformed = gsfm.ignoreMalformed;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -506,7 +535,10 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
builder.field(Names.STRATEGY_POINTS_ONLY, fieldType().pointsOnly());
|
||||
}
|
||||
if (includeDefaults || coerce.explicit()) {
|
||||
builder.field("coerce", coerce.value());
|
||||
builder.field(Names.COERCE, coerce.value());
|
||||
}
|
||||
if (includeDefaults || ignoreMalformed.explicit()) {
|
||||
builder.field(IGNORE_MALFORMED, ignoreMalformed.value());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -514,6 +546,10 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
return coerce;
|
||||
}
|
||||
|
||||
public Explicit<Boolean> ignoreMalformed() {
|
||||
return ignoreMalformed;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String contentType() {
|
||||
return CONTENT_TYPE;
|
||||
|
|
|
@ -39,6 +39,7 @@ import org.elasticsearch.common.geo.ShapeRelation;
|
|||
import org.elasticsearch.common.geo.ShapesAvailability;
|
||||
import org.elasticsearch.common.geo.SpatialStrategy;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
|
@ -410,7 +411,7 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
|
|||
if (pathElements[currentPathSlot].equals(parser.currentName())) {
|
||||
parser.nextToken();
|
||||
if (++currentPathSlot == pathElements.length) {
|
||||
listener.onResponse(ShapeBuilder.parse(parser));
|
||||
listener.onResponse(ShapeParser.parse(parser));
|
||||
}
|
||||
} else {
|
||||
parser.nextToken();
|
||||
|
@ -517,7 +518,7 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
|
|||
currentFieldName = parser.currentName();
|
||||
token = parser.nextToken();
|
||||
if (SHAPE_FIELD.match(currentFieldName)) {
|
||||
shape = ShapeBuilder.parse(parser);
|
||||
shape = ShapeParser.parse(parser);
|
||||
} else if (STRATEGY_FIELD.match(currentFieldName)) {
|
||||
String strategyName = parser.text();
|
||||
strategy = SpatialStrategy.fromString(strategyName);
|
||||
|
|
|
@@ -1111,6 +1111,12 @@ public class IndicesService extends AbstractLifecycleComponent
     * Can the shard request be cached at all?
     */
    public boolean canCache(ShardSearchRequest request, SearchContext context) {
        // Queries that create a scroll context cannot use the cache.
        // They modify the search context during their execution so using the cache
        // may invalidate the scroll for the next query.
        if (request.scroll() != null) {
            return false;
        }

        // We cannot cache with DFS because results depend not only on the content of the index but also
        // on the overridden statistics. So if you ran two queries on the same index with different stats

@@ -1119,6 +1125,7 @@ public class IndicesService extends AbstractLifecycleComponent
        if (SearchType.QUERY_THEN_FETCH != context.searchType()) {
            return false;
        }

        IndexSettings settings = context.indexShard().indexSettings();
        // if not explicitly set in the request, use the index setting, if not, use the request
        if (request.requestCache() == null) {
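The added lines make canCache reject any request that opens a scroll context, since the scroll mutates the search context and a cached response could break the follow-up page; the explicit request_cache flag is only consulted afterwards. A condensed sketch of that decision order, using simplified stand-in types rather than the real ShardSearchRequest:

    // Simplified stand-in for the caching decision shown above.
    final class RequestCachePolicy {
        static final class Request {
            Boolean requestCache;   // null means "not set on the request"
            boolean scroll;         // true when the request opens a scroll context
        }

        /** Mirrors the order of checks in the diff: scroll first, then the explicit flag. */
        static boolean canCache(Request request, boolean indexDefaultEnabled) {
            if (request.scroll) {
                return false;                   // scroll contexts must never be served from cache
            }
            if (request.requestCache == null) {
                return indexDefaultEnabled;     // fall back to the index setting
            }
            return request.requestCache;        // honour the explicit per-request flag
        }
    }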
|
@ -21,8 +21,8 @@ package org.elasticsearch.search;
|
|||
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.elasticsearch.common.NamedRegistry;
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.ShapesAvailability;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilders;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
|
@ -250,6 +250,7 @@ import org.elasticsearch.search.suggest.phrase.StupidBackoff;
|
|||
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Consumer;
|
||||
|
@ -531,7 +532,7 @@ public class SearchModule {
|
|||
|
||||
private void registerShapes() {
|
||||
if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) {
|
||||
ShapeBuilders.register(namedWriteables);
|
||||
namedWriteables.addAll(GeoShapeType.getShapeWriteables());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -24,14 +24,14 @@
|
|||
//// SecurityManager impl:
|
||||
//// Must have all permissions to properly perform access checks
|
||||
|
||||
grant codeBase "${codebase.securesm-1.1.jar}" {
|
||||
grant codeBase "${codebase.securesm}" {
|
||||
permission java.security.AllPermission;
|
||||
};
|
||||
|
||||
//// Very special jar permissions:
|
||||
//// These are dangerous permissions that we don't want to grant to everything.
|
||||
|
||||
grant codeBase "${codebase.lucene-core-7.1.0.jar}" {
|
||||
grant codeBase "${codebase.lucene-core}" {
|
||||
// needed to allow MMapDirectory's "unmap hack" (die unmap hack, die)
|
||||
// java 8 package
|
||||
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
|
||||
|
@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-core-7.1.0.jar}" {
|
|||
permission java.lang.RuntimePermission "accessDeclaredMembers";
|
||||
};
|
||||
|
||||
grant codeBase "${codebase.lucene-misc-7.1.0.jar}" {
|
||||
grant codeBase "${codebase.lucene-misc}" {
|
||||
// needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper
|
||||
permission java.nio.file.LinkPermission "hard";
|
||||
};
|
||||
|
|
|
@ -21,7 +21,7 @@
|
|||
//// These are mock objects and test management that we allow test framework libs
|
||||
//// to provide on our behalf. But tests themselves cannot do this stuff!
|
||||
|
||||
grant codeBase "${codebase.securemock-1.2.jar}" {
|
||||
grant codeBase "${codebase.securemock}" {
|
||||
// needed to access ReflectionFactory (see below)
|
||||
permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect";
|
||||
// needed for reflection in ibm jdk
|
||||
|
@ -33,7 +33,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" {
|
|||
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
|
||||
};
|
||||
|
||||
grant codeBase "${codebase.lucene-test-framework-7.1.0.jar}" {
|
||||
grant codeBase "${codebase.lucene-test-framework}" {
|
||||
// needed by RamUsageTester
|
||||
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
|
||||
// needed for testing hardlinks in StoreRecoveryTests since we install MockFS
|
||||
|
@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-test-framework-7.1.0.jar}" {
|
|||
permission java.lang.RuntimePermission "accessDeclaredMembers";
|
||||
};
|
||||
|
||||
grant codeBase "${codebase.randomizedtesting-runner-2.5.2.jar}" {
|
||||
grant codeBase "${codebase.randomizedtesting-runner}" {
|
||||
// optionally needed for access to private test methods (e.g. beforeClass)
|
||||
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
|
||||
// needed to fail tests on uncaught exceptions from other threads
|
||||
|
@ -53,12 +53,12 @@ grant codeBase "${codebase.randomizedtesting-runner-2.5.2.jar}" {
|
|||
permission java.lang.RuntimePermission "accessDeclaredMembers";
|
||||
};
|
||||
|
||||
grant codeBase "${codebase.junit-4.12.jar}" {
|
||||
grant codeBase "${codebase.junit}" {
|
||||
// needed for TestClass creation
|
||||
permission java.lang.RuntimePermission "accessDeclaredMembers";
|
||||
};
|
||||
|
||||
grant codeBase "${codebase.mocksocket-1.2.jar}" {
|
||||
grant codeBase "${codebase.mocksocket}" {
|
||||
// mocksocket makes and accepts socket connections
|
||||
permission java.net.SocketPermission "*", "accept,connect";
|
||||
};
|
||||
|
@ -70,12 +70,12 @@ grant codeBase "${codebase.elasticsearch-rest-client}" {
|
|||
permission java.net.NetPermission "getProxySelector";
|
||||
};
|
||||
|
||||
grant codeBase "${codebase.httpcore-nio-4.4.5.jar}" {
|
||||
grant codeBase "${codebase.httpcore-nio}" {
|
||||
// httpcore makes socket connections for rest tests
|
||||
permission java.net.SocketPermission "*", "connect";
|
||||
};
|
||||
|
||||
grant codeBase "${codebase.httpasyncclient-4.1.2.jar}" {
|
||||
grant codeBase "${codebase.httpasyncclient}" {
|
||||
// httpasyncclient makes socket connections for rest tests
|
||||
permission java.net.SocketPermission "*", "connect";
|
||||
// rest client uses system properties which gets the default proxy
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.cluster.metadata;
|
||||
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.UUIDs;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -36,6 +37,7 @@ import java.util.HashSet;
|
|||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.lessThan;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
|
@ -66,6 +68,11 @@ public class IndexGraveyardTests extends ESTestCase {
|
|||
builder.startObject();
|
||||
graveyard.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
if (graveyard.getTombstones().size() > 0) {
|
||||
// check that date properly printed
|
||||
assertThat(Strings.toString(graveyard, false, true),
|
||||
containsString(XContentBuilder.DEFAULT_DATE_PRINTER.print(graveyard.getTombstones().get(0).getDeleteDateInMillis())));
|
||||
}
|
||||
XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
|
||||
parser.nextToken(); // the beginning of the parser
|
||||
assertThat(IndexGraveyard.fromXContent(parser), equalTo(graveyard));
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.common.geo;
|
||||
|
||||
import com.vividsolutions.jts.geom.GeometryFactory;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT;
|
||||
|
||||
/**
|
||||
* Created by nknize on 9/22/17.
|
||||
*/
|
||||
abstract class BaseGeoParsingTestCase extends ESTestCase {
|
||||
protected static final GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory();
|
||||
|
||||
public abstract void testParsePoint() throws IOException;
|
||||
public abstract void testParseMultiPoint() throws IOException;
|
||||
public abstract void testParseLineString() throws IOException;
|
||||
public abstract void testParseMultiLineString() throws IOException;
|
||||
public abstract void testParsePolygon() throws IOException;
|
||||
public abstract void testParseMultiPolygon() throws IOException;
|
||||
}
|
|
@ -21,7 +21,6 @@ package org.elasticsearch.common.geo;
|
|||
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
import com.vividsolutions.jts.geom.Geometry;
|
||||
import com.vividsolutions.jts.geom.GeometryFactory;
|
||||
import com.vividsolutions.jts.geom.LineString;
|
||||
import com.vividsolutions.jts.geom.LinearRing;
|
||||
import com.vividsolutions.jts.geom.MultiLineString;
|
||||
|
@ -29,12 +28,11 @@ import com.vividsolutions.jts.geom.Point;
|
|||
import com.vividsolutions.jts.geom.Polygon;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions;
|
||||
import org.locationtech.spatial4j.exception.InvalidShapeException;
|
||||
import org.locationtech.spatial4j.shape.Circle;
|
||||
|
@ -55,11 +53,10 @@ import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT
|
|||
/**
|
||||
* Tests for {@code GeoJSONShapeParser}
|
||||
*/
|
||||
public class GeoJSONShapeParserTests extends ESTestCase {
|
||||
public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase {
|
||||
|
||||
private static final GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory();
|
||||
|
||||
public void testParseSimplePoint() throws IOException {
|
||||
@Override
|
||||
public void testParsePoint() throws IOException {
|
||||
XContentBuilder pointGeoJson = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("type", "Point")
|
||||
|
@ -70,6 +67,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
assertGeometryEquals(new JtsPoint(expected, SPATIAL_CONTEXT), pointGeoJson);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testParseLineString() throws IOException {
|
||||
XContentBuilder lineGeoJson = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
@ -89,6 +87,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
assertGeometryEquals(jtsGeom(expected), lineGeoJson);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testParseMultiLineString() throws IOException {
|
||||
XContentBuilder multilinesGeoJson = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
@ -205,7 +204,8 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
|
||||
}
|
||||
|
||||
public void testParsePolygonNoHoles() throws IOException {
|
||||
@Override
|
||||
public void testParsePolygon() throws IOException {
|
||||
XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("type", "Polygon")
|
||||
|
@ -344,7 +344,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
|
||||
parser.nextToken();
|
||||
Shape shape = ShapeBuilder.parse(parser).build();
|
||||
Shape shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertPolygon(shape);
|
||||
|
||||
|
@ -364,7 +364,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
|
||||
parser.nextToken();
|
||||
shape = ShapeBuilder.parse(parser).build();
|
||||
shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertMultiPolygon(shape);
|
||||
|
||||
|
@ -384,7 +384,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
|
||||
parser.nextToken();
|
||||
shape = ShapeBuilder.parse(parser).build();
|
||||
shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertPolygon(shape);
|
||||
|
||||
|
@ -404,7 +404,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
|
||||
parser.nextToken();
|
||||
shape = ShapeBuilder.parse(parser).build();
|
||||
shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertMultiPolygon(shape);
|
||||
}
|
||||
|
@ -432,7 +432,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
|
||||
parser.nextToken();
|
||||
Shape shape = ShapeBuilder.parse(parser).build();
|
||||
Shape shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertPolygon(shape);
|
||||
|
||||
|
@ -458,7 +458,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
|
||||
parser.nextToken();
|
||||
shape = ShapeBuilder.parse(parser).build();
|
||||
shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertMultiPolygon(shape);
|
||||
|
||||
|
@ -484,7 +484,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
|
||||
parser.nextToken();
|
||||
shape = ShapeBuilder.parse(parser).build();
|
||||
shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertPolygon(shape);
|
||||
|
||||
|
@ -510,7 +510,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
|
||||
parser.nextToken();
|
||||
shape = ShapeBuilder.parse(parser).build();
|
||||
shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertMultiPolygon(shape);
|
||||
}
|
||||
|
@ -671,6 +671,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testParseMultiPoint() throws IOException {
|
||||
XContentBuilder multiPointGeoJson = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
@ -687,6 +688,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
assertGeometryEquals(expected, multiPointGeoJson);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testParseMultiPolygon() throws IOException {
|
||||
// test #1: two polygons; one without hole, one with hole
|
||||
XContentBuilder multiPolygonGeoJson = XContentFactory.jsonBuilder()
|
||||
|
@ -882,7 +884,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
XContentParser parser = createParser(polygonGeoJson);
|
||||
parser.nextToken();
|
||||
Shape shape = ShapeBuilder.parse(parser).build();
|
||||
Shape shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertPolygon(shape);
|
||||
|
||||
|
@ -911,7 +913,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
parser = createParser(polygonGeoJson);
|
||||
parser.nextToken();
|
||||
shape = ShapeBuilder.parse(parser).build();
|
||||
shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertPolygon(shape);
|
||||
|
||||
|
@ -940,7 +942,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
parser = createParser(polygonGeoJson);
|
||||
parser.nextToken();
|
||||
shape = ShapeBuilder.parse(parser).build();
|
||||
shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertPolygon(shape);
|
||||
|
||||
|
@ -969,7 +971,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
parser = createParser(polygonGeoJson);
|
||||
parser.nextToken();
|
||||
shape = ShapeBuilder.parse(parser).build();
|
||||
shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertMultiPolygon(shape);
|
||||
|
||||
|
@ -998,7 +1000,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
parser = createParser(polygonGeoJson);
|
||||
parser.nextToken();
|
||||
shape = ShapeBuilder.parse(parser).build();
|
||||
shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertMultiPolygon(shape);
|
||||
|
||||
|
@ -1027,7 +1029,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
|
||||
parser = createParser(polygonGeoJson);
|
||||
parser.nextToken();
|
||||
shape = ShapeBuilder.parse(parser).build();
|
||||
shape = ShapeParser.parse(parser).build();
|
||||
|
||||
ElasticsearchGeoAssertions.assertMultiPolygon(shape);
|
||||
}
|
||||
|
@ -1035,7 +1037,7 @@ public class GeoJSONShapeParserTests extends ESTestCase {
|
|||
private void assertGeometryEquals(Shape expected, XContentBuilder geoJson) throws IOException {
|
||||
XContentParser parser = createParser(geoJson);
|
||||
parser.nextToken();
|
||||
ElasticsearchGeoAssertions.assertEquals(expected, ShapeBuilder.parse(parser).build());
|
||||
ElasticsearchGeoAssertions.assertEquals(expected, ShapeParser.parse(parser).build());
|
||||
}
|
||||
|
||||
private ShapeCollection<Shape> shapeCollection(Shape... shapes) {
|
|
@ -24,10 +24,13 @@ import com.vividsolutions.jts.geom.LineString;
|
|||
import com.vividsolutions.jts.geom.Polygon;
|
||||
|
||||
import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
|
||||
import org.elasticsearch.common.geo.builders.CircleBuilder;
|
||||
import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.LineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.MultiLineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.PointBuilder;
|
||||
import org.elasticsearch.common.geo.builders.PolygonBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilders;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.locationtech.spatial4j.exception.InvalidShapeException;
|
||||
import org.locationtech.spatial4j.shape.Circle;
|
||||
|
@ -46,13 +49,13 @@ import static org.hamcrest.Matchers.containsString;
|
|||
public class ShapeBuilderTests extends ESTestCase {
|
||||
|
||||
public void testNewPoint() {
|
||||
Point point = ShapeBuilders.newPoint(-100, 45).build();
|
||||
Point point = new PointBuilder().coordinate(-100, 45).build();
|
||||
assertEquals(-100D, point.getX(), 0.0d);
|
||||
assertEquals(45D, point.getY(), 0.0d);
|
||||
}
|
||||
|
||||
public void testNewRectangle() {
|
||||
Rectangle rectangle = ShapeBuilders.newEnvelope(new Coordinate(-45, 30), new Coordinate(45, -30)).build();
|
||||
Rectangle rectangle = new EnvelopeBuilder(new Coordinate(-45, 30), new Coordinate(45, -30)).build();
|
||||
assertEquals(-45D, rectangle.getMinX(), 0.0d);
|
||||
assertEquals(-30D, rectangle.getMinY(), 0.0d);
|
||||
assertEquals(45D, rectangle.getMaxX(), 0.0d);
|
||||
|
@ -60,7 +63,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testNewPolygon() {
|
||||
Polygon polygon = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
Polygon polygon = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-45, 30)
|
||||
.coordinate(45, 30)
|
||||
.coordinate(45, -30)
|
||||
|
@ -75,7 +78,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testNewPolygon_coordinate() {
|
||||
Polygon polygon = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
Polygon polygon = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(new Coordinate(-45, 30))
|
||||
.coordinate(new Coordinate(45, 30))
|
||||
.coordinate(new Coordinate(45, -30))
|
||||
|
@ -90,7 +93,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testNewPolygon_coordinates() {
|
||||
Polygon polygon = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
Polygon polygon = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinates(new Coordinate(-45, 30), new Coordinate(45, 30), new Coordinate(45, -30), new Coordinate(-45, -30), new Coordinate(-45, 30))
|
||||
).toPolygon();
|
||||
|
||||
|
@ -103,7 +106,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
|
||||
public void testLineStringBuilder() {
|
||||
// Building a simple LineString
|
||||
ShapeBuilders.newLineString(new CoordinatesBuilder()
|
||||
new LineStringBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-130.0, 55.0)
|
||||
.coordinate(-130.0, -40.0)
|
||||
.coordinate(-15.0, -40.0)
|
||||
|
@ -114,7 +117,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
.coordinate(-110.0, 55.0)).build();
|
||||
|
||||
// Building a linestring that needs to be wrapped
|
||||
ShapeBuilders.newLineString(new CoordinatesBuilder()
|
||||
new LineStringBuilder(new CoordinatesBuilder()
|
||||
.coordinate(100.0, 50.0)
|
||||
.coordinate(110.0, -40.0)
|
||||
.coordinate(240.0, -40.0)
|
||||
|
@ -127,7 +130,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
.build();
|
||||
|
||||
// Building a lineString on the dateline
|
||||
ShapeBuilders.newLineString(new CoordinatesBuilder()
|
||||
new LineStringBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-180.0, 80.0)
|
||||
.coordinate(-180.0, 40.0)
|
||||
.coordinate(-180.0, -40.0)
|
||||
|
@ -136,7 +139,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
.build();
|
||||
|
||||
// Building a lineString on the dateline
|
||||
ShapeBuilders.newLineString(new CoordinatesBuilder()
|
||||
new LineStringBuilder(new CoordinatesBuilder()
|
||||
.coordinate(180.0, 80.0)
|
||||
.coordinate(180.0, 40.0)
|
||||
.coordinate(180.0, -40.0)
|
||||
|
@ -146,7 +149,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testMultiLineString() {
|
||||
ShapeBuilders.newMultiLinestring()
|
||||
new MultiLineStringBuilder()
|
||||
.linestring(new LineStringBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-100.0, 50.0)
|
||||
.coordinate(50.0, 50.0)
|
||||
|
@ -164,7 +167,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
.build();
|
||||
|
||||
// LineString that needs to be wrapped
|
||||
ShapeBuilders.newMultiLinestring()
|
||||
new MultiLineStringBuilder()
|
||||
.linestring(new LineStringBuilder(new CoordinatesBuilder()
|
||||
.coordinate(150.0, 60.0)
|
||||
.coordinate(200.0, 60.0)
|
||||
|
@ -183,7 +186,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testPolygonSelfIntersection() {
|
||||
PolygonBuilder newPolygon = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder newPolygon = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-40.0, 50.0)
|
||||
.coordinate(40.0, 50.0)
|
||||
.coordinate(-40.0, -50.0)
|
||||
|
@ -194,31 +197,31 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
|
||||
public void testGeoCircle() {
|
||||
double earthCircumference = 40075016.69;
|
||||
Circle circle = ShapeBuilders.newCircleBuilder().center(0, 0).radius("100m").build();
|
||||
Circle circle = new CircleBuilder().center(0, 0).radius("100m").build();
|
||||
assertEquals((360 * 100) / earthCircumference, circle.getRadius(), 0.00000001);
|
||||
assertEquals(new PointImpl(0, 0, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter());
|
||||
circle = ShapeBuilders.newCircleBuilder().center(+180, 0).radius("100m").build();
|
||||
circle = new CircleBuilder().center(+180, 0).radius("100m").build();
|
||||
assertEquals((360 * 100) / earthCircumference, circle.getRadius(), 0.00000001);
|
||||
assertEquals(new PointImpl(180, 0, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter());
|
||||
circle = ShapeBuilders.newCircleBuilder().center(-180, 0).radius("100m").build();
|
||||
circle = new CircleBuilder().center(-180, 0).radius("100m").build();
|
||||
assertEquals((360 * 100) / earthCircumference, circle.getRadius(), 0.00000001);
|
||||
assertEquals(new PointImpl(-180, 0, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter());
|
||||
circle = ShapeBuilders.newCircleBuilder().center(0, 90).radius("100m").build();
|
||||
circle = new CircleBuilder().center(0, 90).radius("100m").build();
|
||||
assertEquals((360 * 100) / earthCircumference, circle.getRadius(), 0.00000001);
|
||||
assertEquals(new PointImpl(0, 90, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter());
|
||||
circle = ShapeBuilders.newCircleBuilder().center(0, -90).radius("100m").build();
|
||||
circle = new CircleBuilder().center(0, -90).radius("100m").build();
|
||||
assertEquals((360 * 100) / earthCircumference, circle.getRadius(), 0.00000001);
|
||||
assertEquals(new PointImpl(0, -90, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter());
|
||||
double randomLat = (randomDouble() * 180) - 90;
|
||||
double randomLon = (randomDouble() * 360) - 180;
|
||||
double randomRadius = randomIntBetween(1, (int) earthCircumference / 4);
|
||||
circle = ShapeBuilders.newCircleBuilder().center(randomLon, randomLat).radius(randomRadius + "m").build();
|
||||
circle = new CircleBuilder().center(randomLon, randomLat).radius(randomRadius + "m").build();
|
||||
assertEquals((360 * randomRadius) / earthCircumference, circle.getRadius(), 0.00000001);
|
||||
assertEquals(new PointImpl(randomLon, randomLat, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter());
|
||||
}
|
||||
|
||||
public void testPolygonWrapping() {
|
||||
Shape shape = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
Shape shape = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-150.0, 65.0)
|
||||
.coordinate(-250.0, 65.0)
|
||||
.coordinate(-250.0, -65.0)
|
||||
|
@ -231,7 +234,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testLineStringWrapping() {
|
||||
Shape shape = ShapeBuilders.newLineString(new CoordinatesBuilder()
|
||||
Shape shape = new LineStringBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-150.0, 65.0)
|
||||
.coordinate(-250.0, 65.0)
|
||||
.coordinate(-250.0, -65.0)
|
||||
|
@ -248,7 +251,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
// expected results: 3 polygons, 1 with a hole
|
||||
|
||||
// a giant c shape
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(174,0)
|
||||
.coordinate(-176,0)
|
||||
.coordinate(-176,3)
|
||||
|
@ -292,7 +295,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
// expected results: 3 polygons, 1 with a hole
|
||||
|
||||
// a giant c shape
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-186,0)
|
||||
.coordinate(-176,0)
|
||||
.coordinate(-176,3)
|
||||
|
@ -331,7 +334,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testComplexShapeWithHole() {
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-85.0018514,37.1311314)
|
||||
.coordinate(-85.0016645,37.1315293)
|
||||
.coordinate(-85.0016246,37.1317069)
|
||||
|
@ -407,7 +410,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testShapeWithHoleAtEdgeEndPoints() {
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-4, 2)
|
||||
.coordinate(4, 2)
|
||||
.coordinate(6, 0)
|
||||
|
@ -430,7 +433,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testShapeWithPointOnDateline() {
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(180, 0)
|
||||
.coordinate(176, 4)
|
||||
.coordinate(176, -4)
|
||||
|
@ -443,7 +446,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
|
||||
public void testShapeWithEdgeAlongDateline() {
|
||||
// test case 1: test the positive side of the dateline
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(180, 0)
|
||||
.coordinate(176, 4)
|
||||
.coordinate(180, -4)
|
||||
|
@ -454,7 +457,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
assertPolygon(shape);
|
||||
|
||||
// test case 2: test the negative side of the dateline
|
||||
builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-176, 4)
|
||||
.coordinate(-180, 0)
|
||||
.coordinate(-180, -4)
|
||||
|
@ -467,7 +470,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
|
||||
public void testShapeWithBoundaryHoles() {
|
||||
// test case 1: test the positive side of the dateline
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-177, 10)
|
||||
.coordinate(176, 15)
|
||||
.coordinate(172, 0)
|
||||
|
@ -486,7 +489,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
assertMultiPolygon(shape);
|
||||
|
||||
// test case 2: test the negative side of the dateline
|
||||
builder = ShapeBuilders.newPolygon(
|
||||
builder = new PolygonBuilder(
|
||||
new CoordinatesBuilder()
|
||||
.coordinate(-176, 15)
|
||||
.coordinate(179, 10)
|
||||
|
@ -510,7 +513,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
|
||||
public void testShapeWithTangentialHole() {
|
||||
// test a shape with one tangential (shared) vertex (should pass)
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(179, 10)
|
||||
.coordinate(168, 15)
|
||||
.coordinate(164, 0)
|
||||
|
@ -531,7 +534,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
|
||||
public void testShapeWithInvalidTangentialHole() {
|
||||
// test a shape with one invalid tangential (shared) vertex (should throw exception)
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(179, 10)
|
||||
.coordinate(168, 15)
|
||||
.coordinate(164, 0)
|
||||
|
@ -552,7 +555,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
|
||||
public void testBoundaryShapeWithTangentialHole() {
|
||||
// test a shape with one tangential (shared) vertex for each hole (should pass)
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-177, 10)
|
||||
.coordinate(176, 15)
|
||||
.coordinate(172, 0)
|
||||
|
@ -579,7 +582,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
|
||||
public void testBoundaryShapeWithInvalidTangentialHole() {
|
||||
// test shape with two tangential (shared) vertices (should throw exception)
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-177, 10)
|
||||
.coordinate(176, 15)
|
||||
.coordinate(172, 0)
|
||||
|
@ -602,7 +605,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
* Test an enveloping polygon around the max mercator bounds
|
||||
*/
|
||||
public void testBoundaryShape() {
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-180, 90)
|
||||
.coordinate(180, 90)
|
||||
.coordinate(180, -90)
|
||||
|
@ -616,7 +619,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
|
||||
public void testShapeWithAlternateOrientation() {
|
||||
// cw: should produce a multi polygon spanning hemispheres
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(180, 0)
|
||||
.coordinate(176, 4)
|
||||
.coordinate(-176, 4)
|
||||
|
@ -627,7 +630,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
assertPolygon(shape);
|
||||
|
||||
// cw: geo core will convert to ccw across the dateline
|
||||
builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(180, 0)
|
||||
.coordinate(-176, 4)
|
||||
.coordinate(176, 4)
|
||||
|
@ -640,7 +643,7 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testInvalidShapeWithConsecutiveDuplicatePoints() {
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(180, 0)
|
||||
.coordinate(176, 4)
|
||||
.coordinate(176, 4)
|
||||
|
|
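The test updates above move off the static ShapeBuilders.newXxx(...) factory helpers in favour of constructing the individual builders (PointBuilder, CircleBuilder, PolygonBuilder, and so on) directly. A short before/after sketch, limited to the calls that appear verbatim in the diff:

    import org.elasticsearch.common.geo.builders.CircleBuilder;
    import org.elasticsearch.common.geo.builders.PointBuilder;
    import org.locationtech.spatial4j.shape.Circle;
    import org.locationtech.spatial4j.shape.Point;

    public class ShapeBuilderMigrationExample {
        public static void main(String[] args) {
            // Before (removed): ShapeBuilders.newPoint(-100, 45).build();
            Point point = new PointBuilder().coordinate(-100, 45).build();

            // Before (removed): ShapeBuilders.newCircleBuilder().center(0, 0).radius("100m").build();
            Circle circle = new CircleBuilder().center(0, 0).radius("100m").build();

            System.out.println(point + " " + circle);
        }
    }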
|
@ -19,13 +19,14 @@
|
|||
|
||||
package org.elasticsearch.common.geo.builders;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.parsers.ShapeParser;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteable;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.Writeable.Reader;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
@ -33,8 +34,6 @@ import org.junit.AfterClass;
|
|||
import org.junit.BeforeClass;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
|
||||
|
||||
|
@ -49,9 +48,7 @@ public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> exte
|
|||
@BeforeClass
|
||||
public static void init() {
|
||||
if (namedWriteableRegistry == null) {
|
||||
List<NamedWriteableRegistry.Entry> shapes = new ArrayList<>();
|
||||
ShapeBuilders.register(shapes);
|
||||
namedWriteableRegistry = new NamedWriteableRegistry(shapes);
|
||||
namedWriteableRegistry = new NamedWriteableRegistry(GeoShapeType.getShapeWriteables());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -82,9 +79,9 @@ public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> exte
|
|||
}
|
||||
XContentBuilder builder = testShape.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS);
|
||||
XContentBuilder shuffled = shuffleXContent(builder);
|
||||
XContentParser shapeParser = createParser(shuffled);
|
||||
shapeParser.nextToken();
|
||||
ShapeBuilder parsedShape = ShapeBuilder.parse(shapeParser);
|
||||
XContentParser shapeContentParser = createParser(shuffled);
|
||||
shapeContentParser.nextToken();
|
||||
ShapeBuilder parsedShape = ShapeParser.parse(shapeContentParser);
|
||||
assertNotSame(testShape, parsedShape);
|
||||
assertEquals(testShape, parsedShape);
|
||||
assertEquals(testShape.hashCode(), parsedShape.hashCode());
|
||||
|
|
|
@ -49,7 +49,7 @@ public class LineStringBuilderTests extends AbstractShapeBuilderTestCase<LineStr
|
|||
}
|
||||
|
||||
static LineStringBuilder mutate(LineStringBuilder original) throws IOException {
|
||||
LineStringBuilder mutation = (LineStringBuilder) copyShape(original);
|
||||
LineStringBuilder mutation = copyShape(original);
|
||||
Coordinate[] coordinates = original.coordinates(false);
|
||||
Coordinate coordinate = randomFrom(coordinates);
|
||||
if (randomBoolean()) {
|
||||
|
@ -65,7 +65,7 @@ public class LineStringBuilderTests extends AbstractShapeBuilderTestCase<LineStr
|
|||
coordinate.y = randomDoubleBetween(-90.0, 90.0, true);
|
||||
}
|
||||
}
|
||||
return mutation.coordinates(coordinates);
|
||||
return LineStringBuilder.class.cast(mutation.coordinates(coordinates));
|
||||
}
|
||||
|
||||
static LineStringBuilder createRandomShape() {
|
||||
|
|
|
@ -68,6 +68,6 @@ public class MultiLineStringBuilderTests extends AbstractShapeBuilderTestCase<Mu
|
|||
}
|
||||
|
||||
static MultiLineStringBuilder createRandomShape() {
|
||||
return new MultiLineStringBuilder();
|
||||
return MultiLineStringBuilder.class.cast(RandomShapeGenerator.createShape(random(), ShapeType.MULTILINESTRING));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -70,7 +70,7 @@ public class MultiPointBuilderTests extends AbstractShapeBuilderTestCase<MultiPo
|
|||
} else {
|
||||
coordinates = new Coordinate[]{new Coordinate(1.0, 1.0)};
|
||||
}
|
||||
return mutation.coordinates(coordinates);
|
||||
return MultiPointBuilder.class.cast(mutation.coordinates(coordinates));
|
||||
}
|
||||
|
||||
static MultiPointBuilder createRandomShape() {
|
||||
|
|
|
@ -122,7 +122,7 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
Settings.Builder builder = Settings.builder();
|
||||
Settings updates = Settings.builder().putNull("index.routing.allocation.require._ip")
|
||||
.put("index.some.dyn.setting", 1).build();
|
||||
settings.validate(updates);
|
||||
settings.validate(updates, false);
|
||||
settings.updateDynamicSettings(updates,
|
||||
Settings.builder().put(currentSettings), builder, "node");
|
||||
currentSettings = builder.build();
|
||||
|
@@ -160,6 +160,26 @@
        assertEquals(0, consumer2.get());
    }

    public void testDependentSettings() {
        Setting.AffixSetting<String> stringSetting = Setting.affixKeySetting("foo.", "name",
            (k) -> Setting.simpleString(k, Property.Dynamic, Property.NodeScope));
        Setting.AffixSetting<Integer> intSetting = Setting.affixKeySetting("foo.", "bar",
            (k) -> Setting.intSetting(k, 1, Property.Dynamic, Property.NodeScope), stringSetting);

        AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY,new HashSet<>(Arrays.asList(intSetting, stringSetting)));

        IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
            () -> service.validate(Settings.builder().put("foo.test.bar", 7).build(), true));
        assertEquals("Missing required setting [foo.test.name] for setting [foo.test.bar]", iae.getMessage());

        service.validate(Settings.builder()
            .put("foo.test.name", "test")
            .put("foo.test.bar", 7)
            .build(), true);

        service.validate(Settings.builder().put("foo.test.bar", 7).build(), false);
    }

    public void testAddConsumerAffix() {
        Setting.AffixSetting<Integer> intSetting = Setting.affixKeySetting("foo.", "bar",
            (k) -> Setting.intSetting(k, 1, Property.Dynamic, Property.NodeScope));
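testDependentSettings exercises affix settings that declare a dependency on a sibling affix setting: foo.&lt;namespace&gt;.bar is only valid when foo.&lt;namespace&gt;.name is present in the same namespace, and the boolean passed to validate(...) controls whether that dependency check runs. A trimmed-down sketch of the same registration, assuming only the overload shown in the test (the trailing setting argument declares the dependency):

    import java.util.Arrays;
    import java.util.HashSet;

    import org.elasticsearch.common.settings.AbstractScopedSettings;
    import org.elasticsearch.common.settings.ClusterSettings;
    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Setting.Property;
    import org.elasticsearch.common.settings.Settings;

    public class DependentAffixSettingExample {
        public static void main(String[] args) {
            Setting.AffixSetting<String> name = Setting.affixKeySetting("foo.", "name",
                (k) -> Setting.simpleString(k, Property.Dynamic, Property.NodeScope));
            // The trailing argument declares the dependency: foo.<ns>.bar requires foo.<ns>.name.
            Setting.AffixSetting<Integer> bar = Setting.affixKeySetting("foo.", "bar",
                (k) -> Setting.intSetting(k, 1, Property.Dynamic, Property.NodeScope), name);

            AbstractScopedSettings service =
                new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(bar, name)));

            // Passes: the dependency is satisfied within the "test" namespace.
            service.validate(Settings.builder()
                .put("foo.test.name", "test")
                .put("foo.test.bar", 7)
                .build(), true);
        }
    }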
@ -585,7 +605,7 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
Settings.EMPTY,
|
||||
IndexScopedSettings.BUILT_IN_INDEX_SETTINGS);
|
||||
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
|
||||
() -> settings.validate(Settings.builder().put("index.numbe_of_replica", "1").build()));
|
||||
() -> settings.validate(Settings.builder().put("index.numbe_of_replica", "1").build(), false));
|
||||
assertEquals(iae.getMessage(), "unknown setting [index.numbe_of_replica] did you mean [index.number_of_replicas]?");
|
||||
}
|
||||
|
||||
|
@ -595,26 +615,23 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
IndexScopedSettings.BUILT_IN_INDEX_SETTINGS);
|
||||
String unknownMsgSuffix = " please check that any required plugins are installed, or check the breaking changes documentation for" +
|
||||
" removed settings";
|
||||
settings.validate(Settings.builder().put("index.store.type", "boom"));
|
||||
settings.validate(Settings.builder().put("index.store.type", "boom").build());
|
||||
settings.validate(Settings.builder().put("index.store.type", "boom").build(), false);
|
||||
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
|
||||
settings.validate(Settings.builder().put("index.store.type", "boom").put("i.am.not.a.setting", true)));
|
||||
settings.validate(Settings.builder().put("index.store.type", "boom").put("i.am.not.a.setting", true).build(), false));
|
||||
assertEquals("unknown setting [i.am.not.a.setting]" + unknownMsgSuffix, e.getMessage());
|
||||
|
||||
e = expectThrows(IllegalArgumentException.class, () ->
|
||||
settings.validate(Settings.builder().put("index.store.type", "boom").put("i.am.not.a.setting", true).build()));
|
||||
assertEquals("unknown setting [i.am.not.a.setting]" + unknownMsgSuffix, e.getMessage());
|
||||
|
||||
e = expectThrows(IllegalArgumentException.class, () ->
|
||||
settings.validate(Settings.builder().put("index.store.type", "boom").put("index.number_of_replicas", true).build()));
|
||||
settings.validate(Settings.builder().put("index.store.type", "boom").put("index.number_of_replicas", true).build(), false));
|
||||
assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage());
|
||||
|
||||
e = expectThrows(IllegalArgumentException.class, () ->
|
||||
settings.validate("index.number_of_replicas", Settings.builder().put("index.number_of_replicas", "true").build()));
|
||||
settings.validate("index.number_of_replicas", Settings.builder().put("index.number_of_replicas", "true").build(), false));
|
||||
assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage());
|
||||
|
||||
e = expectThrows(IllegalArgumentException.class, () ->
|
||||
settings.validate("index.similarity.classic.type", Settings.builder().put("index.similarity.classic.type", "mine").build()));
|
||||
settings.validate("index.similarity.classic.type", Settings.builder().put("index.similarity.classic.type", "mine").build(),
|
||||
false));
|
||||
assertEquals("illegal value for [index.similarity.classic] cannot redefine built-in similarity", e.getMessage());
|
||||
}
|
||||
|
||||
|
@ -624,12 +641,12 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
|
||||
final ClusterSettings clusterSettings = new ClusterSettings(settings, Collections.emptySet());
|
||||
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> clusterSettings.validate(settings));
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> clusterSettings.validate(settings, false));
|
||||
assertThat(e.getMessage(), startsWith("unknown secure setting [some.secure.setting]"));
|
||||
|
||||
ClusterSettings clusterSettings2 = new ClusterSettings(settings,
|
||||
Collections.singleton(SecureSetting.secureString("some.secure.setting", null)));
|
||||
clusterSettings2.validate(settings);
|
||||
clusterSettings2.validate(settings, false);
|
||||
}
|
||||
|
||||
public void testDiffSecureSettings() {
|
||||
|
@ -722,7 +739,7 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
IllegalArgumentException ex =
|
||||
expectThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> settings.validate(Settings.builder().put("logger._root", "boom").build()));
|
||||
() -> settings.validate(Settings.builder().put("logger._root", "boom").build(), false));
|
||||
assertEquals("Unknown level constant [BOOM].", ex.getMessage());
|
||||
assertEquals(level, ESLoggerFactory.getRootLogger().getLevel());
|
||||
settings.applySettings(Settings.builder().put("logger._root", "TRACE").build());
|
||||
|
|
|
@ -30,6 +30,7 @@ import java.util.Collections;
|
|||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
@ -42,6 +43,7 @@ import static org.hamcrest.Matchers.instanceOf;
|
|||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
public class SettingTests extends ESTestCase {
|
||||
|
||||
public void testGet() {
|
||||
Setting<Boolean> booleanSetting = Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.NodeScope);
|
||||
assertFalse(booleanSetting.get(Settings.EMPTY));
|
||||
|
@@ -577,6 +579,22 @@
        assertFalse(listAffixSetting.match("foo"));
    }

    public void testAffixSettingNamespaces() {
        Setting.AffixSetting<Boolean> setting =
            Setting.affixKeySetting("foo.", "enable", (key) -> Setting.boolSetting(key, false, Property.NodeScope));
        Settings build = Settings.builder()
            .put("foo.bar.enable", "true")
            .put("foo.baz.enable", "true")
            .put("foo.boom.enable", "true")
            .put("something.else", "true")
            .build();
        Set<String> namespaces = setting.getNamespaces(build);
        assertEquals(3, namespaces.size());
        assertTrue(namespaces.contains("bar"));
        assertTrue(namespaces.contains("baz"));
        assertTrue(namespaces.contains("boom"));
    }

    public void testAffixAsMap() {
        Setting.AffixSetting<String> setting = Setting.prefixKeySetting("foo.bar.", key ->
            Setting.simpleString(key, Property.NodeScope));
|
||||
|
|
|
@@ -258,6 +258,16 @@
assertPingCount(handleD, handleA, 0);
assertPingCount(handleD, handleB, 0);
assertPingCount(handleD, handleC, 3);

zenPingC.close();
handleD.counters.clear();
logger.info("ping from UZP_D after closing UZP_C");
pingResponses = zenPingD.pingAndWait().toList();
// check that node does not respond to pings anymore after the ping service has been closed
assertThat(pingResponses.size(), equalTo(0));
assertPingCount(handleD, handleA, 0);
assertPingCount(handleD, handleB, 0);
assertPingCount(handleD, handleC, 3);
}

public void testUnknownHostNotCached() throws ExecutionException, InterruptedException {

@@ -498,7 +498,7 @@ public class IndexSettingsTests extends ESTestCase {
assertTrue(index.isSingleType());
expectThrows(IllegalArgumentException.class, () -> {
    index.getScopedSettings()
        .validate(Settings.builder().put(IndexSettings.INDEX_MAPPING_SINGLE_TYPE_SETTING_KEY, randomBoolean()).build());
        .validate(Settings.builder().put(IndexSettings.INDEX_MAPPING_SINGLE_TYPE_SETTING_KEY, randomBoolean()).build(), false);
});
}
{

@ -24,13 +24,13 @@ import org.apache.lucene.index.Term;
|
|||
import org.apache.lucene.search.DocValuesFieldExistsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.common.geo.builders.PointBuilder;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.elasticsearch.common.collect.Iterators;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilders;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
|
@@ -181,7 +181,7 @@ public class ExternalMapper extends FieldMapper {
pointMapper.parse(context.createExternalValueContext(point));

// Let's add a Dummy Shape
Point shape = ShapeBuilders.newPoint(-100, 45).build();
Point shape = new PointBuilder(-100, 45).build();
shapeMapper.parse(context.createExternalValueContext(shape));

context = context.createExternalValueContext(generatedValue);

@ -21,7 +21,7 @@ package org.elasticsearch.index.mapper;
|
|||
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.common.geo.ShapeRelation;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilders;
|
||||
import org.elasticsearch.common.geo.builders.PointBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
|
@@ -118,7 +118,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase {
assertThat(response.getHits().getTotalHits(), equalTo((long) 1));

response = client().prepareSearch("test-idx")
    .setPostFilter(QueryBuilders.geoShapeQuery("field.shape", ShapeBuilders.newPoint(-100, 45)).relation(ShapeRelation.WITHIN))
    .setPostFilter(QueryBuilders.geoShapeQuery("field.shape", new PointBuilder(-100, 45)).relation(ShapeRelation.WITHIN))
    .execute().actionGet();

assertThat(response.getHits().getTotalHits(), equalTo((long) 1));

@ -22,6 +22,7 @@ import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
|
|||
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
|
||||
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
|
||||
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
|
@@ -103,7 +104,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
}

/**
 * Test that orientation parameter correctly parses
 * Test that coerce parameter correctly parses
 */
public void testCoerceParsing() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
@ -136,6 +137,41 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
|
|||
assertThat(coerce, equalTo(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that ignore_malformed parameter correctly parses
|
||||
*/
|
||||
public void testIgnoreMalformedParsing() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field("ignore_malformed", "true")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
|
||||
FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
|
||||
|
||||
Explicit<Boolean> ignoreMalformed = ((GeoShapeFieldMapper)fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed.value(), equalTo(true));
|
||||
|
||||
// explicit false ignore_malformed test
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field("ignore_malformed", "false")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
|
||||
fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
|
||||
|
||||
ignoreMalformed = ((GeoShapeFieldMapper)fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed.explicit(), equalTo(true));
|
||||
assertThat(ignoreMalformed.value(), equalTo(false));
|
||||
}
|
||||
|
||||
public void testGeohashConfiguration() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
|
|
|
@ -32,7 +32,6 @@ import org.elasticsearch.common.geo.ShapeRelation;
|
|||
import org.elasticsearch.common.geo.SpatialStrategy;
|
||||
import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilders;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
|
@@ -200,7 +199,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue

// see #3878
public void testThatXContentSerializationInsideOfArrayWorks() throws Exception {
EnvelopeBuilder envelopeBuilder = ShapeBuilders.newEnvelope(new Coordinate(0, 0), new Coordinate(10, 10));
EnvelopeBuilder envelopeBuilder = new EnvelopeBuilder(new Coordinate(0, 0), new Coordinate(10, 10));
GeoShapeQueryBuilder geoQuery = QueryBuilders.geoShapeQuery("searchGeometry", envelopeBuilder);
JsonXContent.contentBuilder().startArray().value(geoQuery).endArray();
}

@ -85,6 +85,17 @@ public class UpdateSettingsIT extends ESIntegTestCase {
|
|||
public static class DummySettingPlugin extends Plugin {
|
||||
public static final Setting<String> DUMMY_SETTING = Setting.simpleString("index.dummy",
|
||||
Setting.Property.IndexScope, Setting.Property.Dynamic);
|
||||
|
||||
public static final Setting.AffixSetting<String> DUMMY_ACCOUNT_USER = Setting.affixKeySetting("index.acc.", "user",
|
||||
k -> Setting.simpleString(k, Setting.Property.IndexScope, Setting.Property.Dynamic));
|
||||
public static final Setting<String> DUMMY_ACCOUNT_PW = Setting.affixKeySetting("index.acc.", "pw",
|
||||
k -> Setting.simpleString(k, Setting.Property.IndexScope, Setting.Property.Dynamic), DUMMY_ACCOUNT_USER);
|
||||
|
||||
public static final Setting.AffixSetting<String> DUMMY_ACCOUNT_USER_CLUSTER = Setting.affixKeySetting("cluster.acc.", "user",
|
||||
k -> Setting.simpleString(k, Setting.Property.NodeScope, Setting.Property.Dynamic));
|
||||
public static final Setting<String> DUMMY_ACCOUNT_PW_CLUSTER = Setting.affixKeySetting("cluster.acc.", "pw",
|
||||
k -> Setting.simpleString(k, Setting.Property.NodeScope, Setting.Property.Dynamic), DUMMY_ACCOUNT_USER_CLUSTER);
|
||||
|
||||
@Override
|
||||
public void onIndexModule(IndexModule indexModule) {
|
||||
indexModule.addSettingsUpdateConsumer(DUMMY_SETTING, (s) -> {}, (s) -> {
|
||||
|
@ -95,7 +106,8 @@ public class UpdateSettingsIT extends ESIntegTestCase {
|
|||
|
||||
@Override
|
||||
public List<Setting<?>> getSettings() {
|
||||
return Collections.singletonList(DUMMY_SETTING);
|
||||
return Arrays.asList(DUMMY_SETTING, DUMMY_ACCOUNT_PW, DUMMY_ACCOUNT_USER,
|
||||
DUMMY_ACCOUNT_PW_CLUSTER, DUMMY_ACCOUNT_USER_CLUSTER);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -112,6 +124,124 @@ public class UpdateSettingsIT extends ESIntegTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testUpdateDependentClusterSettings() {
|
||||
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () ->
|
||||
client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder()
|
||||
.put("cluster.acc.test.pw", "asdf")).get());
|
||||
assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage());
|
||||
|
||||
iae = expectThrows(IllegalArgumentException.class, () ->
|
||||
client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder()
|
||||
.put("cluster.acc.test.pw", "asdf")).get());
|
||||
assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage());
|
||||
|
||||
iae = expectThrows(IllegalArgumentException.class, () ->
|
||||
client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder()
|
||||
.put("cluster.acc.test.pw", "asdf")).setPersistentSettings(Settings.builder()
|
||||
.put("cluster.acc.test.user", "asdf")).get());
|
||||
assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage());
|
||||
|
||||
if (randomBoolean()) {
|
||||
client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder()
|
||||
.put("cluster.acc.test.pw", "asdf")
|
||||
.put("cluster.acc.test.user", "asdf")).get();
|
||||
iae = expectThrows(IllegalArgumentException.class, () ->
|
||||
client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder()
|
||||
.putNull("cluster.acc.test.user")).get());
|
||||
assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage());
|
||||
client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder()
|
||||
.putNull("cluster.acc.test.pw")
|
||||
.putNull("cluster.acc.test.user")).get();
|
||||
} else {
|
||||
client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder()
|
||||
.put("cluster.acc.test.pw", "asdf")
|
||||
.put("cluster.acc.test.user", "asdf")).get();
|
||||
|
||||
iae = expectThrows(IllegalArgumentException.class, () ->
|
||||
client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder()
|
||||
.putNull("cluster.acc.test.user")).get());
|
||||
assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage());
|
||||
|
||||
client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder()
|
||||
.putNull("cluster.acc.test.pw")
|
||||
.putNull("cluster.acc.test.user")).get();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void testUpdateDependentIndexSettings() {
|
||||
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () ->
|
||||
prepareCreate("test", Settings.builder().put("index.acc.test.pw", "asdf")).get());
|
||||
assertEquals("Missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage());
|
||||
|
||||
createIndex("test");
|
||||
for (int i = 0; i < 2; i++) {
|
||||
if (i == 1) {
|
||||
// now do it on a closed index
|
||||
client().admin().indices().prepareClose("test").get();
|
||||
}
|
||||
|
||||
iae = expectThrows(IllegalArgumentException.class, () ->
|
||||
client()
|
||||
.admin()
|
||||
.indices()
|
||||
.prepareUpdateSettings("test")
|
||||
.setSettings(
|
||||
Settings.builder()
|
||||
.put("index.acc.test.pw", "asdf"))
|
||||
.execute()
|
||||
.actionGet());
|
||||
assertEquals("Missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage());
|
||||
|
||||
// user has no dependency
|
||||
client()
|
||||
.admin()
|
||||
.indices()
|
||||
.prepareUpdateSettings("test")
|
||||
.setSettings(
|
||||
Settings.builder()
|
||||
.put("index.acc.test.user", "asdf"))
|
||||
.execute()
|
||||
.actionGet();
|
||||
|
||||
// now we are consistent
|
||||
client()
|
||||
.admin()
|
||||
.indices()
|
||||
.prepareUpdateSettings("test")
|
||||
.setSettings(
|
||||
Settings.builder()
|
||||
.put("index.acc.test.pw", "test"))
|
||||
.execute()
|
||||
.actionGet();
|
||||
|
||||
// now try to remove it and make sure it fails
|
||||
iae = expectThrows(IllegalArgumentException.class, () ->
|
||||
client()
|
||||
.admin()
|
||||
.indices()
|
||||
.prepareUpdateSettings("test")
|
||||
.setSettings(
|
||||
Settings.builder()
|
||||
.putNull("index.acc.test.user"))
|
||||
.execute()
|
||||
.actionGet());
|
||||
assertEquals("Missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage());
|
||||
|
||||
// now we are consistent
|
||||
client()
|
||||
.admin()
|
||||
.indices()
|
||||
.prepareUpdateSettings("test")
|
||||
.setSettings(
|
||||
Settings.builder()
|
||||
.putNull("index.acc.test.pw")
|
||||
.putNull("index.acc.test.user"))
|
||||
.execute()
|
||||
.actionGet();
|
||||
}
|
||||
}
|
||||
|
||||
public void testResetDefault() {
|
||||
createIndex("test");
|
||||
|
||||
|
|
|
@@ -91,8 +91,10 @@ public class SearchRequestTests extends AbstractSearchTestCase {
assertNull(validationErrors);
}
{
// disabeling `track_total_hits` isn't valid in scroll context
// disabling `track_total_hits` isn't valid in scroll context
SearchRequest searchRequest = createSearchRequest().source(new SearchSourceBuilder());
// make sure we don't set the request cache for a scroll query
searchRequest.requestCache(false);
searchRequest.scroll(new TimeValue(1000));
searchRequest.source().trackTotalHits(false);
ActionRequestValidationException validationErrors = searchRequest.validate();

@@ -103,6 +105,8 @@ public class SearchRequestTests extends AbstractSearchTestCase {
{
// scroll and `from` isn't valid
SearchRequest searchRequest = createSearchRequest().source(new SearchSourceBuilder());
// make sure we don't set the request cache for a scroll query
searchRequest.requestCache(false);
searchRequest.scroll(new TimeValue(1000));
searchRequest.source().from(10);
ActionRequestValidationException validationErrors = searchRequest.validate();

@@ -82,7 +82,7 @@ public class InternalExtendedStatsTests extends InternalAggregationTestCase<Inte
assertEquals(expectedCount, reduced.getCount());
// The order in which you add double values in java can give different results. The difference can
// be larger for large sum values, so we make the delta in the assertion depend on the values magnitude
assertEquals(expectedSum, reduced.getSum(), Math.abs(expectedSum) * 1e-12);
assertEquals(expectedSum, reduced.getSum(), Math.abs(expectedSum) * 1e-11);
assertEquals(expectedMin, reduced.getMin(), 0d);
assertEquals(expectedMax, reduced.getMax(), 0d);
// summing squared values, see reason for delta above

@ -40,8 +40,8 @@ import org.elasticsearch.common.geo.GeoUtils;
|
|||
import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
|
||||
import org.elasticsearch.common.geo.builders.LineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.MultiPolygonBuilder;
|
||||
import org.elasticsearch.common.geo.builders.PointBuilder;
|
||||
import org.elasticsearch.common.geo.builders.PolygonBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilders;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.logging.ESLoggerFactory;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -121,7 +121,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
public void testShapeBuilders() {
|
||||
try {
|
||||
// self intersection polygon
|
||||
ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-10, -10)
|
||||
.coordinate(10, 10)
|
||||
.coordinate(-10, 10)
|
||||
|
@ -133,13 +133,13 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
// polygon with hole
|
||||
ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close())
|
||||
.hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(-5, -5).coordinate(-5, 5).coordinate(5, 5).coordinate(5, -5).close()))
|
||||
.build();
|
||||
try {
|
||||
// polygon with overlapping hole
|
||||
ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close())
|
||||
.hole(new LineStringBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-5, -5).coordinate(-5, 11).coordinate(5, 11).coordinate(5, -5).close()))
|
||||
|
@ -151,7 +151,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
|
||||
try {
|
||||
// polygon with intersection holes
|
||||
ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close())
|
||||
.hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(-5, -5).coordinate(-5, 5).coordinate(5, 5).coordinate(5, -5).close()))
|
||||
.hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(-5, -6).coordinate(5, -6).coordinate(5, -4).coordinate(-5, -4).close()))
|
||||
|
@ -162,7 +162,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
|
||||
try {
|
||||
// Common line in polygon
|
||||
ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-10, -10)
|
||||
.coordinate(-10, 10)
|
||||
.coordinate(-5, 10)
|
||||
|
@ -177,8 +177,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
// Multipolygon: polygon with hole and polygon within the whole
|
||||
ShapeBuilders
|
||||
.newMultiPolygon()
|
||||
new MultiPolygonBuilder()
|
||||
.polygon(new PolygonBuilder(
|
||||
new CoordinatesBuilder().coordinate(-10, -10)
|
||||
.coordinate(-10, 10)
|
||||
|
@ -223,7 +222,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// Create a multipolygon with two polygons. The first is an rectangle of size 10x10
|
||||
// with a hole of size 5x5 equidistant from all sides. This hole in turn contains
|
||||
// the second polygon of size 4x4 equidistant from all sites
|
||||
MultiPolygonBuilder polygon = ShapeBuilders.newMultiPolygon()
|
||||
MultiPolygonBuilder polygon = new MultiPolygonBuilder()
|
||||
.polygon(new PolygonBuilder(
|
||||
new CoordinatesBuilder().coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close())
|
||||
.hole(new LineStringBuilder(new CoordinatesBuilder()
|
||||
|
@ -238,7 +237,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// Point in polygon
|
||||
SearchResponse result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilders.newPoint(3, 3)))
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(3, 3)))
|
||||
.execute().actionGet();
|
||||
assertHitCount(result, 1);
|
||||
assertFirstHit(result, hasId("1"));
|
||||
|
@ -246,7 +245,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// Point in polygon hole
|
||||
result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilders.newPoint(4.5, 4.5)))
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(4.5, 4.5)))
|
||||
.execute().actionGet();
|
||||
assertHitCount(result, 0);
|
||||
|
||||
|
@ -257,7 +256,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// Point on polygon border
|
||||
result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilders.newPoint(10.0, 5.0)))
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(10.0, 5.0)))
|
||||
.execute().actionGet();
|
||||
assertHitCount(result, 1);
|
||||
assertFirstHit(result, hasId("1"));
|
||||
|
@ -265,7 +264,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// Point on hole border
|
||||
result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilders.newPoint(5.0, 2.0)))
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(5.0, 2.0)))
|
||||
.execute().actionGet();
|
||||
assertHitCount(result, 1);
|
||||
assertFirstHit(result, hasId("1"));
|
||||
|
@ -274,21 +273,21 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// Point not in polygon
|
||||
result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setPostFilter(QueryBuilders.geoDisjointQuery("area", ShapeBuilders.newPoint(3, 3)))
|
||||
.setPostFilter(QueryBuilders.geoDisjointQuery("area", new PointBuilder(3, 3)))
|
||||
.execute().actionGet();
|
||||
assertHitCount(result, 0);
|
||||
|
||||
// Point in polygon hole
|
||||
result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setPostFilter(QueryBuilders.geoDisjointQuery("area", ShapeBuilders.newPoint(4.5, 4.5)))
|
||||
.setPostFilter(QueryBuilders.geoDisjointQuery("area", new PointBuilder(4.5, 4.5)))
|
||||
.execute().actionGet();
|
||||
assertHitCount(result, 1);
|
||||
assertFirstHit(result, hasId("1"));
|
||||
}
|
||||
|
||||
// Create a polygon that fills the empty area of the polygon defined above
|
||||
PolygonBuilder inverse = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder inverse = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-5, -5).coordinate(-5, 5).coordinate(5, 5).coordinate(5, -5).close())
|
||||
.hole(new LineStringBuilder(
|
||||
new CoordinatesBuilder().coordinate(-4, -4).coordinate(-4, 4).coordinate(4, 4).coordinate(4, -4).close()));
|
||||
|
@ -300,20 +299,20 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// re-check point on polygon hole
|
||||
result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilders.newPoint(4.5, 4.5)))
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(4.5, 4.5)))
|
||||
.execute().actionGet();
|
||||
assertHitCount(result, 1);
|
||||
assertFirstHit(result, hasId("2"));
|
||||
|
||||
// Create Polygon with hole and common edge
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close())
|
||||
.hole(new LineStringBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-5, -5).coordinate(-5, 5).coordinate(10, 5).coordinate(10, -5).close()));
|
||||
|
||||
if (withinSupport) {
|
||||
// Polygon WithIn Polygon
|
||||
builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(-30, -30).coordinate(-30, 30).coordinate(30, 30).coordinate(30, -30).close());
|
||||
|
||||
result = client().prepareSearch()
|
||||
|
@ -324,7 +323,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
// Create a polygon crossing longitude 180.
|
||||
builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close());
|
||||
|
||||
data = jsonBuilder().startObject().field("area", builder).endObject().bytes();
|
||||
|
@ -332,7 +331,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
client().admin().indices().prepareRefresh().execute().actionGet();
|
||||
|
||||
// Create a polygon crossing longitude 180 with hole.
|
||||
builder = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
builder = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close())
|
||||
.hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(175, -5).coordinate(185, -5).coordinate(185, 5).coordinate(175, 5).close()));
|
||||
|
||||
|
@ -342,25 +341,25 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
|
||||
result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilders.newPoint(174, -4)))
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(174, -4)))
|
||||
.execute().actionGet();
|
||||
assertHitCount(result, 1);
|
||||
|
||||
result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilders.newPoint(-174, -4)))
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(-174, -4)))
|
||||
.execute().actionGet();
|
||||
assertHitCount(result, 1);
|
||||
|
||||
result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilders.newPoint(180, -4)))
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(180, -4)))
|
||||
.execute().actionGet();
|
||||
assertHitCount(result, 0);
|
||||
|
||||
result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilders.newPoint(180, -6)))
|
||||
.setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(180, -6)))
|
||||
.execute().actionGet();
|
||||
assertHitCount(result, 1);
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.search.geo;
|
||||
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
|
@ -29,6 +30,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.indices.IndicesService;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
@ -88,6 +90,36 @@ public class GeoShapeIntegrationIT extends ESIntegTestCase {
|
|||
assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that ignore_malformed on GeoShapeFieldMapper does not fail the entire document
|
||||
*/
|
||||
public void testIgnoreMalformed() throws Exception {
|
||||
// create index
|
||||
assertAcked(client().admin().indices().prepareCreate("test")
|
||||
.addMapping("geometry", "shape", "type=geo_shape,ignore_malformed=true").get());
|
||||
ensureGreen();
|
||||
|
||||
// test self crossing ccw poly not crossing dateline
|
||||
String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
|
||||
.startArray("coordinates")
|
||||
.startArray()
|
||||
.startArray().value(176.0).value(15.0).endArray()
|
||||
.startArray().value(-177.0).value(10.0).endArray()
|
||||
.startArray().value(-177.0).value(-10.0).endArray()
|
||||
.startArray().value(176.0).value(-15.0).endArray()
|
||||
.startArray().value(-177.0).value(15.0).endArray()
|
||||
.startArray().value(172.0).value(0.0).endArray()
|
||||
.startArray().value(176.0).value(15.0).endArray()
|
||||
.endArray()
|
||||
.endArray()
|
||||
.endObject().string();
|
||||
|
||||
indexRandom(true, client().prepareIndex("test", "geometry", "0").setSource("shape",
|
||||
polygonGeoJson));
|
||||
SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get();
|
||||
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
|
||||
}
|
||||
|
||||
private String findNodeName(String index) {
|
||||
ClusterState state = client().admin().cluster().prepareState().get().getState();
|
||||
IndexShardRoutingTable shard = state.getRoutingTable().index(index).shard(0);
|
||||
|
|
|
@ -19,7 +19,12 @@
|
|||
|
||||
package org.elasticsearch.search.geo;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
|
||||
import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
|
||||
import org.elasticsearch.common.geo.builders.LineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.PolygonBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.locationtech.spatial4j.shape.Rectangle;
|
||||
|
@ -28,12 +33,6 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.common.geo.ShapeRelation;
|
||||
import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
|
||||
import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
|
||||
import org.elasticsearch.common.geo.builders.LineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilders;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
|
@ -102,7 +101,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
|
|||
.endObject()
|
||||
.endObject()).setRefreshPolicy(IMMEDIATE).get();
|
||||
|
||||
ShapeBuilder shape = ShapeBuilders.newEnvelope(new Coordinate(-45, 45), new Coordinate(45, -45));
|
||||
EnvelopeBuilder shape = new EnvelopeBuilder(new Coordinate(-45, 45), new Coordinate(45, -45));
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test").setTypes("type1")
|
||||
.setQuery(geoIntersectionQuery("location", shape))
|
||||
|
@ -146,7 +145,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
|
|||
.endObject()
|
||||
.endObject()).setRefreshPolicy(IMMEDIATE).get();
|
||||
|
||||
ShapeBuilder query = ShapeBuilders.newEnvelope(new Coordinate(-122.88, 48.62), new Coordinate(-122.82, 48.54));
|
||||
EnvelopeBuilder query = new EnvelopeBuilder(new Coordinate(-122.88, 48.62), new Coordinate(-122.82, 48.54));
|
||||
|
||||
// This search would fail if both geoshape indexing and geoshape filtering
|
||||
// used the bottom-level optimization in SpatialPrefixTree#recursiveGetNodes.
|
||||
|
@ -171,7 +170,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
|
|||
createIndex("shapes");
|
||||
ensureGreen();
|
||||
|
||||
ShapeBuilder shape = ShapeBuilders.newEnvelope(new Coordinate(-45, 45), new Coordinate(45, -45));
|
||||
EnvelopeBuilder shape = new EnvelopeBuilder(new Coordinate(-45, 45), new Coordinate(45, -45));
|
||||
|
||||
client().prepareIndex("shapes", "shape_type", "Big_Rectangle").setSource(jsonBuilder().startObject()
|
||||
.field("shape", shape).endObject()).setRefreshPolicy(IMMEDIATE).get();
|
||||
|
@ -215,7 +214,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
|
|||
createIndex("shapes", Settings.EMPTY, "shape_type", "_source", "enabled=false");
|
||||
ensureGreen();
|
||||
|
||||
ShapeBuilder shape = ShapeBuilders.newEnvelope(new Coordinate(-45, 45), new Coordinate(45, -45));
|
||||
EnvelopeBuilder shape = new EnvelopeBuilder(new Coordinate(-45, 45), new Coordinate(45, -45));
|
||||
|
||||
client().prepareIndex("shapes", "shape_type", "Big_Rectangle").setSource(jsonBuilder().startObject()
|
||||
.field("shape", shape).endObject()).setRefreshPolicy(IMMEDIATE).get();
|
||||
|
@ -226,12 +225,12 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
|
||||
public void testReusableBuilder() throws IOException {
|
||||
ShapeBuilder polygon = ShapeBuilders.newPolygon(new CoordinatesBuilder()
|
||||
PolygonBuilder polygon = new PolygonBuilder(new CoordinatesBuilder()
|
||||
.coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close())
|
||||
.hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(175, -5).coordinate(185, -5).coordinate(185, 5).coordinate(175, 5).close()));
|
||||
assertUnmodified(polygon);
|
||||
|
||||
ShapeBuilder linestring = ShapeBuilders.newLineString(new CoordinatesBuilder()
|
||||
LineStringBuilder linestring = new LineStringBuilder(new CoordinatesBuilder()
|
||||
.coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close());
|
||||
assertUnmodified(linestring);
|
||||
}
|
||||
|
@ -403,9 +402,9 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
|
|||
|
||||
GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery(
|
||||
"location",
|
||||
ShapeBuilders.newGeometryCollection()
|
||||
new GeometryCollectionBuilder()
|
||||
.polygon(
|
||||
ShapeBuilders.newPolygon(new CoordinatesBuilder().coordinate(99.0, -1.0).coordinate(99.0, 3.0).coordinate(103.0, 3.0).coordinate(103.0, -1.0)
|
||||
new PolygonBuilder(new CoordinatesBuilder().coordinate(99.0, -1.0).coordinate(99.0, 3.0).coordinate(103.0, 3.0).coordinate(103.0, -1.0)
|
||||
.coordinate(99.0, -1.0)))).relation(ShapeRelation.INTERSECTS);
|
||||
SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery())
|
||||
.setPostFilter(filter).get();
|
||||
|
@ -413,24 +412,24 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
|
|||
assertHitCount(result, 1);
|
||||
filter = QueryBuilders.geoShapeQuery(
|
||||
"location",
|
||||
ShapeBuilders.newGeometryCollection().polygon(
|
||||
ShapeBuilders.newPolygon(new CoordinatesBuilder().coordinate(199.0, -11.0).coordinate(199.0, 13.0).coordinate(193.0, 13.0).coordinate(193.0, -11.0)
|
||||
new GeometryCollectionBuilder().polygon(
|
||||
new PolygonBuilder(new CoordinatesBuilder().coordinate(199.0, -11.0).coordinate(199.0, 13.0).coordinate(193.0, 13.0).coordinate(193.0, -11.0)
|
||||
.coordinate(199.0, -11.0)))).relation(ShapeRelation.INTERSECTS);
|
||||
result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery())
|
||||
.setPostFilter(filter).get();
|
||||
assertSearchResponse(result);
|
||||
assertHitCount(result, 0);
|
||||
filter = QueryBuilders.geoShapeQuery("location", ShapeBuilders.newGeometryCollection()
|
||||
.polygon(ShapeBuilders.newPolygon(new CoordinatesBuilder().coordinate(99.0, -1.0).coordinate(99.0, 3.0).coordinate(103.0, 3.0).coordinate(103.0, -1.0).coordinate(99.0, -1.0)))
|
||||
filter = QueryBuilders.geoShapeQuery("location", new GeometryCollectionBuilder()
|
||||
.polygon(new PolygonBuilder(new CoordinatesBuilder().coordinate(99.0, -1.0).coordinate(99.0, 3.0).coordinate(103.0, 3.0).coordinate(103.0, -1.0).coordinate(99.0, -1.0)))
|
||||
.polygon(
|
||||
ShapeBuilders.newPolygon(new CoordinatesBuilder().coordinate(199.0, -11.0).coordinate(199.0, 13.0).coordinate(193.0, 13.0).coordinate(193.0, -11.0)
|
||||
new PolygonBuilder(new CoordinatesBuilder().coordinate(199.0, -11.0).coordinate(199.0, 13.0).coordinate(193.0, 13.0).coordinate(193.0, -11.0)
|
||||
.coordinate(199.0, -11.0)))).relation(ShapeRelation.INTERSECTS);
|
||||
result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery())
|
||||
.setPostFilter(filter).get();
|
||||
assertSearchResponse(result);
|
||||
assertHitCount(result, 1);
|
||||
// no shape
|
||||
filter = QueryBuilders.geoShapeQuery("location", ShapeBuilders.newGeometryCollection());
|
||||
filter = QueryBuilders.geoShapeQuery("location", new GeometryCollectionBuilder());
|
||||
result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery())
|
||||
.setPostFilter(filter).get();
|
||||
assertSearchResponse(result);
|
||||
|
|
|
@@ -24,7 +24,6 @@ import com.vividsolutions.jts.algorithm.ConvexHull;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.geo.builders.CoordinateCollection;
import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
import org.elasticsearch.common.geo.builders.LineStringBuilder;

@@ -193,7 +192,7 @@ public class RandomShapeGenerator extends RandomGeoGenerator {
p = xRandomPointIn(r, within);
coordinatesBuilder.coordinate(p.getX(), p.getY());
}
CoordinateCollection pcb = (st == ShapeType.MULTIPOINT) ? new MultiPointBuilder(coordinatesBuilder.build()) : new LineStringBuilder(coordinatesBuilder);
ShapeBuilder pcb = (st == ShapeType.MULTIPOINT) ? new MultiPointBuilder(coordinatesBuilder.build()) : new LineStringBuilder(coordinatesBuilder);
return pcb;
case MULTILINESTRING:
MultiLineStringBuilder mlsb = new MultiLineStringBuilder();

@@ -19,6 +19,7 @@

package org.elasticsearch.test.hamcrest;

import org.elasticsearch.common.geo.parsers.ShapeParser;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.ShapeCollection;
import org.locationtech.spatial4j.shape.impl.GeoCircle;

@@ -34,7 +35,6 @@ import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.Polygon;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.hamcrest.Matcher;

@@ -256,7 +256,7 @@ public class ElasticsearchGeoAssertions {

public static void assertValidException(XContentParser parser, Class expectedException) {
try {
    ShapeBuilder.parse(parser).build();
    ShapeParser.parse(parser).build();
    Assert.fail("process completed successfully when " + expectedException.getName() + " expected");
} catch (Exception e) {
    assert(e.getClass().equals(expectedException)):

@@ -175,9 +175,10 @@ The following settings are supported:
http://docs.aws.amazon.com/AmazonS3/latest/dev/uploadobjusingmpu.html[AWS Multipart Upload API]
to split the chunk into several parts, each of `buffer_size` length, and
to upload each part in its own request. Note that setting a buffer
size lower than `5mb` is not allowed since it will prevents the use of the
Multipart API and may result in upload errors. Defaults to the minimum
between `100mb` and `5%` of the heap size.
size lower than `5mb` is not allowed since it will prevent the use of the
Multipart API and may result in upload errors. It is also not possible to
set a buffer size greater than `5gb` as it is the maximum upload size
allowed by S3. Defaults to the minimum between `100mb` and `5%` of the heap size.

`canned_acl`::

@@ -368,7 +368,7 @@ And the response:
health status index uuid pri rep docs.count docs.deleted store.size pri.store.size
yellow open customer 95SQ4TSUT7mWBT7VNHH67A 5 1 0 0 260b 260b
--------------------------------------------------
// TESTRESPONSE[s/95SQ4TSUT7mWBT7VNHH67A/.+/ s/260b/\\d+b/ _cat]
// TESTRESPONSE[s/95SQ4TSUT7mWBT7VNHH67A/.+/ s/260b/\\d+\\.?\\d?k?b/ _cat]

The results of the second command tells us that we now have 1 index named customer and it has 5 primary shards and 1 replica (the defaults) and it contains 0 documents in it.

@@ -1580,6 +1580,8 @@ Converts a JSON string into a structured JSON object.
| `add_to_root` | no | false | Flag that forces the serialized json to be injected into the top level of the document. `target_field` must not be set when this option is chosen.
|======

All JSON-supported types will be parsed (null, boolean, number, array, object, string).

Suppose you provide this configuration of the `json` processor:

[source,js]

@@ -86,6 +86,10 @@ by improving point performance on a `geo_shape` field so that `geo_shape` querie
optimal on a point only field.
| `false`

|`ignore_malformed` |If true, malformed geojson shapes are ignored. If false (default),
malformed geojson shapes throw an exception and reject the whole document.
| `false`

|=======================================================================
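
As a rough, illustrative sketch of the `ignore_malformed` option described in the table above (the index, type, and field names here are placeholders rather than part of the reference), a mapping that tolerates malformed GeoJSON might look like:

[source,js]
--------------------------------------------------
PUT example_index
{
  "mappings": {
    "doc": {
      "properties": {
        "shape": {
          "type": "geo_shape",
          "ignore_malformed": true <1>
        }
      }
    }
  }
}
--------------------------------------------------
<1> Documents whose `shape` value is not valid GeoJSON are still indexed; only the malformed shape is skipped instead of rejecting the whole document.
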
@ -59,69 +59,6 @@ Fields referred in a percolator query may exist in any type of the index contain
|
|||
|
||||
=====================================
|
||||
|
||||
[float]
|
||||
==== Influencing query extraction
|
||||
|
||||
As part of indexing the percolator query, the percolator field mapper extracts the query terms and numeric ranges from the provided
|
||||
query and indexes that alongside the query in separate internal fields. The `percolate` query uses these internal fields
|
||||
to build a candidate query from the document being percolated in order to reduce the number of document that need to be verified.
|
||||
|
||||
In case a percolator query contains a `bool` query with must or filter clauses, then the percolator field mapper only has to
|
||||
extract ranges or terms from a single clause. The percolator field mapper will prefer longer terms over shorter terms, because
|
||||
longer terms in general match with less documents. For the same reason it prefers smaller ranges over bigger ranges.
|
||||
|
||||
In general this behaviour works well. However sometimes there are fields in a bool query that shouldn't be taken into account
|
||||
when selecting the best must or filter clause, or fields are known to be more selective than other fields.
|
||||
|
||||
For example a status like field may in fact not work well, because each status matches with many percolator queries and
|
||||
then the candidate query the `percolate` query generates may not be able to filter out that many percolator queries.
|
||||
|
||||
The percolator field mapping allows to configure `boost_fields` in order to indicate to the percolator what fields are
|
||||
important or not important when selecting the best must or filter clause in a `bool` query:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT another_index
|
||||
{
|
||||
"mappings": {
|
||||
"doc": {
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "percolator",
|
||||
"boost_fields": {
|
||||
"status_field": 0, <1>
|
||||
"price_field": 2 <2>
|
||||
}
|
||||
},
|
||||
"status_field": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"price_field": {
|
||||
"type": "long"
|
||||
},
|
||||
"field": {
|
||||
"type": "text"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
<1> A boost of zero hints to the percolator that if there are other clauses in a conjunction query then these should be
|
||||
preferred over this one.
|
||||
|
||||
<2> Any boost higher than 1 overrides the default behaviour when it comes to selecting the best clause. The clause
|
||||
that has the field with the highest boost will be selected from a conjunction query for extraction.
|
||||
|
||||
The steps the percolator field mapper takes when selecting a clause from a conjunction query:
|
||||
|
||||
* If there are clauses that have boosted fields then the clause with highest boost field is selected.
|
||||
* If there are range based clauses and term based clauses then term based clauses are picked over range based clauses
|
||||
* From all term based clauses the clause with longest term is picked.
|
||||
* In the case when there are only range based clauses then the range clause with smallest range is picked over clauses with wider ranges.
|
||||
|
||||
[float]
|
||||
==== Reindexing your percolator queries
|
||||
|
||||
|
|
|
@@ -33,3 +33,10 @@ The Search API returns `400 - Bad request` while it would previously return
* the number of slices is too large
* keep alive for scroll is too large
* number of filters in the adjacency matrix aggregation is too large


==== Scroll queries cannot use the request_cache anymore

Setting `request_cache:true` on a query that creates a scroll ('scroll=1m`)
has been deprecated in 6 and will now return a `400 - Bad request`.
Scroll queries are not meant to be cached.
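
For example, a request along the lines of the following sketch (the index name is a placeholder) is now rejected with `400 - Bad request` because it opens a scroll while explicitly enabling the request cache:

[source,js]
--------------------------------------------------
GET /example_index/_search?scroll=1m&request_cache=true
{
  "query": {
    "match_all": {}
  }
}
--------------------------------------------------

Leaving `request_cache` unset, or setting it to `false`, keeps the scroll request valid.
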
@@ -41,7 +41,7 @@ Images are available for running Elasticsearch as Docker containers. They may be
downloaded from the Elastic Docker Registry. The default image ships with
{xpack-ref}/index.html[X-Pack] pre-installed.
+
<<docker>>
{ref}/docker.html[Install {es} with Docker]

[float]
[[config-mgmt-tools]]

@@ -66,4 +66,9 @@ include::install/rpm.asciidoc[]

include::install/windows.asciidoc[]

include::install/docker.asciidoc[]
ifdef::include-xpack[]
:edit_url!:
include::{xes-repo-dir}/setup/docker.asciidoc[]

:edit_url:
endif::include-xpack[]

@ -1,336 +0,0 @@
|
|||
[[docker]]
|
||||
=== Install Elasticsearch with Docker
|
||||
|
||||
Elasticsearch is also available as Docker images.
|
||||
The images use https://hub.docker.com/_/centos/[centos:7] as the base image and
|
||||
are available with {xpack-ref}/xpack-introduction.html[X-Pack].
|
||||
|
||||
A list of all published Docker images and tags can be found in https://www.docker.elastic.co[www.docker.elastic.co]. The source code can be found
|
||||
on https://github.com/elastic/elasticsearch-docker/tree/{branch}[GitHub].
|
||||
|
||||
==== Image types
|
||||
|
||||
The images are available in three different configurations or "flavors". The
|
||||
`basic` flavor, which is the default, ships with X-Pack Basic features
|
||||
pre-installed and automatically activated with a free licence. The `platinum`
|
||||
flavor features all X-Pack functionally under a 30-day trial licence. The `oss`
|
||||
flavor does not include X-Pack, and contains only open-source Elasticsearch.
|
||||
|
||||
NOTE: {xpack-ref}/xpack-security.html[X-Pack Security] is enabled in the `platinum`
|
||||
image. To access your cluster, it's necessary to set an initial password for the
|
||||
`elastic` user. The initial password can be set at start up time via the
|
||||
`ELASTIC_PASSWORD` environment variable:
|
||||
|
||||
["source","txt",subs="attributes"]
|
||||
--------------------------------------------
|
||||
docker run -e ELASTIC_PASSWORD=MagicWord {docker-repo}-platinum:{version}
|
||||
--------------------------------------------
|
||||
|
||||
NOTE: The `platinum` image includes a trial license for 30 days. After that, you
|
||||
can obtain one of the https://www.elastic.co/subscriptions[available
|
||||
subscriptions] or revert to a Basic licence. The Basic license is free and
|
||||
includes a selection of X-Pack features.
|
||||
|
||||
Obtaining Elasticsearch for Docker is as simple as issuing a +docker pull+ command against the Elastic Docker registry.
|
||||
|
||||
ifeval::["{release-state}"=="unreleased"]
|
||||
|
||||
WARNING: Version {version} of Elasticsearch has not yet been released, so no
|
||||
Docker image is currently available for this version.
|
||||
|
||||
endif::[]
|
||||
|
||||
ifeval::["{release-state}"!="unreleased"]
|
||||
|
||||
Docker images can be retrieved with the following commands:
|
||||
|
||||
["source","sh",subs="attributes"]
|
||||
--------------------------------------------
|
||||
docker pull {docker-repo}:{version}
|
||||
docker pull {docker-repo}-platinum:{version}
|
||||
docker pull {docker-repo}-oss:{version}
|
||||
--------------------------------------------
|
||||
|
||||
endif::[]
|
||||
|
||||
[[docker-cli-run]]
|
||||
==== Running Elasticsearch from the command line
|
||||
|
||||
[[docker-cli-run-dev-mode]]
|
||||
===== Development mode
|
||||
|
||||
ifeval::["{release-state}"=="unreleased"]
|
||||
|
||||
WARNING: Version {version} of the Elasticsearch Docker image has not yet been released.
|
||||
|
||||
endif::[]
|
||||
|
||||
ifeval::["{release-state}"!="unreleased"]
|
||||
|
||||
Elasticsearch can be quickly started for development or testing use with the following command:
|
||||
|
||||
["source","sh",subs="attributes"]
|
||||
--------------------------------------------
|
||||
docker run -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" {docker-image}
|
||||
--------------------------------------------
|
||||
|
||||
endif::[]
|
||||
|
||||
[[docker-cli-run-prod-mode]]
|
||||
===== Production mode
|
||||
|
||||
[[docker-prod-prerequisites]]
|
||||
[IMPORTANT]
|
||||
=========================
|
||||
|
||||
The `vm.max_map_count` kernel setting needs to be set to at least `262144` for production use.
|
||||
Depending on your platform:
|
||||
|
||||
* Linux
|
||||
+
|
||||
The `vm.max_map_count` setting should be set permanently in /etc/sysctl.conf:
|
||||
+
|
||||
[source,sh]
|
||||
--------------------------------------------
|
||||
$ grep vm.max_map_count /etc/sysctl.conf
|
||||
vm.max_map_count=262144
|
||||
----------------------------------
|
||||
+
|
||||
To apply the setting on a live system type: `sysctl -w vm.max_map_count=262144`
|
||||
+
|
||||
* macOS with https://docs.docker.com/engine/installation/mac/#/docker-for-mac[Docker for Mac]
|
||||
+
|
||||
The `vm.max_map_count` setting must be set within the xhyve virtual machine:
|
||||
+
|
||||
["source","sh"]
|
||||
--------------------------------------------
|
||||
$ screen ~/Library/Containers/com.docker.docker/Data/com.docker.driver.amd64-linux/tty
|
||||
--------------------------------------------
|
||||
+
|
||||
Log in with 'root' and no password.
|
||||
Then configure the `sysctl` setting as you would for Linux:
|
||||
+
|
||||
["source","sh"]
|
||||
--------------------------------------------
|
||||
sysctl -w vm.max_map_count=262144
|
||||
--------------------------------------------
|
||||
+
|
||||
* Windows and macOS with https://www.docker.com/products/docker-toolbox[Docker Toolbox]
|
||||
+
|
||||
The `vm.max_map_count` setting must be set via docker-machine:
|
||||
+
|
||||
["source","txt"]
|
||||
--------------------------------------------
|
||||
docker-machine ssh
|
||||
sudo sysctl -w vm.max_map_count=262144
|
||||
--------------------------------------------
|
||||
=========================
|
||||
|
||||
The following example brings up a cluster comprising two Elasticsearch nodes.
|
||||
To bring up the cluster, use the <<docker-prod-cluster-composefile,`docker-compose.yml`>> and just type:
|
||||
|
||||
ifeval::["{release-state}"=="unreleased"]
|
||||
|
||||
WARNING: Version {version} of Elasticsearch has not yet been released, so a
|
||||
`docker-compose.yml` is not available for this version.
|
||||
|
||||
endif::[]
|
||||
|
||||
ifeval::["{release-state}"!="unreleased"]
|
||||
|
||||
["source","sh"]
|
||||
--------------------------------------------
|
||||
docker-compose up
|
||||
--------------------------------------------
|
||||
|
||||
endif::[]
|
||||
|
||||
[NOTE]
|
||||
`docker-compose` is not pre-installed with Docker on Linux.
|
||||
Instructions for installing it can be found on the
|
||||
https://docs.docker.com/compose/install/#install-using-pip[Docker Compose webpage].
|
||||
|
||||
The node `elasticsearch` listens on `localhost:9200` while `elasticsearch2`
|
||||
talks to `elasticsearch` over a Docker network.
|
||||
|
||||
This example also uses https://docs.docker.com/engine/tutorials/dockervolumes[Docker named volumes], called `esdata1` and `esdata2` which will be created if not already present.
|
||||
|
||||
[[docker-prod-cluster-composefile]]
|
||||
`docker-compose.yml`:
|
||||
ifeval::["{release-state}"=="unreleased"]
|
||||
|
||||
WARNING: Version {version} of Elasticsearch has not yet been released, so a
|
||||
`docker-compose.yml` is not available for this version.
|
||||
|
||||
endif::[]
|
||||
|
||||
ifeval::["{release-state}"!="unreleased"]
|
||||
["source","yaml",subs="attributes"]
|
||||
--------------------------------------------
|
||||
version: 2.2
|
||||
services:
|
||||
elasticsearch:
|
||||
image: {docker-image}
|
||||
container_name: elasticsearch
|
||||
environment:
|
||||
- cluster.name=docker-cluster
|
||||
- bootstrap.memory_lock=true
|
||||
- "ES_JAVA_OPTS=-Xms512m -Xmx512m"
|
||||
ulimits:
|
||||
memlock:
|
||||
soft: -1
|
||||
hard: -1
|
||||
volumes:
|
||||
- esdata1:/usr/share/elasticsearch/data
|
||||
ports:
|
||||
- 9200:9200
|
||||
networks:
|
||||
- esnet
|
||||
elasticsearch2:
|
||||
image: {docker-image}
|
||||
container_name: elasticsearch2
|
||||
environment:
|
||||
- cluster.name=docker-cluster
|
||||
- bootstrap.memory_lock=true
|
||||
- "ES_JAVA_OPTS=-Xms512m -Xmx512m"
|
||||
- "discovery.zen.ping.unicast.hosts=elasticsearch"
|
||||
ulimits:
|
||||
memlock:
|
||||
soft: -1
|
||||
hard: -1
|
||||
volumes:
|
||||
- esdata2:/usr/share/elasticsearch/data
|
||||
networks:
|
||||
- esnet
|
||||
|
||||
volumes:
|
||||
esdata1:
|
||||
driver: local
|
||||
esdata2:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
esnet:
|
||||
--------------------------------------------
|
||||
endif::[]
|
||||
|
||||
To stop the cluster, type `docker-compose down`. Data volumes will persist, so it's possible to start the cluster again with the same data using `docker-compose up`.
|
||||
To destroy the cluster **and the data volumes**, just type `docker-compose down -v`.
|
||||
|
||||
===== Inspect status of cluster:
|
||||
|
||||
["source","txt"]
|
||||
--------------------------------------------
|
||||
curl http://127.0.0.1:9200/_cat/health
|
||||
1472225929 15:38:49 docker-cluster green 2 2 4 2 0 0 0 0 - 100.0%
|
||||
--------------------------------------------
|
||||
// NOTCONSOLE
|
||||
|
||||
Log messages go to the console and are handled by the configured Docker logging driver. By default you can access logs with `docker logs`.
|
||||

[[docker-configuration-methods]]
==== Configuring Elasticsearch with Docker

Elasticsearch loads its configuration from files under `/usr/share/elasticsearch/config/`. These configuration files are documented in <<settings>> and <<jvm-options>>.

The image offers several methods for configuring Elasticsearch settings. The conventional approach is to provide customized files, such as `elasticsearch.yml`, but it's also possible to use environment variables to set options:

===== A. Present the parameters via Docker environment variables
For example, to define the cluster name with `docker run` you can pass `-e "cluster.name=mynewclustername"`. Double quotes are required.
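
As an illustrative sketch (the cluster name is a placeholder, as above), a complete invocation might look like:

["source","sh",subs="attributes"]
--------------------------------------------
docker run -p 9200:9200 -p 9300:9300 -e "cluster.name=mynewclustername" {docker-image}
--------------------------------------------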

===== B. Bind-mounted configuration
Create your custom config file and mount this over the image's corresponding file.
For example, bind-mounting a `custom_elasticsearch.yml` with `docker run` can be accomplished with the parameter:

["source","sh"]
--------------------------------------------
-v full_path_to/custom_elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
--------------------------------------------
IMPORTANT: The container **runs Elasticsearch as user `elasticsearch` using uid:gid `1000:1000`**. Bind mounted host directories and files, such as `custom_elasticsearch.yml` above, **need to be accessible by this user**. For the https://www.elastic.co/guide/en/elasticsearch/reference/current/important-settings.html#path-settings[data and log dirs], such as `/usr/share/elasticsearch/data`, write access is required as well. Also see note 1 below.
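
Putting it together, a sketch of a full `docker run` invocation with such a bind mount (the host path placeholder is kept as above):

["source","sh",subs="attributes"]
--------------------------------------------
docker run -p 9200:9200 \
    -v full_path_to/custom_elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml \
    {docker-image}
--------------------------------------------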

===== C. Customized image
In some environments, it may make more sense to prepare a custom image containing your configuration. A `Dockerfile` to achieve this may be as simple as:

["source","sh",subs="attributes"]
--------------------------------------------
FROM docker.elastic.co/elasticsearch/elasticsearch:{version}
COPY --chown=elasticsearch:elasticsearch elasticsearch.yml /usr/share/elasticsearch/config/
--------------------------------------------

You could then build and try the image with something like:

["source","sh"]
--------------------------------------------
docker build --tag=elasticsearch-custom .
docker run -ti -v /usr/share/elasticsearch/data elasticsearch-custom
--------------------------------------------

===== D. Override the image's default https://docs.docker.com/engine/reference/run/#cmd-default-command-or-options[CMD]

Options can be passed as command-line options to the Elasticsearch process by
overriding the default command for the image. For example:

["source","sh"]
--------------------------------------------
docker run <various parameters> bin/elasticsearch -Ecluster.name=mynewclustername
--------------------------------------------

==== Notes for production use and defaults

We have collected a number of best practices for production use.
Any Docker parameters mentioned below assume the use of `docker run`.

. By default, Elasticsearch runs inside the container as user `elasticsearch` using uid:gid `1000:1000`.
+
CAUTION: One exception is https://docs.openshift.com/container-platform/3.6/creating_images/guidelines.html#openshift-specific-guidelines[Openshift] which runs containers using an arbitrarily assigned user ID. Openshift will present persistent volumes with the gid set to `0`, which will work without any adjustments.
+
If you are bind-mounting a local directory or file, ensure it is readable by this user, while the <<path-settings,data and log dirs>> additionally require write access. A good strategy is to grant group access to gid `1000` or `0` for the local directory. As an example, to prepare a local directory for storing data through a bind-mount:
+
  mkdir esdatadir
  chmod g+rwx esdatadir
  chgrp 1000 esdatadir
+
As a last resort, you can also force the container to mutate the ownership of any bind-mounts used for the <<path-settings,data and log dirs>> through the environment variable `TAKE_FILE_OWNERSHIP`; in this case they will be owned by uid:gid `1000:0`, providing read/write access to the Elasticsearch process as required.
+
. It is important to ensure increased ulimits for <<setting-system-settings,nofile>> and <<max-number-threads-check,nproc>> are available for the Elasticsearch containers. Verify the https://github.com/moby/moby/tree/ea4d1243953e6b652082305a9c3cda8656edab26/contrib/init[init system] for the Docker daemon is already setting those to acceptable values and, if needed, adjust them in the Daemon, or override them per container, for example using `docker run`:
+
  --ulimit nofile=65536:65536
+
NOTE: One way of checking the Docker daemon defaults for the aforementioned ulimits is by running:
+
  docker run --rm centos:7 /bin/bash -c 'ulimit -Hn && ulimit -Sn && ulimit -Hu && ulimit -Su'
+
. Swapping needs to be disabled for performance and node stability. This can be
achieved through any of the methods mentioned in the
<<setup-configuration-memory,Elasticsearch docs>>. If you opt for the
`bootstrap.memory_lock: true` approach, apart from defining it through any of
the <<docker-configuration-methods,configuration methods>>, you will
additionally need the `memlock: true` ulimit, either defined in the
https://docs.docker.com/engine/reference/commandline/dockerd/#default-ulimits[Docker
Daemon] or specifically set for the container. This is demonstrated above in the
<<docker-prod-cluster-composefile,docker-compose.yml>>. If using `docker run`:
+
  -e "bootstrap.memory_lock=true" --ulimit memlock=-1:-1
+
. The image https://docs.docker.com/engine/reference/builder/#/expose[exposes] TCP ports 9200 and 9300. For clusters it is recommended to randomize the published ports with `--publish-all`, unless you are pinning one container per host.
+
. Use the `ES_JAVA_OPTS` environment variable to set heap size, e.g. to use 16GB
use `-e ES_JAVA_OPTS="-Xms16g -Xmx16g"` with `docker run`.
+
. Pin your deployments to a specific version of the Elasticsearch Docker image, e.g. +docker.elastic.co/elasticsearch/elasticsearch:{version}+.
+
. Always use a volume bound on `/usr/share/elasticsearch/data`, as shown in the <<docker-cli-run-prod-mode,production example>>, for the following reasons:
+
.. The data of your Elasticsearch node won't be lost if the container is killed
.. Elasticsearch is I/O sensitive and the Docker storage driver is not ideal for fast I/O
.. It allows the use of advanced https://docs.docker.com/engine/extend/plugins/#volume-plugins[Docker volume plugins]
+
. If you are using the devicemapper storage driver, make sure you are not using
the default `loop-lvm` mode. Configure docker-engine to use
https://docs.docker.com/engine/userguide/storagedriver/device-mapper-driver/#configure-docker-with-devicemapper[direct-lvm]
instead.
+
. Consider centralizing your logs by using a different https://docs.docker.com/engine/admin/logging/overview/[logging driver]. Also note that the default json-file logging driver is not ideally suited for production use.


include::next-steps.asciidoc[]
|
|
@ -19,14 +19,24 @@
|
|||
|
||||
package org.elasticsearch.ingest.common;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonParseException;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParserUtils;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContentParser;
|
||||
import org.elasticsearch.ingest.AbstractProcessor;
|
||||
import org.elasticsearch.ingest.ConfigurationUtils;
|
||||
import org.elasticsearch.ingest.IngestDocument;
|
||||
import org.elasticsearch.ingest.Processor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException;
|
||||
|
@ -64,17 +74,36 @@ public final class JsonProcessor extends AbstractProcessor {
|
|||
|
||||
@Override
|
||||
public void execute(IngestDocument document) throws Exception {
|
||||
String stringValue = document.getFieldValue(field, String.class);
|
||||
try {
|
||||
Map<String, Object> mapValue = XContentHelper.convertToMap(JsonXContent.jsonXContent, stringValue, false);
|
||||
if (addToRoot) {
|
||||
for (Map.Entry<String, Object> entry : mapValue.entrySet()) {
|
||||
Object fieldValue = document.getFieldValue(field, Object.class);
|
||||
BytesReference bytesRef = (fieldValue == null) ? new BytesArray("null") : new BytesArray(fieldValue.toString());
|
||||
try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, bytesRef)) {
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
Object value = null;
|
||||
if (token == XContentParser.Token.VALUE_NULL) {
|
||||
value = null;
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
value = parser.text();
|
||||
} else if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
value = parser.numberValue();
|
||||
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
|
||||
value = parser.booleanValue();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
value = parser.map();
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
value = parser.list();
|
||||
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
|
||||
throw new IllegalArgumentException("cannot read binary value");
|
||||
}
|
||||
if (addToRoot && (value instanceof Map)) {
|
||||
for (Map.Entry<String, Object> entry : ((Map<String, Object>) value).entrySet()) {
|
||||
document.setFieldValue(entry.getKey(), entry.getValue());
|
||||
}
|
||||
} else if (addToRoot) {
|
||||
throw new IllegalArgumentException("cannot add non-map fields to root of document");
|
||||
} else {
|
||||
document.setFieldValue(targetField, mapValue);
|
||||
document.setFieldValue(targetField, value);
|
||||
}
|
||||
} catch (ElasticsearchParseException e) {
|
||||
} catch (IOException e) {
|
||||
throw new IllegalArgumentException(e);
|
||||
}
|
||||
}
|
||||
|
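
A rough usage sketch of the behaviour added above, not part of this commit (the pipeline body and field name are illustrative): with this change the `json` processor parses a scalar JSON string such as "3" into a number instead of failing, which can be checked via the simulate API:

curl -XPOST 'localhost:9200/_ingest/pipeline/_simulate?pretty' -H 'Content-Type: application/json' -d '
{
  "pipeline": { "processors": [ { "json": { "field": "foo_number", "target_field": "parsed" } } ] },
  "docs": [ { "_source": { "foo_number": "3" } } ]
}'
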
|
|
@ -21,15 +21,19 @@ package org.elasticsearch.ingest.common;
|
|||
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.ingest.IngestDocument;
|
||||
import org.elasticsearch.ingest.RandomDocumentPicks;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class JsonProcessorTests extends ESTestCase {
|
||||
|
@ -44,7 +48,7 @@ public class JsonProcessorTests extends ESTestCase {
|
|||
|
||||
Map<String, Object> randomJsonMap = RandomDocumentPicks.randomSource(random());
|
||||
XContentBuilder builder = JsonXContent.contentBuilder().map(randomJsonMap);
|
||||
String randomJson = XContentHelper.convertToJson(builder.bytes(), false);
|
||||
String randomJson = XContentHelper.convertToJson(builder.bytes(), false, XContentType.JSON);
|
||||
document.put(randomField, randomJson);
|
||||
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
|
||||
|
@ -53,16 +57,84 @@ public class JsonProcessorTests extends ESTestCase {
|
|||
assertIngestDocument(ingestDocument.getFieldValue(randomTargetField, Object.class), jsonified);
|
||||
}
|
||||
|
||||
public void testInvalidJson() {
|
||||
public void testInvalidValue() {
|
||||
JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false);
|
||||
Map<String, Object> document = new HashMap<>();
|
||||
document.put("field", "invalid json");
|
||||
document.put("field", "blah blah");
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
|
||||
|
||||
Exception exception = expectThrows(IllegalArgumentException.class, () -> jsonProcessor.execute(ingestDocument));
|
||||
assertThat(exception.getCause().getCause().getMessage(), equalTo("Unrecognized token"
|
||||
+ " 'invalid': was expecting ('true', 'false' or 'null')\n"
|
||||
+ " at [Source: invalid json; line: 1, column: 8]"));
|
||||
assertThat(exception.getCause().getMessage(), containsString("Unrecognized token 'blah': " +
|
||||
"was expecting ('true', 'false' or 'null')"));
|
||||
}
|
||||
|
||||
public void testByteArray() {
|
||||
JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false);
|
||||
Map<String, Object> document = new HashMap<>();
|
||||
document.put("field", new byte[] { 0, 1 });
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
|
||||
|
||||
Exception exception = expectThrows(IllegalArgumentException.class, () -> jsonProcessor.execute(ingestDocument));
|
||||
assertThat(exception.getCause().getMessage(), containsString("Unrecognized token 'B': was expecting ('true', 'false' or 'null')"));
|
||||
}
|
||||
|
||||
public void testNull() throws Exception {
|
||||
JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false);
|
||||
Map<String, Object> document = new HashMap<>();
|
||||
document.put("field", null);
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
|
||||
jsonProcessor.execute(ingestDocument);
|
||||
assertNull(ingestDocument.getFieldValue("target_field", Object.class));
|
||||
}
|
||||
|
||||
public void testBoolean() throws Exception {
|
||||
JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false);
|
||||
Map<String, Object> document = new HashMap<>();
|
||||
boolean value = true;
|
||||
document.put("field", value);
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
|
||||
jsonProcessor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target_field", Object.class), equalTo(value));
|
||||
}
|
||||
|
||||
public void testInteger() throws Exception {
|
||||
JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false);
|
||||
Map<String, Object> document = new HashMap<>();
|
||||
int value = 3;
|
||||
document.put("field", value);
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
|
||||
jsonProcessor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target_field", Object.class), equalTo(value));
|
||||
}
|
||||
|
||||
public void testDouble() throws Exception {
|
||||
JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false);
|
||||
Map<String, Object> document = new HashMap<>();
|
||||
double value = 3.0;
|
||||
document.put("field", value);
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
|
||||
jsonProcessor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target_field", Object.class), equalTo(value));
|
||||
}
|
||||
|
||||
public void testString() throws Exception {
|
||||
JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false);
|
||||
Map<String, Object> document = new HashMap<>();
|
||||
String value = "hello world";
|
||||
document.put("field", "\"" + value + "\"");
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
|
||||
jsonProcessor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target_field", Object.class), equalTo(value));
|
||||
}
|
||||
|
||||
public void testArray() throws Exception {
|
||||
JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false);
|
||||
Map<String, Object> document = new HashMap<>();
|
||||
List<Boolean> value = Arrays.asList(true, true, false);
|
||||
document.put("field", value.toString());
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
|
||||
jsonProcessor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target_field", Object.class), equalTo(value));
|
||||
}
|
||||
|
||||
public void testFieldMissing() {
|
||||
|
@ -96,4 +168,13 @@ public class JsonProcessorTests extends ESTestCase {
|
|||
|
||||
assertIngestDocument(ingestDocument, expectedIngestDocument);
|
||||
}
|
||||
|
||||
public void testAddBoolToRoot() {
|
||||
JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", true);
|
||||
Map<String, Object> document = new HashMap<>();
|
||||
document.put("field", true);
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
|
||||
Exception exception = expectThrows(IllegalArgumentException.class, () -> jsonProcessor.execute(ingestDocument));
|
||||
assertThat(exception.getMessage(), containsString("cannot add non-map fields to root of document"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,7 +15,32 @@ teardown:
|
|||
"processors": [
|
||||
{
|
||||
"json" : {
|
||||
"field" : "foo"
|
||||
"field" : "foo_object"
|
||||
}
|
||||
},
|
||||
{
|
||||
"json" : {
|
||||
"field" : "foo_array"
|
||||
}
|
||||
},
|
||||
{
|
||||
"json" : {
|
||||
"field" : "foo_null"
|
||||
}
|
||||
},
|
||||
{
|
||||
"json" : {
|
||||
"field" : "foo_string"
|
||||
}
|
||||
},
|
||||
{
|
||||
"json" : {
|
||||
"field" : "foo_number"
|
||||
}
|
||||
},
|
||||
{
|
||||
"json" : {
|
||||
"field" : "foo_boolean"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
@ -29,7 +54,12 @@ teardown:
|
|||
id: 1
|
||||
pipeline: "1"
|
||||
body: {
|
||||
foo: "{\"hello\": \"world\"}"
|
||||
foo_object: "{\"hello\": \"world\"}",
|
||||
foo_array: "[1, 2, 3]",
|
||||
foo_null: null,
|
||||
foo_string: "\"bla bla\"",
|
||||
foo_number: 3,
|
||||
foo_boolean: "true"
|
||||
}
|
||||
|
||||
- do:
|
||||
|
@ -37,4 +67,9 @@ teardown:
|
|||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
- match: { _source.foo.hello: "world" }
|
||||
- match: { _source.foo_object.hello: "world" }
|
||||
- match: { _source.foo_array.0: 1 }
|
||||
- match: { _source.foo_string: "bla bla" }
|
||||
- match: { _source.foo_number: 3 }
|
||||
- is_true: _source.foo_boolean
|
||||
- is_false: _source.foo_null
|
||||
|
|
|
@ -183,6 +183,10 @@ final class PercolateQuery extends Query implements Accountable {
|
|||
return queryStore;
|
||||
}
|
||||
|
||||
Query getCandidateMatchesQuery() {
|
||||
return candidateMatchesQuery;
|
||||
}
|
||||
|
||||
// Comparing identity here to avoid being cached
|
||||
// Note that in theory if the same instance gets used multiple times it could still get cached,
|
||||
// however since we create a new query instance each time we use this query this shouldn't happen and thus
|
||||
|
|
|
@ -639,7 +639,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
|
|||
String name = this.name != null ? this.name : field;
|
||||
PercolatorFieldMapper.FieldType pft = (PercolatorFieldMapper.FieldType) fieldType;
|
||||
PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, percolateShardContext, mapUnmappedFieldsAsString);
|
||||
return pft.percolateQuery(name, queryStore, documents, docSearcher);
|
||||
return pft.percolateQuery(name, queryStore, documents, docSearcher, context.indexVersionCreated());
|
||||
}
|
||||
|
||||
public String getField() {
|
||||
|
|
|
@ -20,6 +20,7 @@ package org.elasticsearch.percolator;
|
|||
|
||||
import org.apache.lucene.document.BinaryRange;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.NumericDocValuesField;
|
||||
import org.apache.lucene.index.DocValuesType;
|
||||
import org.apache.lucene.index.FieldInfo;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
|
@ -30,10 +31,12 @@ import org.apache.lucene.index.PointValues;
|
|||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.search.BooleanClause.Occur;
|
||||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.CoveringQuery;
|
||||
import org.apache.lucene.search.DocValuesFieldExistsQuery;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.LongValuesSource;
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermInSetQuery;
|
||||
|
@ -44,6 +47,7 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.hash.MurmurHash3;
|
||||
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
|
@ -62,6 +66,7 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.RangeFieldMapper;
|
||||
import org.elasticsearch.index.mapper.RangeFieldMapper.RangeType;
|
||||
|
@ -87,9 +92,6 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.isObject;
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue;
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue;
|
||||
import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
|
||||
|
||||
public class PercolatorFieldMapper extends FieldMapper {
|
||||
|
@ -113,11 +115,11 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
static final String EXTRACTION_RESULT_FIELD_NAME = "extraction_result";
|
||||
static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field";
|
||||
static final String RANGE_FIELD_NAME = "range_field";
|
||||
static final String MINIMUM_SHOULD_MATCH_FIELD_NAME = "minimum_should_match_field";
|
||||
|
||||
static class Builder extends FieldMapper.Builder<Builder, PercolatorFieldMapper> {
|
||||
|
||||
private final Supplier<QueryShardContext> queryShardContext;
|
||||
private final Map<String, Float> boostFields = new HashMap<>();
|
||||
|
||||
Builder(String fieldName, Supplier<QueryShardContext> queryShardContext) {
|
||||
super(fieldName, FIELD_TYPE, FIELD_TYPE);
|
||||
|
@ -138,15 +140,13 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
// have to introduce a new field type...
|
||||
RangeFieldMapper rangeFieldMapper = createExtractedRangeFieldBuilder(RANGE_FIELD_NAME, RangeType.IP, context);
|
||||
fieldType.rangeField = rangeFieldMapper.fieldType();
|
||||
NumberFieldMapper minimumShouldMatchFieldMapper = createMinimumShouldMatchField(context);
|
||||
fieldType.minimumShouldMatchField = minimumShouldMatchFieldMapper.fieldType();
|
||||
context.path().remove();
|
||||
setupFieldType(context);
|
||||
return new PercolatorFieldMapper(name(), fieldType, defaultFieldType, context.indexSettings(),
|
||||
multiFieldsBuilder.build(this, context), copyTo, queryShardContext, extractedTermsField,
|
||||
extractionResultField, queryBuilderField, rangeFieldMapper, Collections.unmodifiableMap(boostFields));
|
||||
}
|
||||
|
||||
void addBoostField(String field, float boost) {
|
||||
this.boostFields.put(field, boost);
|
||||
extractionResultField, queryBuilderField, rangeFieldMapper, minimumShouldMatchFieldMapper);
|
||||
}
|
||||
|
||||
static KeywordFieldMapper createExtractQueryFieldBuilder(String name, BuilderContext context) {
|
||||
|
@ -173,30 +173,23 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
return builder.build(context);
|
||||
}
|
||||
|
||||
static NumberFieldMapper createMinimumShouldMatchField(BuilderContext context) {
|
||||
NumberFieldMapper.Builder builder =
|
||||
new NumberFieldMapper.Builder(MINIMUM_SHOULD_MATCH_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER);
|
||||
builder.index(false);
|
||||
builder.store(false);
|
||||
builder.docValues(true);
|
||||
builder.fieldType().setDocValuesType(DocValuesType.NUMERIC);
|
||||
return builder.build(context);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class TypeParser implements FieldMapper.TypeParser {
|
||||
|
||||
@Override
|
||||
public Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
Builder builder = new Builder(name, parserContext.queryShardContextSupplier());
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String propName = entry.getKey();
|
||||
Object propNode = entry.getValue();
|
||||
if (propName.equals("boost_fields")) {
|
||||
if (isObject(propNode)) {
|
||||
for (Map.Entry<?, ?> innerEntry : ((Map<?, ?>) propNode).entrySet()) {
|
||||
String fieldName = nodeStringValue(innerEntry.getKey(), null);
|
||||
builder.addBoostField(fieldName, nodeFloatValue(innerEntry.getValue()));
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("boost_fields [" + propNode + "] is not an object");
|
||||
}
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
return new Builder(name, parserContext.queryShardContextSupplier());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -205,6 +198,7 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
MappedFieldType queryTermsField;
|
||||
MappedFieldType extractionResultField;
|
||||
MappedFieldType queryBuilderField;
|
||||
MappedFieldType minimumShouldMatchField;
|
||||
|
||||
RangeFieldMapper.RangeFieldType rangeField;
|
||||
|
||||
|
@ -220,6 +214,7 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
extractionResultField = ref.extractionResultField;
|
||||
queryBuilderField = ref.queryBuilderField;
|
||||
rangeField = ref.rangeField;
|
||||
minimumShouldMatchField = ref.minimumShouldMatchField;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -247,23 +242,38 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
Query percolateQuery(String name, PercolateQuery.QueryStore queryStore, List<BytesReference> documents,
|
||||
IndexSearcher searcher) throws IOException {
|
||||
IndexSearcher searcher, Version indexVersion) throws IOException {
|
||||
IndexReader indexReader = searcher.getIndexReader();
|
||||
Query candidateMatchesQuery = createCandidateQuery(indexReader);
|
||||
Tuple<List<Query>, Boolean> t = createCandidateQueryClauses(indexReader);
|
||||
BooleanQuery.Builder candidateQuery = new BooleanQuery.Builder();
|
||||
if (t.v2() && indexVersion.onOrAfter(Version.V_6_1_0)) {
|
||||
LongValuesSource valuesSource = LongValuesSource.fromIntField(minimumShouldMatchField.name());
|
||||
candidateQuery.add(new CoveringQuery(t.v1(), valuesSource), BooleanClause.Occur.SHOULD);
|
||||
} else {
|
||||
for (Query query : t.v1()) {
|
||||
candidateQuery.add(query, BooleanClause.Occur.SHOULD);
|
||||
}
|
||||
}
|
||||
// include extractionResultField:failed, because docs with this term have no extractedTermsField
|
||||
// and otherwise we would fail to return these docs. Docs that failed query term extraction
|
||||
// always need to be verified by MemoryIndex:
|
||||
candidateQuery.add(new TermQuery(new Term(extractionResultField.name(), EXTRACTION_FAILED)), BooleanClause.Occur.SHOULD);
|
||||
|
||||
Query verifiedMatchesQuery;
|
||||
// We can only skip the MemoryIndex verification when percolating a single document.
|
||||
// When the document being percolated contains a nested object field then the MemoryIndex contains multiple
|
||||
// documents. In this case the term query that indicates whether memory index verification can be skipped
|
||||
// can incorrectly indicate that non nested queries would match, while their nested variants would not.
|
||||
if (indexReader.maxDoc() == 1) {
|
||||
// We can only skip the MemoryIndex verification when percolating a single non nested document. We cannot
|
||||
// skip MemoryIndex verification when percolating multiple documents, because when terms and
|
||||
// ranges are extracted from IndexReader backed by a RamDirectory holding multiple documents we do
|
||||
// not know to which document the terms belong and for certain queries we incorrectly emit candidate
|
||||
// matches as actual match.
|
||||
if (t.v2() && indexReader.maxDoc() == 1) {
|
||||
verifiedMatchesQuery = new TermQuery(new Term(extractionResultField.name(), EXTRACTION_COMPLETE));
|
||||
} else {
|
||||
verifiedMatchesQuery = new MatchNoDocsQuery("multiple/nested docs, so no verified matches");
|
||||
verifiedMatchesQuery = new MatchNoDocsQuery("multiple or nested docs or CoveringQuery could not be used");
|
||||
}
|
||||
return new PercolateQuery(name, queryStore, documents, candidateMatchesQuery, searcher, verifiedMatchesQuery);
|
||||
return new PercolateQuery(name, queryStore, documents, candidateQuery.build(), searcher, verifiedMatchesQuery);
|
||||
}
|
||||
|
||||
Query createCandidateQuery(IndexReader indexReader) throws IOException {
|
||||
Tuple<List<Query>, Boolean> createCandidateQueryClauses(IndexReader indexReader) throws IOException {
|
||||
List<BytesRef> extractedTerms = new ArrayList<>();
|
||||
Map<String, List<byte[]>> encodedPointValuesByField = new HashMap<>();
|
||||
|
||||
|
@ -290,14 +300,17 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
BooleanQuery.Builder builder = new BooleanQuery.Builder();
|
||||
if (extractedTerms.size() != 0) {
|
||||
builder.add(new TermInSetQuery(queryTermsField.name(), extractedTerms), Occur.SHOULD);
|
||||
final boolean canUseMinimumShouldMatchField;
|
||||
final List<Query> queries = new ArrayList<>();
|
||||
if (extractedTerms.size() + encodedPointValuesByField.size() <= BooleanQuery.getMaxClauseCount()) {
|
||||
canUseMinimumShouldMatchField = true;
|
||||
for (BytesRef extractedTerm : extractedTerms) {
|
||||
queries.add(new TermQuery(new Term(queryTermsField.name(), extractedTerm)));
|
||||
}
|
||||
} else {
|
||||
canUseMinimumShouldMatchField = false;
|
||||
queries.add(new TermInSetQuery(queryTermsField.name(), extractedTerms));
|
||||
}
|
||||
// include extractionResultField:failed, because docs with this term have no extractedTermsField
|
||||
// and otherwise we would fail to return these docs. Docs that failed query term extraction
|
||||
// always need to be verified by MemoryIndex:
|
||||
builder.add(new TermQuery(new Term(extractionResultField.name(), EXTRACTION_FAILED)), Occur.SHOULD);
|
||||
|
||||
for (Map.Entry<String, List<byte[]>> entry : encodedPointValuesByField.entrySet()) {
|
||||
String rangeFieldName = entry.getKey();
|
||||
|
@ -305,9 +318,9 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
byte[] min = encodedPointValues.get(0);
|
||||
byte[] max = encodedPointValues.get(1);
|
||||
Query query = BinaryRange.newIntersectsQuery(rangeField.name(), encodeRange(rangeFieldName, min, max));
|
||||
builder.add(query, Occur.SHOULD);
|
||||
queries.add(query);
|
||||
}
|
||||
return builder.build();
|
||||
return new Tuple<>(queries, canUseMinimumShouldMatchField);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -317,24 +330,24 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
private KeywordFieldMapper queryTermsField;
|
||||
private KeywordFieldMapper extractionResultField;
|
||||
private BinaryFieldMapper queryBuilderField;
|
||||
private NumberFieldMapper minimumShouldMatchFieldMapper;
|
||||
|
||||
private RangeFieldMapper rangeFieldMapper;
|
||||
private Map<String, Float> boostFields;
|
||||
|
||||
PercolatorFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
|
||||
Settings indexSettings, MultiFields multiFields, CopyTo copyTo,
|
||||
Supplier<QueryShardContext> queryShardContext,
|
||||
KeywordFieldMapper queryTermsField, KeywordFieldMapper extractionResultField,
|
||||
BinaryFieldMapper queryBuilderField, RangeFieldMapper rangeFieldMapper,
|
||||
Map<String, Float> boostFields) {
|
||||
Settings indexSettings, MultiFields multiFields, CopyTo copyTo,
|
||||
Supplier<QueryShardContext> queryShardContext,
|
||||
KeywordFieldMapper queryTermsField, KeywordFieldMapper extractionResultField,
|
||||
BinaryFieldMapper queryBuilderField, RangeFieldMapper rangeFieldMapper,
|
||||
NumberFieldMapper minimumShouldMatchFieldMapper) {
|
||||
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
|
||||
this.queryShardContext = queryShardContext;
|
||||
this.queryTermsField = queryTermsField;
|
||||
this.extractionResultField = extractionResultField;
|
||||
this.queryBuilderField = queryBuilderField;
|
||||
this.minimumShouldMatchFieldMapper = minimumShouldMatchFieldMapper;
|
||||
this.mapUnmappedFieldAsText = getMapUnmappedFieldAsText(indexSettings);
|
||||
this.rangeFieldMapper = rangeFieldMapper;
|
||||
this.boostFields = boostFields;
|
||||
}
|
||||
|
||||
private static boolean getMapUnmappedFieldAsText(Settings indexSettings) {
|
||||
|
@ -361,6 +374,7 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
KeywordFieldMapper extractionResultUpdated = (KeywordFieldMapper) extractionResultField.updateFieldType(fullNameToFieldType);
|
||||
BinaryFieldMapper queryBuilderUpdated = (BinaryFieldMapper) queryBuilderField.updateFieldType(fullNameToFieldType);
|
||||
RangeFieldMapper rangeFieldMapperUpdated = (RangeFieldMapper) rangeFieldMapper.updateFieldType(fullNameToFieldType);
|
||||
NumberFieldMapper msmFieldMapperUpdated = (NumberFieldMapper) minimumShouldMatchFieldMapper.updateFieldType(fullNameToFieldType);
|
||||
|
||||
if (updated == this && queryTermsUpdated == queryTermsField && extractionResultUpdated == extractionResultField
|
||||
&& queryBuilderUpdated == queryBuilderField && rangeFieldMapperUpdated == rangeFieldMapper) {
|
||||
|
@ -373,6 +387,7 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
updated.extractionResultField = extractionResultUpdated;
|
||||
updated.queryBuilderField = queryBuilderUpdated;
|
||||
updated.rangeFieldMapper = rangeFieldMapperUpdated;
|
||||
updated.minimumShouldMatchFieldMapper = msmFieldMapperUpdated;
|
||||
return updated;
|
||||
}
|
||||
|
||||
|
@ -429,7 +444,8 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
FieldType pft = (FieldType) this.fieldType();
|
||||
QueryAnalyzer.Result result;
|
||||
try {
|
||||
result = QueryAnalyzer.analyze(query, boostFields);
|
||||
Version indexVersion = context.mapperService().getIndexSettings().getIndexVersionCreated();
|
||||
result = QueryAnalyzer.analyze(query, indexVersion);
|
||||
} catch (QueryAnalyzer.UnsupportedQueryException e) {
|
||||
doc.add(new Field(pft.extractionResultField.name(), EXTRACTION_FAILED, extractionResultField.fieldType()));
|
||||
return;
|
||||
|
@ -457,6 +473,9 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
for (IndexableField field : fields) {
|
||||
context.doc().add(field);
|
||||
}
|
||||
if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0)) {
|
||||
doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
|
||||
}
|
||||
}
|
||||
|
||||
static Query parseQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, XContentParser parser) throws IOException {
|
||||
|
@ -491,7 +510,9 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
|
||||
@Override
|
||||
public Iterator<Mapper> iterator() {
|
||||
return Arrays.<Mapper>asList(queryTermsField, extractionResultField, queryBuilderField, rangeFieldMapper).iterator();
|
||||
return Arrays.<Mapper>asList(
|
||||
queryTermsField, extractionResultField, queryBuilderField, minimumShouldMatchFieldMapper, rangeFieldMapper
|
||||
).iterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -504,28 +525,6 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
PercolatorFieldMapper percolatorMergeWith = (PercolatorFieldMapper) mergeWith;
|
||||
|
||||
// Updating the boost_fields can be allowed, because it doesn't break previously indexed percolator queries
|
||||
// However the updated boost_fields to completely take effect, percolator queries prior to the mapping update need to be reindexed
|
||||
boostFields = percolatorMergeWith.boostFields;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
super.doXContentBody(builder, includeDefaults, params);
|
||||
if (boostFields.isEmpty() == false) {
|
||||
builder.startObject("boost_fields");
|
||||
for (Map.Entry<String, Float> entry : boostFields.entrySet()) {
|
||||
builder.field(entry.getKey(), entry.getValue());
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
}
|
||||
|
||||
boolean isMapUnmappedFieldAsText() {
|
||||
return mapUnmappedFieldAsText;
|
||||
}
|
||||
|
|
|
@ -45,6 +45,7 @@ import org.apache.lucene.search.spans.SpanQuery;
|
|||
import org.apache.lucene.search.spans.SpanTermQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.logging.LoggerMessageFormat;
|
||||
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
|
||||
import org.elasticsearch.index.search.ESToParentBlockJoinQuery;
|
||||
|
@ -59,16 +60,15 @@ import java.util.Map;
|
|||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import static java.util.stream.Collectors.toSet;
|
||||
|
||||
final class QueryAnalyzer {
|
||||
|
||||
private static final Map<Class<? extends Query>, BiFunction<Query, Map<String, Float>, Result>> queryProcessors;
|
||||
private static final Map<Class<? extends Query>, BiFunction<Query, Version, Result>> queryProcessors;
|
||||
|
||||
static {
|
||||
Map<Class<? extends Query>, BiFunction<Query, Map<String, Float>, Result>> map = new HashMap<>();
|
||||
Map<Class<? extends Query>, BiFunction<Query, Version, Result>> map = new HashMap<>();
|
||||
map.put(MatchNoDocsQuery.class, matchNoDocsQuery());
|
||||
map.put(ConstantScoreQuery.class, constantScoreQuery());
|
||||
map.put(BoostQuery.class, boostQuery());
|
||||
|
@ -119,161 +119,196 @@ final class QueryAnalyzer {
|
|||
* Sometimes the query analyzer can't always extract terms or ranges from a sub query, if that happens then
|
||||
* query analysis is stopped and an UnsupportedQueryException is thrown. So that the caller can mark
|
||||
* this query in such a way that the PercolatorQuery always verifies this query with the MemoryIndex.
|
||||
*
|
||||
* @param query The query to analyze.
|
||||
* @param indexVersion The creation version of the index containing the percolator queries.
|
||||
*/
|
||||
static Result analyze(Query query, Map<String, Float> boosts) {
|
||||
static Result analyze(Query query, Version indexVersion) {
|
||||
Class queryClass = query.getClass();
|
||||
if (queryClass.isAnonymousClass()) {
|
||||
// Sometimes queries have anonymous classes in that case we need the direct super class.
|
||||
// (for example blended term query)
|
||||
queryClass = queryClass.getSuperclass();
|
||||
}
|
||||
BiFunction<Query, Map<String, Float>, Result> queryProcessor = queryProcessors.get(queryClass);
|
||||
BiFunction<Query, Version, Result> queryProcessor = queryProcessors.get(queryClass);
|
||||
if (queryProcessor != null) {
|
||||
return queryProcessor.apply(query, boosts);
|
||||
return queryProcessor.apply(query, indexVersion);
|
||||
} else {
|
||||
throw new UnsupportedQueryException(query);
|
||||
}
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> matchNoDocsQuery() {
|
||||
return (query, boosts) -> new Result(true, Collections.emptySet());
|
||||
private static BiFunction<Query, Version, Result> matchNoDocsQuery() {
|
||||
return (query, version) -> new Result(true, Collections.emptySet(), 1);
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> constantScoreQuery() {
|
||||
return (query, boosts)-> {
|
||||
private static BiFunction<Query, Version, Result> constantScoreQuery() {
|
||||
return (query, boosts) -> {
|
||||
Query wrappedQuery = ((ConstantScoreQuery) query).getQuery();
|
||||
return analyze(wrappedQuery, boosts);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> boostQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> boostQuery() {
|
||||
return (query, version) -> {
|
||||
Query wrappedQuery = ((BoostQuery) query).getQuery();
|
||||
return analyze(wrappedQuery, boosts);
|
||||
return analyze(wrappedQuery, version);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> termQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> termQuery() {
|
||||
return (query, version) -> {
|
||||
TermQuery termQuery = (TermQuery) query;
|
||||
return new Result(true, Collections.singleton(new QueryExtraction(termQuery.getTerm())));
|
||||
return new Result(true, Collections.singleton(new QueryExtraction(termQuery.getTerm())), 1);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> termInSetQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> termInSetQuery() {
|
||||
return (query, version) -> {
|
||||
TermInSetQuery termInSetQuery = (TermInSetQuery) query;
|
||||
Set<QueryExtraction> terms = new HashSet<>();
|
||||
PrefixCodedTerms.TermIterator iterator = termInSetQuery.getTermData().iterator();
|
||||
for (BytesRef term = iterator.next(); term != null; term = iterator.next()) {
|
||||
terms.add(new QueryExtraction(new Term(iterator.field(), term)));
|
||||
}
|
||||
return new Result(true, terms);
|
||||
return new Result(true, terms, 1);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> synonymQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> synonymQuery() {
|
||||
return (query, version) -> {
|
||||
Set<QueryExtraction> terms = ((SynonymQuery) query).getTerms().stream().map(QueryExtraction::new).collect(toSet());
|
||||
return new Result(true, terms);
|
||||
return new Result(true, terms, 1);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> commonTermsQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> commonTermsQuery() {
|
||||
return (query, version) -> {
|
||||
Set<QueryExtraction> terms = ((CommonTermsQuery) query).getTerms().stream().map(QueryExtraction::new).collect(toSet());
|
||||
return new Result(false, terms);
|
||||
return new Result(false, terms, 1);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> blendedTermQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> blendedTermQuery() {
|
||||
return (query, version) -> {
|
||||
Set<QueryExtraction> terms = ((BlendedTermQuery) query).getTerms().stream().map(QueryExtraction::new).collect(toSet());
|
||||
return new Result(true, terms);
|
||||
return new Result(true, terms, 1);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> phraseQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> phraseQuery() {
|
||||
return (query, version) -> {
|
||||
Term[] terms = ((PhraseQuery) query).getTerms();
|
||||
if (terms.length == 0) {
|
||||
return new Result(true, Collections.emptySet());
|
||||
return new Result(true, Collections.emptySet(), 1);
|
||||
}
|
||||
|
||||
// the longest term is likely to be the rarest,
|
||||
// so from a performance perspective it makes sense to extract that
|
||||
Term longestTerm = terms[0];
|
||||
for (Term term : terms) {
|
||||
if (longestTerm.bytes().length < term.bytes().length) {
|
||||
longestTerm = term;
|
||||
if (version.onOrAfter(Version.V_6_1_0)) {
|
||||
Set<QueryExtraction> extractions = Arrays.stream(terms).map(QueryExtraction::new).collect(toSet());
|
||||
return new Result(false, extractions, extractions.size());
|
||||
} else {
|
||||
// the longest term is likely to be the rarest,
|
||||
// so from a performance perspective it makes sense to extract that
|
||||
Term longestTerm = terms[0];
|
||||
for (Term term : terms) {
|
||||
if (longestTerm.bytes().length < term.bytes().length) {
|
||||
longestTerm = term;
|
||||
}
|
||||
}
|
||||
return new Result(false, Collections.singleton(new QueryExtraction(longestTerm)), 1);
|
||||
}
|
||||
return new Result(false, Collections.singleton(new QueryExtraction(longestTerm)));
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> multiPhraseQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> multiPhraseQuery() {
|
||||
return (query, version) -> {
|
||||
Term[][] terms = ((MultiPhraseQuery) query).getTermArrays();
|
||||
if (terms.length == 0) {
|
||||
return new Result(true, Collections.emptySet());
|
||||
return new Result(true, Collections.emptySet(), 1);
|
||||
}
|
||||
|
||||
Set<QueryExtraction> bestTermArr = null;
|
||||
for (Term[] termArr : terms) {
|
||||
Set<QueryExtraction> queryExtractions = Arrays.stream(termArr).map(QueryExtraction::new).collect(toSet());
|
||||
bestTermArr = selectBestExtraction(boosts, bestTermArr, queryExtractions);
|
||||
if (version.onOrAfter(Version.V_6_1_0)) {
|
||||
Set<QueryExtraction> extractions = new HashSet<>();
|
||||
for (Term[] termArr : terms) {
|
||||
extractions.addAll(Arrays.stream(termArr).map(QueryExtraction::new).collect(toSet()));
|
||||
}
|
||||
return new Result(false, extractions, terms.length);
|
||||
} else {
|
||||
Set<QueryExtraction> bestTermArr = null;
|
||||
for (Term[] termArr : terms) {
|
||||
Set<QueryExtraction> queryExtractions = Arrays.stream(termArr).map(QueryExtraction::new).collect(toSet());
|
||||
bestTermArr = selectBestExtraction(bestTermArr, queryExtractions);
|
||||
}
|
||||
return new Result(false, bestTermArr, 1);
|
||||
}
|
||||
return new Result(false, bestTermArr);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> spanTermQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> spanTermQuery() {
|
||||
return (query, version) -> {
|
||||
Term term = ((SpanTermQuery) query).getTerm();
|
||||
return new Result(true, Collections.singleton(new QueryExtraction(term)));
|
||||
return new Result(true, Collections.singleton(new QueryExtraction(term)), 1);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> spanNearQuery() {
|
||||
return (query, boosts) -> {
|
||||
Set<QueryExtraction> bestClauses = null;
|
||||
private static BiFunction<Query, Version, Result> spanNearQuery() {
|
||||
return (query, version) -> {
|
||||
SpanNearQuery spanNearQuery = (SpanNearQuery) query;
|
||||
for (SpanQuery clause : spanNearQuery.getClauses()) {
|
||||
Result temp = analyze(clause, boosts);
|
||||
bestClauses = selectBestExtraction(boosts, temp.extractions, bestClauses);
|
||||
if (version.onOrAfter(Version.V_6_1_0)) {
|
||||
Set<Result> results = Arrays.stream(spanNearQuery.getClauses()).map(clause -> analyze(clause, version)).collect(toSet());
|
||||
int msm = 0;
|
||||
Set<QueryExtraction> extractions = new HashSet<>();
|
||||
Set<String> seenRangeFields = new HashSet<>();
|
||||
for (Result result : results) {
|
||||
QueryExtraction[] t = result.extractions.toArray(new QueryExtraction[1]);
|
||||
if (result.extractions.size() == 1 && t[0].range != null) {
|
||||
if (seenRangeFields.add(t[0].range.fieldName)) {
|
||||
msm += 1;
|
||||
}
|
||||
} else {
|
||||
msm += result.minimumShouldMatch;
|
||||
}
|
||||
extractions.addAll(result.extractions);
|
||||
}
|
||||
return new Result(false, extractions, msm);
|
||||
} else {
|
||||
Set<QueryExtraction> bestClauses = null;
|
||||
for (SpanQuery clause : spanNearQuery.getClauses()) {
|
||||
Result temp = analyze(clause, version);
|
||||
bestClauses = selectBestExtraction(temp.extractions, bestClauses);
|
||||
}
|
||||
return new Result(false, bestClauses, 1);
|
||||
}
|
||||
return new Result(false, bestClauses);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> spanOrQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> spanOrQuery() {
|
||||
return (query, version) -> {
|
||||
Set<QueryExtraction> terms = new HashSet<>();
|
||||
SpanOrQuery spanOrQuery = (SpanOrQuery) query;
|
||||
for (SpanQuery clause : spanOrQuery.getClauses()) {
|
||||
terms.addAll(analyze(clause, boosts).extractions);
|
||||
terms.addAll(analyze(clause, version).extractions);
|
||||
}
|
||||
return new Result(false, terms);
|
||||
return new Result(false, terms, 1);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> spanNotQuery() {
|
||||
return (query, boosts) -> {
|
||||
Result result = analyze(((SpanNotQuery) query).getInclude(), boosts);
|
||||
return new Result(false, result.extractions);
|
||||
private static BiFunction<Query, Version, Result> spanNotQuery() {
|
||||
return (query, version) -> {
|
||||
Result result = analyze(((SpanNotQuery) query).getInclude(), version);
|
||||
return new Result(false, result.extractions, result.minimumShouldMatch);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> spanFirstQuery() {
|
||||
return (query, boosts) -> {
|
||||
Result result = analyze(((SpanFirstQuery) query).getMatch(), boosts);
|
||||
return new Result(false, result.extractions);
|
||||
private static BiFunction<Query, Version, Result> spanFirstQuery() {
|
||||
return (query, version) -> {
|
||||
Result result = analyze(((SpanFirstQuery) query).getMatch(), version);
|
||||
return new Result(false, result.extractions, result.minimumShouldMatch);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> booleanQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> booleanQuery() {
|
||||
return (query, version) -> {
|
||||
BooleanQuery bq = (BooleanQuery) query;
|
||||
List<BooleanClause> clauses = bq.clauses();
|
||||
int minimumShouldMatch = bq.getMinimumNumberShouldMatch();
|
||||
|
@ -292,34 +327,89 @@ final class QueryAnalyzer {
|
|||
}
|
||||
}
|
||||
if (numRequiredClauses > 0) {
|
||||
Set<QueryExtraction> bestClause = null;
|
||||
UnsupportedQueryException uqe = null;
|
||||
for (BooleanClause clause : clauses) {
|
||||
if (clause.isRequired() == false) {
|
||||
// skip must_not clauses, we don't need to remember the things that do *not* match...
|
||||
// skip should clauses, this bq has must clauses, so we don't need to remember should clauses,
|
||||
// since they are completely optional.
|
||||
continue;
|
||||
if (version.onOrAfter(Version.V_6_1_0)) {
|
||||
UnsupportedQueryException uqe = null;
|
||||
List<Result> results = new ArrayList<>(numRequiredClauses);
|
||||
for (BooleanClause clause : clauses) {
|
||||
if (clause.isRequired()) {
|
||||
// skip must_not clauses, we don't need to remember the things that do *not* match...
|
||||
// skip should clauses, this bq has must clauses, so we don't need to remember should clauses,
|
||||
// since they are completely optional.
|
||||
|
||||
try {
|
||||
results.add(analyze(clause.getQuery(), version));
|
||||
} catch (UnsupportedQueryException e) {
|
||||
uqe = e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Result temp;
|
||||
try {
|
||||
temp = analyze(clause.getQuery(), boosts);
|
||||
} catch (UnsupportedQueryException e) {
|
||||
uqe = e;
|
||||
continue;
|
||||
}
|
||||
bestClause = selectBestExtraction(boosts, temp.extractions, bestClause);
|
||||
}
|
||||
if (bestClause != null) {
|
||||
return new Result(false, bestClause);
|
||||
} else {
|
||||
if (uqe != null) {
|
||||
// we're unable to select the best clause and an exception occurred, so we bail
|
||||
throw uqe;
|
||||
if (results.isEmpty()) {
|
||||
if (uqe != null) {
|
||||
// we're unable to select the best clause and an exception occurred, so we bail
|
||||
throw uqe;
|
||||
} else {
|
||||
// We didn't find a clause and no exception occurred, so this bq only contained MatchNoDocsQueries,
|
||||
return new Result(true, Collections.emptySet(), 1);
|
||||
}
|
||||
} else {
|
||||
// We didn't find a clause and no exception occurred, so this bq only contained MatchNoDocsQueries,
|
||||
return new Result(true, Collections.emptySet());
|
||||
int msm = 0;
|
||||
boolean requiredShouldClauses = minimumShouldMatch > 0 && numOptionalClauses > 0;
|
||||
boolean verified = uqe == null && numProhibitedClauses == 0 && requiredShouldClauses == false;
|
||||
Set<QueryExtraction> extractions = new HashSet<>();
|
||||
Set<String> seenRangeFields = new HashSet<>();
|
||||
for (Result result : results) {
|
||||
QueryExtraction[] t = result.extractions.toArray(new QueryExtraction[1]);
|
||||
if (result.extractions.size() == 1 && t[0].range != null) {
|
||||
// In case of range queries each extraction does not simply increment the minimum_should_match
|
||||
// for that percolator query like for a term based extraction, so that can lead to more false
|
||||
// positives for percolator queries with range queries than term based queries.
|
||||
// The is because the way number fields are extracted from the document to be percolated.
|
||||
// Per field a single range is extracted and if a percolator query has two or more range queries
|
||||
// on the same field than the the minimum should match can be higher than clauses in the CoveringQuery.
|
||||
// Therefore right now the minimum should match is incremented once per number field when processing
|
||||
// the percolator query at index time.
|
||||
if (seenRangeFields.add(t[0].range.fieldName)) {
|
||||
msm += 1;
|
||||
}
|
||||
} else {
|
||||
msm += result.minimumShouldMatch;
|
||||
}
|
||||
verified &= result.verified;
|
||||
extractions.addAll(result.extractions);
|
||||
}
|
||||
return new Result(verified, extractions, msm);
|
||||
}
|
||||
} else {
|
||||
Set<QueryExtraction> bestClause = null;
|
||||
UnsupportedQueryException uqe = null;
|
||||
for (BooleanClause clause : clauses) {
|
||||
if (clause.isRequired() == false) {
|
||||
// skip must_not clauses, we don't need to remember the things that do *not* match...
|
||||
// skip should clauses, this bq has must clauses, so we don't need to remember should clauses,
|
||||
// since they are completely optional.
|
||||
continue;
|
||||
}
|
||||
|
||||
Result temp;
|
||||
try {
|
||||
temp = analyze(clause.getQuery(), version);
|
||||
} catch (UnsupportedQueryException e) {
|
||||
uqe = e;
|
||||
continue;
|
||||
}
|
||||
bestClause = selectBestExtraction(temp.extractions, bestClause);
|
||||
}
|
||||
if (bestClause != null) {
|
||||
return new Result(false, bestClause, 1);
|
||||
} else {
|
||||
if (uqe != null) {
|
||||
// we're unable to select the best clause and an exception occurred, so we bail
|
||||
throw uqe;
|
||||
} else {
|
||||
// We didn't find a clause and no exception occurred, so this bq only contained MatchNoDocsQueries,
|
||||
return new Result(true, Collections.emptySet(), 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@@ -329,33 +419,33 @@ final class QueryAnalyzer {
|
|||
disjunctions.add(clause.getQuery());
|
||||
}
|
||||
}
|
||||
return handleDisjunction(disjunctions, minimumShouldMatch, numProhibitedClauses > 0, boosts);
|
||||
return handleDisjunction(disjunctions, minimumShouldMatch, numProhibitedClauses > 0, version);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> disjunctionMaxQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> disjunctionMaxQuery() {
|
||||
return (query, version) -> {
|
||||
List<Query> disjuncts = ((DisjunctionMaxQuery) query).getDisjuncts();
|
||||
return handleDisjunction(disjuncts, 1, false, boosts);
|
||||
return handleDisjunction(disjuncts, 1, false, version);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> functionScoreQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> functionScoreQuery() {
|
||||
return (query, version) -> {
|
||||
FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) query;
|
||||
Result result = analyze(functionScoreQuery.getSubQuery(), boosts);
|
||||
Result result = analyze(functionScoreQuery.getSubQuery(), version);
|
||||
// If min_score is specified we can't guarantee upfront that this percolator query matches,
|
||||
// so in that case we set verified to false.
// (Even when the extracted terms match, the query as a whole may still not match:
// min_score filters out docs, which is different from the functions, which only influence the score.)
|
||||
boolean verified = functionScoreQuery.getMinScore() == null;
|
||||
return new Result(verified, result.extractions);
|
||||
return new Result(verified, result.extractions, result.minimumShouldMatch);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> pointRangeQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> pointRangeQuery() {
|
||||
return (query, version) -> {
|
||||
PointRangeQuery pointRangeQuery = (PointRangeQuery) query;
|
||||
if (pointRangeQuery.getNumDims() != 1) {
|
||||
throw new UnsupportedQueryException(query);
|
||||
|
@@ -367,14 +457,13 @@ final class QueryAnalyzer {
|
|||
// Need to check whether upper is not smaller than lower, otherwise NumericUtils.subtract(...) fails IAE
|
||||
// If upper is really smaller than lower then we deal with it like a MatchNoDocsQuery (verified and no extractions).
|
||||
if (new BytesRef(lowerPoint).compareTo(new BytesRef(upperPoint)) > 0) {
|
||||
return new Result(true, Collections.emptySet());
|
||||
return new Result(true, Collections.emptySet(), 1);
|
||||
}
|
||||
|
||||
byte[] interval = new byte[16];
|
||||
NumericUtils.subtract(16, 0, prepad(upperPoint), prepad(lowerPoint), interval);
|
||||
return new Result(false, Collections.singleton(new QueryExtraction(
|
||||
new Range(pointRangeQuery.getField(), lowerPoint, upperPoint, interval))
|
||||
));
|
||||
new Range(pointRangeQuery.getField(), lowerPoint, upperPoint, interval))), 1);
|
||||
};
|
||||
}
|
||||
|
||||
|
@@ -385,82 +474,83 @@ final class QueryAnalyzer {
|
|||
return result;
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> indexOrDocValuesQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> indexOrDocValuesQuery() {
|
||||
return (query, version) -> {
|
||||
IndexOrDocValuesQuery indexOrDocValuesQuery = (IndexOrDocValuesQuery) query;
|
||||
return analyze(indexOrDocValuesQuery.getIndexQuery(), boosts);
|
||||
return analyze(indexOrDocValuesQuery.getIndexQuery(), version);
|
||||
};
|
||||
}
|
||||
|
||||
private static BiFunction<Query, Map<String, Float>, Result> toParentBlockJoinQuery() {
|
||||
return (query, boosts) -> {
|
||||
private static BiFunction<Query, Version, Result> toParentBlockJoinQuery() {
|
||||
return (query, version) -> {
|
||||
ESToParentBlockJoinQuery toParentBlockJoinQuery = (ESToParentBlockJoinQuery) query;
|
||||
Result result = analyze(toParentBlockJoinQuery.getChildQuery(), boosts);
|
||||
return new Result(false, result.extractions);
|
||||
Result result = analyze(toParentBlockJoinQuery.getChildQuery(), version);
|
||||
return new Result(false, result.extractions, result.minimumShouldMatch);
|
||||
};
|
||||
}
|
||||
|
||||
private static Result handleDisjunction(List<Query> disjunctions, int minimumShouldMatch, boolean otherClauses,
|
||||
Map<String, Float> boosts) {
|
||||
boolean verified = minimumShouldMatch <= 1 && otherClauses == false;
|
||||
Set<QueryExtraction> terms = new HashSet<>();
|
||||
for (Query disjunct : disjunctions) {
|
||||
Result subResult = analyze(disjunct, boosts);
|
||||
if (subResult.verified == false) {
|
||||
verified = false;
|
||||
}
|
||||
terms.addAll(subResult.extractions);
|
||||
private static Result handleDisjunction(List<Query> disjunctions, int requiredShouldClauses, boolean otherClauses,
|
||||
Version version) {
|
||||
// Keep track of the msm for each clause:
|
||||
int[] msmPerClause = new int[disjunctions.size()];
|
||||
String[] rangeFieldNames = new String[disjunctions.size()];
|
||||
boolean verified = otherClauses == false;
|
||||
if (version.before(Version.V_6_1_0)) {
|
||||
verified &= requiredShouldClauses <= 1;
|
||||
}
|
||||
return new Result(verified, terms);
|
||||
|
||||
Set<QueryExtraction> terms = new HashSet<>();
|
||||
for (int i = 0; i < disjunctions.size(); i++) {
|
||||
Query disjunct = disjunctions.get(i);
|
||||
Result subResult = analyze(disjunct, version);
|
||||
verified &= subResult.verified;
|
||||
terms.addAll(subResult.extractions);
|
||||
|
||||
QueryExtraction[] t = subResult.extractions.toArray(new QueryExtraction[1]);
|
||||
msmPerClause[i] = subResult.minimumShouldMatch;
|
||||
if (subResult.extractions.size() == 1 && t[0].range != null) {
|
||||
rangeFieldNames[i] = t[0].range.fieldName;
|
||||
}
|
||||
}

int msm = 0;
if (version.onOrAfter(Version.V_6_1_0)) {
Set<String> seenRangeFields = new HashSet<>();
// Figure out what the combined msm is for this disjunction:
// (sum the lowest required clauses, otherwise we're too strict and queries may not match)
Arrays.sort(msmPerClause);
int limit = Math.min(msmPerClause.length, Math.max(1, requiredShouldClauses));
for (int i = 0; i < limit; i++) {
if (rangeFieldNames[i] != null) {
if (seenRangeFields.add(rangeFieldNames[i])) {
msm += 1;
}
} else {
msm += msmPerClause[i];
}
}
} else {
msm = 1;
}
return new Result(verified, terms, msm);
}
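// Illustrative sketch (not part of this change): for a disjunction the combined msm is obtained by
// sorting the per clause msm values and summing the lowest max(1, requiredShouldClauses) of them,
// so the indexed value is never stricter than the cheapest combination of clauses that can satisfy
// the query. (Range fields are additionally deduplicated per field, as in handleDisjunction above.)
static int disjunctionMinimumShouldMatch(int[] msmPerClause, int requiredShouldClauses) {
    Arrays.sort(msmPerClause);
    int limit = Math.min(msmPerClause.length, Math.max(1, requiredShouldClauses));
    int msm = 0;
    for (int i = 0; i < limit; i++) {
        msm += msmPerClause[i];
    }
    return msm;
}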
|
||||
|
||||
static Set<QueryExtraction> selectBestExtraction(Map<String, Float> boostFields, Set<QueryExtraction> extractions1,
|
||||
Set<QueryExtraction> extractions2) {
|
||||
static Set<QueryExtraction> selectBestExtraction(Set<QueryExtraction> extractions1, Set<QueryExtraction> extractions2) {
|
||||
assert extractions1 != null || extractions2 != null;
|
||||
if (extractions1 == null) {
|
||||
return extractions2;
|
||||
} else if (extractions2 == null) {
|
||||
return extractions1;
|
||||
} else {
|
||||
Set<QueryExtraction> filtered1;
|
||||
Set<QueryExtraction> filtered2;
|
||||
if (boostFields.isEmpty() == false) {
|
||||
Predicate<QueryExtraction> predicate = extraction -> {
|
||||
String fieldName = extraction.term != null ? extraction.term.field() : extraction.range.fieldName;
|
||||
float boost = boostFields.getOrDefault(fieldName, 1F);
|
||||
return boost != 0F;
|
||||
};
|
||||
filtered1 = extractions1.stream().filter(predicate).collect(toSet());
|
||||
if (filtered1.isEmpty()) {
|
||||
return extractions2;
|
||||
}
|
||||
filtered2 = extractions2.stream().filter(predicate).collect(toSet());
|
||||
if (filtered2.isEmpty()) {
|
||||
return extractions1;
|
||||
}
|
||||
|
||||
float extraction1LowestBoost = lowestBoost(filtered1, boostFields);
|
||||
float extraction2LowestBoost = lowestBoost(filtered2, boostFields);
|
||||
if (extraction1LowestBoost > extraction2LowestBoost) {
|
||||
return extractions1;
|
||||
} else if (extraction2LowestBoost > extraction1LowestBoost) {
|
||||
return extractions2;
|
||||
}
|
||||
// Step out, because boosts are equal, so pick best extraction on either term or range size.
|
||||
} else {
|
||||
filtered1 = extractions1;
|
||||
filtered2 = extractions2;
|
||||
}
|
||||
|
||||
// Prefer term based extractions over range based extractions:
|
||||
boolean onlyRangeBasedExtractions = true;
|
||||
for (QueryExtraction clause : filtered1) {
|
||||
for (QueryExtraction clause : extractions1) {
|
||||
if (clause.term != null) {
|
||||
onlyRangeBasedExtractions = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
for (QueryExtraction clause : filtered2) {
|
||||
for (QueryExtraction clause : extractions2) {
|
||||
if (clause.term != null) {
|
||||
onlyRangeBasedExtractions = false;
|
||||
break;
|
||||
|
@@ -468,8 +558,8 @@ final class QueryAnalyzer {
|
|||
}
|
||||
|
||||
if (onlyRangeBasedExtractions) {
|
||||
BytesRef extraction1SmallestRange = smallestRange(filtered1);
|
||||
BytesRef extraction2SmallestRange = smallestRange(filtered2);
|
||||
BytesRef extraction1SmallestRange = smallestRange(extractions1);
|
||||
BytesRef extraction2SmallestRange = smallestRange(extractions2);
|
||||
if (extraction1SmallestRange == null) {
|
||||
return extractions2;
|
||||
} else if (extraction2SmallestRange == null) {
|
||||
|
@@ -483,8 +573,8 @@ final class QueryAnalyzer {
|
|||
return extractions2;
|
||||
}
|
||||
} else {
|
||||
int extraction1ShortestTerm = minTermLength(filtered1);
|
||||
int extraction2ShortestTerm = minTermLength(filtered2);
|
||||
int extraction1ShortestTerm = minTermLength(extractions1);
|
||||
int extraction2ShortestTerm = minTermLength(extractions2);
|
||||
// keep the clause with the longest terms, as it is likely to be the rarest.
|
||||
if (extraction1ShortestTerm >= extraction2ShortestTerm) {
|
||||
return extractions1;
|
||||
|
@@ -495,21 +585,11 @@ final class QueryAnalyzer {
|
|||
}
|
||||
}
|
||||
|
||||
private static float lowestBoost(Set<QueryExtraction> extractions, Map<String, Float> boostFields) {
|
||||
float lowestBoost = Float.POSITIVE_INFINITY;
|
||||
for (QueryExtraction extraction : extractions) {
|
||||
String fieldName = extraction.term != null ? extraction.term.field() : extraction.range.fieldName;
|
||||
float boost = boostFields.getOrDefault(fieldName, 1F);
|
||||
lowestBoost = Math.min(lowestBoost, boost);
|
||||
}
|
||||
return lowestBoost;
|
||||
}
|
||||
|
||||
private static int minTermLength(Set<QueryExtraction> extractions) {
|
||||
// In case there are only range extractions, then we return Integer.MIN_VALUE,
|
||||
// so that selectBestExtraction(...) is likely to prefer the set of extractions that contains at least one term based extraction
|
||||
if (extractions.stream().filter(queryExtraction -> queryExtraction.term != null).count() == 0 &&
|
||||
extractions.stream().filter(queryExtraction -> queryExtraction.range != null).count() > 0) {
|
||||
extractions.stream().filter(queryExtraction -> queryExtraction.range != null).count() > 0) {
|
||||
return Integer.MIN_VALUE;
|
||||
}
|
||||
|
||||
|
@@ -538,10 +618,12 @@ final class QueryAnalyzer {
|
|||
|
||||
final Set<QueryExtraction> extractions;
final boolean verified;
final int minimumShouldMatch;

Result(boolean verified, Set<QueryExtraction> extractions) {
Result(boolean verified, Set<QueryExtraction> extractions, int minimumShouldMatch) {
this.extractions = extractions;
this.verified = verified;
this.minimumShouldMatch = minimumShouldMatch;
}

}
|
||||
|
|
|
@@ -55,6 +55,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
|
|||
import org.apache.lucene.search.PrefixQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.Scorer;
|
||||
import org.apache.lucene.search.TermInSetQuery;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.search.TopDocs;
|
||||
import org.apache.lucene.search.Weight;
|
||||
|
@@ -64,6 +65,8 @@ import org.apache.lucene.search.spans.SpanNotQuery;
|
|||
import org.apache.lucene.search.spans.SpanOrQuery;
|
||||
import org.apache.lucene.search.spans.SpanTermQuery;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.RAMDirectory;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.CheckedFunction;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
|
@@ -88,6 +91,7 @@ import java.util.function.Function;
|
|||
|
||||
import static org.elasticsearch.common.network.InetAddresses.forString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
||||
public class CandidateQueryTests extends ESSingleNodeTestCase {
|
||||
|
||||
|
@ -307,9 +311,10 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
|||
IndexSearcher shardSearcher = newSearcher(directoryReader);
|
||||
shardSearcher.setQueryCache(null);
|
||||
|
||||
Version v = Version.V_6_1_0;
|
||||
MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new IntPoint("int_field", 3)), new WhitespaceAnalyzer());
|
||||
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
|
||||
Query query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher);
|
||||
Query query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v);
|
||||
TopDocs topDocs = shardSearcher.search(query, 1);
|
||||
assertEquals(1L, topDocs.totalHits);
|
||||
assertEquals(1, topDocs.scoreDocs.length);
|
||||
|
@ -317,7 +322,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
|||
|
||||
memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new LongPoint("long_field", 7L)), new WhitespaceAnalyzer());
|
||||
percolateSearcher = memoryIndex.createSearcher();
|
||||
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher);
|
||||
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v);
|
||||
topDocs = shardSearcher.search(query, 1);
|
||||
assertEquals(1L, topDocs.totalHits);
|
||||
assertEquals(1, topDocs.scoreDocs.length);
|
||||
|
@ -326,7 +331,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
|||
memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new HalfFloatPoint("half_float_field", 12)),
|
||||
new WhitespaceAnalyzer());
|
||||
percolateSearcher = memoryIndex.createSearcher();
|
||||
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher);
|
||||
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v);
|
||||
topDocs = shardSearcher.search(query, 1);
|
||||
assertEquals(1L, topDocs.totalHits);
|
||||
assertEquals(1, topDocs.scoreDocs.length);
|
||||
|
@ -334,7 +339,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
|||
|
||||
memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new FloatPoint("float_field", 17)), new WhitespaceAnalyzer());
|
||||
percolateSearcher = memoryIndex.createSearcher();
|
||||
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher);
|
||||
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v);
|
||||
topDocs = shardSearcher.search(query, 1);
|
||||
assertEquals(1, topDocs.totalHits);
|
||||
assertEquals(1, topDocs.scoreDocs.length);
|
||||
|
@ -342,7 +347,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
|||
|
||||
memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new DoublePoint("double_field", 21)), new WhitespaceAnalyzer());
|
||||
percolateSearcher = memoryIndex.createSearcher();
|
||||
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher);
|
||||
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v);
|
||||
topDocs = shardSearcher.search(query, 1);
|
||||
assertEquals(1, topDocs.totalHits);
|
||||
assertEquals(1, topDocs.scoreDocs.length);
|
||||
|
@ -351,7 +356,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
|||
memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new InetAddressPoint("ip_field",
|
||||
forString("192.168.0.4"))), new WhitespaceAnalyzer());
|
||||
percolateSearcher = memoryIndex.createSearcher();
|
||||
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher);
|
||||
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v);
|
||||
topDocs = shardSearcher.search(query, 1);
|
||||
assertEquals(1, topDocs.totalHits);
|
||||
assertEquals(1, topDocs.scoreDocs.length);
|
||||
|
@ -461,11 +466,99 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
|||
duelRun(queryStore, memoryIndex, shardSearcher);
|
||||
}
|
||||
|
||||
public void testPercolateSmallAndLargeDocument() throws Exception {
|
||||
List<ParseContext.Document> docs = new ArrayList<>();
|
||||
BooleanQuery.Builder builder = new BooleanQuery.Builder();
|
||||
builder.add(new TermQuery(new Term("field", "value1")), BooleanClause.Occur.MUST);
|
||||
builder.add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST);
|
||||
addQuery(builder.build(), docs);
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST);
|
||||
builder.add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST);
|
||||
addQuery(builder.build(), docs);
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST);
|
||||
builder.add(new TermQuery(new Term("field", "value4")), BooleanClause.Occur.MUST);
|
||||
addQuery(builder.build(), docs);
|
||||
indexWriter.addDocuments(docs);
|
||||
indexWriter.close();
|
||||
directoryReader = DirectoryReader.open(directory);
|
||||
IndexSearcher shardSearcher = newSearcher(directoryReader);
|
||||
shardSearcher.setQueryCache(null);
|
||||
|
||||
Version v = Version.CURRENT;
|
||||
|
||||
try (RAMDirectory directory = new RAMDirectory()) {
|
||||
try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) {
|
||||
Document document = new Document();
|
||||
document.add(new StringField("field", "value1", Field.Store.NO));
|
||||
document.add(new StringField("field", "value2", Field.Store.NO));
|
||||
iw.addDocument(document);
|
||||
document = new Document();
|
||||
document.add(new StringField("field", "value5", Field.Store.NO));
|
||||
document.add(new StringField("field", "value6", Field.Store.NO));
|
||||
iw.addDocument(document);
|
||||
document = new Document();
|
||||
document.add(new StringField("field", "value3", Field.Store.NO));
|
||||
document.add(new StringField("field", "value4", Field.Store.NO));
|
||||
iw.addDocument(document);
|
||||
}
|
||||
try (IndexReader ir = DirectoryReader.open(directory)){
|
||||
IndexSearcher percolateSearcher = new IndexSearcher(ir);
|
||||
Query query =
|
||||
fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v);
|
||||
TopDocs topDocs = shardSearcher.search(query, 10);
|
||||
assertEquals(2L, topDocs.totalHits);
|
||||
assertEquals(2, topDocs.scoreDocs.length);
|
||||
assertEquals(0, topDocs.scoreDocs[0].doc);
|
||||
assertEquals(2, topDocs.scoreDocs[1].doc);
|
||||
|
||||
query = new ConstantScoreQuery(query);
|
||||
topDocs = shardSearcher.search(query, 10);
|
||||
assertEquals(2L, topDocs.totalHits);
|
||||
assertEquals(2, topDocs.scoreDocs.length);
|
||||
assertEquals(0, topDocs.scoreDocs[0].doc);
|
||||
assertEquals(2, topDocs.scoreDocs[1].doc);
|
||||
}
|
||||
}
|
||||
|
||||
// This will trigger using the TermInSetQuery instead of individual term query clauses in the CoveringQuery:
|
||||
try (RAMDirectory directory = new RAMDirectory()) {
|
||||
try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) {
|
||||
Document document = new Document();
|
||||
for (int i = 0; i < 1025; i++) {
|
||||
int fieldNumber = 2 + i;
|
||||
document.add(new StringField("field", "value" + fieldNumber, Field.Store.NO));
|
||||
}
|
||||
iw.addDocument(document);
|
||||
}
|
||||
try (IndexReader ir = DirectoryReader.open(directory)){
|
||||
IndexSearcher percolateSearcher = new IndexSearcher(ir);
|
||||
PercolateQuery query = (PercolateQuery)
|
||||
fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v);
|
||||
BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery();
|
||||
assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class));
|
||||
|
||||
TopDocs topDocs = shardSearcher.search(query, 10);
|
||||
assertEquals(2L, topDocs.totalHits);
|
||||
assertEquals(2, topDocs.scoreDocs.length);
|
||||
assertEquals(1, topDocs.scoreDocs[0].doc);
|
||||
assertEquals(2, topDocs.scoreDocs[1].doc);
|
||||
|
||||
topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10);
|
||||
assertEquals(2L, topDocs.totalHits);
|
||||
assertEquals(2, topDocs.scoreDocs.length);
|
||||
assertEquals(1, topDocs.scoreDocs[0].doc);
|
||||
assertEquals(2, topDocs.scoreDocs[1].doc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException {
|
||||
boolean requireScore = randomBoolean();
|
||||
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
|
||||
Query percolateQuery = fieldType.percolateQuery("_name", queryStore,
|
||||
Collections.singletonList(new BytesArray("{}")), percolateSearcher);
|
||||
Collections.singletonList(new BytesArray("{}")), percolateSearcher, Version.CURRENT);
|
||||
Query query = requireScore ? percolateQuery : new ConstantScoreQuery(percolateQuery);
|
||||
TopDocs topDocs = shardSearcher.search(query, 10);
|
||||
|
||||
|
@ -499,7 +592,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
|||
IndexSearcher shardSearcher) throws IOException {
|
||||
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
|
||||
Query percolateQuery = fieldType.percolateQuery("_name", queryStore,
|
||||
Collections.singletonList(new BytesArray("{}")), percolateSearcher);
|
||||
Collections.singletonList(new BytesArray("{}")), percolateSearcher, Version.CURRENT);
|
||||
return shardSearcher.search(percolateQuery, 10);
|
||||
}
|
||||
|
||||
|
|
|
@@ -28,10 +28,8 @@ import org.apache.lucene.document.IntPoint;
|
|||
import org.apache.lucene.document.LongPoint;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.PrefixCodedTerms;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.memory.MemoryIndex;
|
||||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.BooleanClause.Occur;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.PhraseQuery;
|
||||
|
@@ -43,6 +41,7 @@ import org.apache.lucene.search.join.ScoreMode;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.hash.MurmurHash3;
|
||||
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
|
||||
|
@@ -115,6 +114,7 @@ import static org.elasticsearch.percolator.PercolatorFieldMapper.EXTRACTION_PART
|
|||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
|
@ -171,9 +171,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
addQueryFieldMappings();
|
||||
BooleanQuery.Builder bq = new BooleanQuery.Builder();
|
||||
TermQuery termQuery1 = new TermQuery(new Term("field", "term1"));
|
||||
bq.add(termQuery1, BooleanClause.Occur.SHOULD);
|
||||
bq.add(termQuery1, Occur.SHOULD);
|
||||
TermQuery termQuery2 = new TermQuery(new Term("field", "term2"));
|
||||
bq.add(termQuery2, BooleanClause.Occur.SHOULD);
|
||||
bq.add(termQuery2, Occur.SHOULD);
|
||||
|
||||
DocumentMapper documentMapper = mapperService.documentMapper("doc");
|
||||
PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
|
||||
|
@ -189,6 +189,31 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
assertThat(fields.size(), equalTo(2));
|
||||
assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("field\u0000term1"));
|
||||
assertThat(fields.get(1).binaryValue().utf8ToString(), equalTo("field\u0000term2"));
|
||||
|
||||
fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name())));
|
||||
assertThat(fields.size(), equalTo(1));
|
||||
assertThat(fields.get(0).numericValue(), equalTo(1L));
|
||||
|
||||
// Now test conjunction:
|
||||
bq = new BooleanQuery.Builder();
|
||||
bq.add(termQuery1, Occur.MUST);
|
||||
bq.add(termQuery2, Occur.MUST);
|
||||
|
||||
parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, mapperService.documentMapperParser(),
|
||||
documentMapper, null, null);
|
||||
fieldMapper.processQuery(bq.build(), parseContext);
|
||||
document = parseContext.doc();
|
||||
|
||||
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_COMPLETE));
|
||||
fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name())));
|
||||
fields.sort(Comparator.comparing(IndexableField::binaryValue));
|
||||
assertThat(fields.size(), equalTo(2));
|
||||
assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("field\u0000term1"));
|
||||
assertThat(fields.get(1).binaryValue().utf8ToString(), equalTo("field\u0000term2"));
|
||||
|
||||
fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name())));
|
||||
assertThat(fields.size(), equalTo(1));
|
||||
assertThat(fields.get(0).numericValue(), equalTo(2L));
|
||||
}
|
||||
|
||||
public void testExtractRanges() throws Exception {
|
||||
|
@ -212,9 +237,40 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
|
||||
List<IndexableField> fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.rangeField.name())));
|
||||
fields.sort(Comparator.comparing(IndexableField::binaryValue));
|
||||
assertThat(fields.size(), equalTo(1));
|
||||
assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 12), equalTo(15));
|
||||
assertThat(fields.size(), equalTo(2));
|
||||
assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 12), equalTo(10));
|
||||
assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 28), equalTo(20));
|
||||
assertThat(IntPoint.decodeDimension(fields.get(1).binaryValue().bytes, 12), equalTo(15));
|
||||
assertThat(IntPoint.decodeDimension(fields.get(1).binaryValue().bytes, 28), equalTo(20));
|
||||
|
||||
fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name())));
|
||||
assertThat(fields.size(), equalTo(1));
|
||||
assertThat(fields.get(0).numericValue(), equalTo(1L));
|
||||
|
||||
// Range queries on different fields:
|
||||
bq = new BooleanQuery.Builder();
|
||||
bq.add(rangeQuery1, Occur.MUST);
|
||||
rangeQuery2 = mapperService.documentMapper("doc").mappers().getMapper("number_field2").fieldType()
|
||||
.rangeQuery(15, 20, true, true, null, null, null, null);
|
||||
bq.add(rangeQuery2, Occur.MUST);
|
||||
|
||||
parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
|
||||
mapperService.documentMapperParser(), documentMapper, null, null);
|
||||
fieldMapper.processQuery(bq.build(), parseContext);
|
||||
document = parseContext.doc();
|
||||
|
||||
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
|
||||
fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.rangeField.name())));
|
||||
fields.sort(Comparator.comparing(IndexableField::binaryValue));
|
||||
assertThat(fields.size(), equalTo(2));
|
||||
assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 12), equalTo(10));
|
||||
assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 28), equalTo(20));
|
||||
assertThat(LongPoint.decodeDimension(fields.get(1).binaryValue().bytes, 8), equalTo(15L));
|
||||
assertThat(LongPoint.decodeDimension(fields.get(1).binaryValue().bytes, 24), equalTo(20L));
|
||||
|
||||
fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name())));
|
||||
assertThat(fields.size(), equalTo(1));
|
||||
assertThat(fields.get(0).numericValue(), equalTo(2L));
|
||||
}
|
||||
|
||||
public void testExtractTermsAndRanges_failed() throws Exception {
|
||||
|
@ -243,7 +299,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
ParseContext.Document document = parseContext.doc();
|
||||
|
||||
PercolatorFieldMapper.FieldType fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType();
|
||||
assertThat(document.getFields().size(), equalTo(2));
|
||||
assertThat(document.getFields().size(), equalTo(3));
|
||||
assertThat(document.getFields().get(0).binaryValue().utf8ToString(), equalTo("field\u0000term"));
|
||||
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
|
||||
}
|
||||
|
@ -260,35 +316,57 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
|
||||
IndexReader indexReader = memoryIndex.createSearcher().getIndexReader();
|
||||
|
||||
BooleanQuery candidateQuery = (BooleanQuery) fieldType.createCandidateQuery(indexReader);
|
||||
assertEquals(3, candidateQuery.clauses().size());
|
||||
assertEquals(Occur.SHOULD, candidateQuery.clauses().get(0).getOccur());
|
||||
TermInSetQuery termsQuery = (TermInSetQuery) candidateQuery.clauses().get(0).getQuery();
|
||||
Tuple<List<Query>, Boolean> t = fieldType.createCandidateQueryClauses(indexReader);
|
||||
assertTrue(t.v2());
|
||||
List<Query> clauses = t.v1();
|
||||
clauses.sort(Comparator.comparing(Query::toString));
|
||||
assertEquals(15, clauses.size());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":_field3\u0000me", clauses.get(0).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":_field3\u0000unhide", clauses.get(1).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field1\u0000brown", clauses.get(2).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field1\u0000dog", clauses.get(3).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field1\u0000fox", clauses.get(4).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field1\u0000jumps", clauses.get(5).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field1\u0000lazy", clauses.get(6).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field1\u0000over", clauses.get(7).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field1\u0000quick", clauses.get(8).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field1\u0000the", clauses.get(9).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field2\u0000more", clauses.get(10).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field2\u0000some", clauses.get(11).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field2\u0000text", clauses.get(12).toString());
|
||||
assertEquals(fieldType.queryTermsField.name() + ":field4\u0000123", clauses.get(13).toString());
|
||||
assertThat(clauses.get(14).toString(), containsString(fieldName + ".range_field:<ranges:"));
|
||||
}
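// Illustrative sketch (not part of this change), assuming Lucene's CoveringQuery and
// LongValuesSource APIs: at search time the extracted clauses and the indexed
// minimum_should_match value can be combined so that a percolator query only becomes a
// candidate when at least that many of its extracted terms/ranges occur in the document.
private static Query candidateMatchesQuery(List<Query> clauses, String minimumShouldMatchField) {
    LongValuesSource minimumNumberMatch = LongValuesSource.fromLongField(minimumShouldMatchField);
    return new CoveringQuery(clauses, minimumNumberMatch);
}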
|
||||
|
||||
PrefixCodedTerms terms = termsQuery.getTermData();
|
||||
assertThat(terms.size(), equalTo(14L));
|
||||
PrefixCodedTerms.TermIterator termIterator = terms.iterator();
|
||||
assertTermIterator(termIterator, "_field3\u0000me", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "_field3\u0000unhide", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field1\u0000brown", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field1\u0000dog", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field1\u0000fox", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field1\u0000jumps", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field1\u0000lazy", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field1\u0000over", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field1\u0000quick", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field1\u0000the", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field2\u0000more", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field2\u0000some", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field2\u0000text", fieldType.queryTermsField.name());
|
||||
assertTermIterator(termIterator, "field4\u0000123", fieldType.queryTermsField.name());
|
||||
|
||||
assertEquals(Occur.SHOULD, candidateQuery.clauses().get(1).getOccur());
|
||||
assertEquals(new TermQuery(new Term(fieldType.extractionResultField.name(), EXTRACTION_FAILED)),
|
||||
candidateQuery.clauses().get(1).getQuery());
|
||||
public void testCreateCandidateQuery_largeDocument() throws Exception {
|
||||
addQueryFieldMappings();
|
||||
|
||||
assertEquals(Occur.SHOULD, candidateQuery.clauses().get(2).getOccur());
|
||||
assertThat(candidateQuery.clauses().get(2).getQuery().toString(), containsString(fieldName + ".range_field:<ranges:"));
|
||||
MemoryIndex memoryIndex = new MemoryIndex(false);
|
||||
StringBuilder text = new StringBuilder();
|
||||
for (int i = 0; i < 1023; i++) {
|
||||
text.append(i).append(' ');
|
||||
}
|
||||
memoryIndex.addField("field1", text.toString(), new WhitespaceAnalyzer());
|
||||
memoryIndex.addField(new LongPoint("field2", 10L), new WhitespaceAnalyzer());
|
||||
IndexReader indexReader = memoryIndex.createSearcher().getIndexReader();
|
||||
|
||||
Tuple<List<Query>, Boolean> t = fieldType.createCandidateQueryClauses(indexReader);
|
||||
assertTrue(t.v2());
|
||||
List<Query> clauses = t.v1();
|
||||
assertEquals(1024, clauses.size());
|
||||
assertThat(clauses.get(1023).toString(), containsString(fieldName + ".range_field:<ranges:"));
|
||||

// Now push it over the edge, so that it falls back to using TermInSetQuery
memoryIndex.addField("field2", "value", new WhitespaceAnalyzer());
indexReader = memoryIndex.createSearcher().getIndexReader();
t = fieldType.createCandidateQueryClauses(indexReader);
assertFalse(t.v2());
clauses = t.v1();
assertEquals(2, clauses.size());
TermInSetQuery termInSetQuery = (TermInSetQuery) clauses.get(0);
assertEquals(1024, termInSetQuery.getTermData().size());
assertThat(clauses.get(1).toString(), containsString(fieldName + ".range_field:<ranges:"));
}
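// Illustrative sketch (not part of this change): once the number of extracted terms in the
// percolated document would exceed BooleanQuery.getMaxClauseCount() (1024 by default), the
// candidate query falls back to a single TermInSetQuery instead of one SHOULD clause per term,
// at the cost of no longer being able to mark matches as verified. The helper name is hypothetical.
private static Query candidateTermsQuery(String field, List<BytesRef> extractedTerms) {
    if (extractedTerms.size() > BooleanQuery.getMaxClauseCount()) {
        return new TermInSetQuery(field, extractedTerms); // single clause, cannot be verified upfront
    }
    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    for (BytesRef term : extractedTerms) {
        builder.add(new TermQuery(new Term(field, term)), BooleanClause.Occur.SHOULD);
    }
    return builder.build();
}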
|
||||
|
||||
public void testCreateCandidateQuery_numberFields() throws Exception {
|
||||
|
@ -307,38 +385,17 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
|
||||
IndexReader indexReader = memoryIndex.createSearcher().getIndexReader();
|
||||
|
||||
BooleanQuery candidateQuery = (BooleanQuery) fieldType.createCandidateQuery(indexReader);
|
||||
assertEquals(8, candidateQuery.clauses().size());
|
||||
|
||||
assertEquals(Occur.SHOULD, candidateQuery.clauses().get(0).getOccur());
|
||||
assertEquals(new TermQuery(new Term(fieldType.extractionResultField.name(), EXTRACTION_FAILED)),
|
||||
candidateQuery.clauses().get(0).getQuery());
|
||||
|
||||
assertEquals(Occur.SHOULD, candidateQuery.clauses().get(1).getOccur());
|
||||
assertThat(candidateQuery.clauses().get(1).getQuery().toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
|
||||
assertEquals(Occur.SHOULD, candidateQuery.clauses().get(2).getOccur());
|
||||
assertThat(candidateQuery.clauses().get(2).getQuery().toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
|
||||
assertEquals(Occur.SHOULD, candidateQuery.clauses().get(3).getOccur());
|
||||
assertThat(candidateQuery.clauses().get(3).getQuery().toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
|
||||
assertEquals(Occur.SHOULD, candidateQuery.clauses().get(4).getOccur());
|
||||
assertThat(candidateQuery.clauses().get(4).getQuery().toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
|
||||
assertEquals(Occur.SHOULD, candidateQuery.clauses().get(5).getOccur());
|
||||
assertThat(candidateQuery.clauses().get(5).getQuery().toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
|
||||
assertEquals(Occur.SHOULD, candidateQuery.clauses().get(6).getOccur());
|
||||
assertThat(candidateQuery.clauses().get(6).getQuery().toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
|
||||
assertEquals(Occur.SHOULD, candidateQuery.clauses().get(7).getOccur());
|
||||
assertThat(candidateQuery.clauses().get(7).getQuery().toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
}
|
||||
|
||||
private void assertTermIterator(PrefixCodedTerms.TermIterator termIterator, String expectedValue, String expectedField) {
|
||||
assertThat(termIterator.next().utf8ToString(), equalTo(expectedValue));
|
||||
assertThat(termIterator.field(), equalTo(expectedField));
|
||||
Tuple<List<Query>, Boolean> t = fieldType.createCandidateQueryClauses(indexReader);
|
||||
assertThat(t.v2(), is(true));
|
||||
List<Query> clauses = t.v1();
|
||||
assertEquals(7, clauses.size());
|
||||
assertThat(clauses.get(0).toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
assertThat(clauses.get(1).toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
assertThat(clauses.get(2).toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
assertThat(clauses.get(3).toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
assertThat(clauses.get(4).toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
assertThat(clauses.get(5).toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
assertThat(clauses.get(6).toString(), containsString(fieldName + ".range_field:<ranges:[["));
|
||||
}
|
||||
|
||||
public void testPercolatorFieldMapper() throws Exception {
|
||||
|
@ -488,7 +545,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
.field("query_field2", queryBuilder)
|
||||
.endObject().bytes(),
|
||||
XContentType.JSON));
|
||||
assertThat(doc.rootDoc().getFields().size(), equalTo(12)); // also includes all other meta fields
|
||||
assertThat(doc.rootDoc().getFields().size(), equalTo(14)); // also includes all other meta fields
|
||||
BytesRef queryBuilderAsBytes = doc.rootDoc().getField("query_field1.query_builder_field").binaryValue();
|
||||
assertQueryBuilder(queryBuilderAsBytes, queryBuilder);
|
||||
|
||||
|
@ -518,7 +575,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
.field("query_field", queryBuilder)
|
||||
.endObject().endObject().bytes(),
|
||||
XContentType.JSON));
|
||||
assertThat(doc.rootDoc().getFields().size(), equalTo(9)); // also includes all other meta fields
|
||||
assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields
|
||||
BytesRef queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
|
||||
assertQueryBuilder(queryBuilderAsBytes, queryBuilder);
|
||||
|
||||
|
@ -529,7 +586,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
.endArray()
|
||||
.endObject().bytes(),
|
||||
XContentType.JSON));
|
||||
assertThat(doc.rootDoc().getFields().size(), equalTo(9)); // also includes all other meta fields
|
||||
assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields
|
||||
queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
|
||||
assertQueryBuilder(queryBuilderAsBytes, queryBuilder);
|
||||
|
||||
|
@ -741,90 +798,6 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
return Arrays.copyOfRange(source, offset, offset + length);
|
||||
}
|
||||
|
||||
public void testBoostFields() throws Exception {
|
||||
IndexService indexService = createIndex("another_index");
|
||||
MapperService mapperService = indexService.mapperService();
|
||||
|
||||
String mapper = XContentFactory.jsonBuilder().startObject().startObject("doc")
|
||||
.startObject("_field_names").field("enabled", false).endObject() // makes testing easier
|
||||
.startObject("properties")
|
||||
.startObject("status").field("type", "keyword").endObject()
|
||||
.startObject("update_field").field("type", "keyword").endObject()
|
||||
.startObject("price").field("type", "long").endObject()
|
||||
.startObject("query1").field("type", "percolator")
|
||||
.startObject("boost_fields").field("status", 0).field("updated_field", 2).endObject()
|
||||
.endObject()
|
||||
.startObject("query2").field("type", "percolator").endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, false);
|
||||
DocumentMapper documentMapper = mapperService.documentMapper("doc");
|
||||
|
||||
BooleanQuery.Builder bq = new BooleanQuery.Builder();
|
||||
bq.add(new TermQuery(new Term("status", "updated")), Occur.FILTER);
|
||||
bq.add(LongPoint.newRangeQuery("price", 5, 10), Occur.FILTER);
|
||||
|
||||
// Boost fields will ignore status_field:
|
||||
PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper("query1");
|
||||
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
|
||||
mapperService.documentMapperParser(), documentMapper, null, null);
|
||||
fieldMapper.processQuery(bq.build(), parseContext);
|
||||
ParseContext.Document document = parseContext.doc();
|
||||
PercolatorFieldMapper.FieldType fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType();
|
||||
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
|
||||
assertThat(document.getFields(fieldType.queryTermsField.name()).length, equalTo(0));
|
||||
List<IndexableField> fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.rangeField.name())));
|
||||
assertThat(fields.size(), equalTo(1));
|
||||
assertThat(LongPoint.decodeDimension(subByteArray(fields.get(0).binaryValue().bytes, 8, 8), 0), equalTo(5L));
|
||||
assertThat(LongPoint.decodeDimension(subByteArray(fields.get(0).binaryValue().bytes, 24, 8), 0), equalTo(10L));
|
||||
|
||||
// No boost fields, so default extraction logic:
|
||||
fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper("query2");
|
||||
parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, mapperService.documentMapperParser(),
|
||||
documentMapper, null, null);
|
||||
fieldMapper.processQuery(bq.build(), parseContext);
|
||||
document = parseContext.doc();
|
||||
fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType();
|
||||
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
|
||||
assertThat(document.getFields(fieldType.rangeField.name()).length, equalTo(0));
|
||||
fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name())));
|
||||
assertThat(fields.size(), equalTo(1));
|
||||
assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("status\0updated"));
|
||||
|
||||
// Second clause is extracted, because it is boosted by 2:
|
||||
bq = new BooleanQuery.Builder();
|
||||
bq.add(new TermQuery(new Term("status", "updated")), Occur.FILTER);
|
||||
bq.add(new TermQuery(new Term("updated_field", "done")), Occur.FILTER);
|
||||
|
||||
fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper("query1");
|
||||
parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, mapperService.documentMapperParser(),
|
||||
documentMapper, null, null);
|
||||
fieldMapper.processQuery(bq.build(), parseContext);
|
||||
document = parseContext.doc();
|
||||
fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType();
|
||||
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
|
||||
assertThat(document.getFields(fieldType.rangeField.name()).length, equalTo(0));
|
||||
fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name())));
|
||||
assertThat(fields.size(), equalTo(1));
|
||||
assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("updated_field\0done"));
|
||||
|
||||
// First clause is extracted, because default logic:
|
||||
bq = new BooleanQuery.Builder();
|
||||
bq.add(new TermQuery(new Term("status", "updated")), Occur.FILTER);
|
||||
bq.add(new TermQuery(new Term("updated_field", "done")), Occur.FILTER);
|
||||
|
||||
fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper("query2");
|
||||
parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, mapperService.documentMapperParser(),
|
||||
documentMapper, null, null);
|
||||
fieldMapper.processQuery(bq.build(), parseContext);
|
||||
document = parseContext.doc();
|
||||
fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType();
|
||||
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
|
||||
assertThat(document.getFields(fieldType.rangeField.name()).length, equalTo(0));
|
||||
fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name())));
|
||||
assertThat(fields.size(), equalTo(1));
|
||||
assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("status\0updated"));
|
||||
}
|
||||
|
||||
// Just so that we store scripts in percolator queries, but not really execute these scripts.
|
||||
public static class FoolMeScriptPlugin extends MockScriptPlugin {
|
||||
|
||||
|
|
|
@ -193,6 +193,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
|
|||
SearchResponse response = client().prepareSearch()
|
||||
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
|
||||
.get();
|
||||
logger.info("response={}", response);
|
||||
assertHitCount(response, 2);
|
||||
assertThat(response.getHits().getAt(0).getId(), equalTo("3"));
|
||||
assertThat(response.getHits().getAt(1).getId(), equalTo("1"));
|
||||
|
@ -849,34 +850,4 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
|
|||
assertThat(item.getFailureMessage(), containsString("[test/type/6] couldn't be found"));
|
||||
}
|
||||
|
||||
public void testBoostFields() throws Exception {
|
||||
XContentBuilder mappingSource = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("status").field("type", "keyword").endObject()
|
||||
.startObject("price").field("type", "long").endObject()
|
||||
.startObject("query").field("type", "percolator")
|
||||
.startObject("boost_fields").field("status", 0.0F).endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().endObject();
|
||||
assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", mappingSource));
|
||||
|
||||
client().prepareIndex("test", "type", "q1")
|
||||
.setSource(jsonBuilder().startObject().field("query", boolQuery()
|
||||
.must(matchQuery("status", "sold"))
|
||||
.must(matchQuery("price", 100))
|
||||
).endObject())
|
||||
.get();
|
||||
refresh();
|
||||
|
||||
SearchResponse response = client().prepareSearch()
|
||||
.setQuery(new PercolateQueryBuilder("query",
|
||||
XContentFactory.jsonBuilder().startObject()
|
||||
.field("status", "sold")
|
||||
.field("price", 100)
|
||||
.endObject().bytes(), XContentType.JSON))
|
||||
.get();
|
||||
assertHitCount(response, 1);
|
||||
assertThat(response.getHits().getAt(0).getId(), equalTo("q1"));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@@ -52,6 +52,7 @@ import org.apache.lucene.search.spans.SpanNotQuery;
|
|||
import org.apache.lucene.search.spans.SpanOrQuery;
|
||||
import org.apache.lucene.search.spans.SpanTermQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.lucene.search.function.CombineFunction;
|
||||
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
|
||||
import org.elasticsearch.common.lucene.search.function.RandomScoreFunction;
|
||||
|
@@ -63,12 +64,9 @@ import org.elasticsearch.test.ESTestCase;
|
|||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.stream.Collectors;
|
||||
|
@@ -84,8 +82,9 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
|
||||
public void testExtractQueryMetadata_termQuery() {
|
||||
TermQuery termQuery = new TermQuery(new Term("_field", "_term"));
|
||||
Result result = analyze(termQuery, Collections.emptyMap());
|
||||
Result result = analyze(termQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
|
||||
assertThat(terms.size(), equalTo(1));
|
||||
assertThat(terms.get(0).field(), equalTo(termQuery.getTerm().field()));
|
||||
|
@ -94,8 +93,9 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
|
||||
public void testExtractQueryMetadata_termsQuery() {
|
||||
TermInSetQuery termsQuery = new TermInSetQuery("_field", new BytesRef("_term1"), new BytesRef("_term2"));
|
||||
Result result = analyze(termsQuery, Collections.emptyMap());
|
||||
Result result = analyze(termsQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
|
||||
terms.sort(Comparator.comparing(qt -> qt.term));
|
||||
assertThat(terms.size(), equalTo(2));
|
||||
|
@ -107,23 +107,55 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
|
||||
public void testExtractQueryMetadata_phraseQuery() {
PhraseQuery phraseQuery = new PhraseQuery("_field", "_term1", "term2");
Result result = analyze(phraseQuery, Collections.emptyMap());
Result result = analyze(phraseQuery, Version.CURRENT);
assertThat(result.verified, is(false));
assertThat(result.minimumShouldMatch, equalTo(2));
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
assertThat(terms.size(), equalTo(1));
terms.sort(Comparator.comparing(qt -> qt.term));
assertThat(terms.size(), equalTo(2));
assertThat(terms.get(0).field(), equalTo(phraseQuery.getTerms()[0].field()));
assertThat(terms.get(0).bytes(), equalTo(phraseQuery.getTerms()[0].bytes()));
assertThat(terms.get(1).field(), equalTo(phraseQuery.getTerms()[1].field()));
assertThat(terms.get(1).bytes(), equalTo(phraseQuery.getTerms()[1].bytes()));
}
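// Illustrative sketch (not part of this change), assuming the QueryExtraction(Term) constructor
// used elsewhere in QueryAnalyzer: on or after 6.1 a phrase query contributes all of its terms
// and a minimum_should_match equal to the number of terms, since every term must be present for
// the phrase to match, which is what the assertions above verify.
private static Result analyzePhrase(PhraseQuery phraseQuery) {
    Set<QueryExtraction> extractions = new HashSet<>();
    for (Term term : phraseQuery.getTerms()) {
        extractions.add(new QueryExtraction(term));
    }
    return new Result(false, extractions, phraseQuery.getTerms().length);
}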
|
||||
|
||||
public void testExtractQueryMetadata_multiPhraseQuery() {
|
||||
MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder()
|
||||
.add(new Term("_field", "_term1"))
|
||||
.add(new Term[] {new Term("_field", "_term2"), new Term("_field", "_term3")})
|
||||
.add(new Term[] {new Term("_field", "_term4"), new Term("_field", "_term5")})
|
||||
.add(new Term[] {new Term("_field", "_term6")})
|
||||
.build();
|
||||
Result result = analyze(multiPhraseQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(4));
|
||||
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
|
||||
terms.sort(Comparator.comparing(qt -> qt.term));
|
||||
assertThat(terms.size(), equalTo(6));
|
||||
assertThat(terms.get(0).field(), equalTo("_field"));
|
||||
assertThat(terms.get(0).bytes().utf8ToString(), equalTo("_term1"));
|
||||
assertThat(terms.get(1).field(), equalTo("_field"));
|
||||
assertThat(terms.get(1).bytes().utf8ToString(), equalTo("_term2"));
|
||||
assertThat(terms.get(2).field(), equalTo("_field"));
|
||||
assertThat(terms.get(2).bytes().utf8ToString(), equalTo("_term3"));
|
||||
assertThat(terms.get(3).field(), equalTo("_field"));
|
||||
assertThat(terms.get(3).bytes().utf8ToString(), equalTo("_term4"));
|
||||
assertThat(terms.get(4).field(), equalTo("_field"));
|
||||
assertThat(terms.get(4).bytes().utf8ToString(), equalTo("_term5"));
|
||||
assertThat(terms.get(5).field(), equalTo("_field"));
|
||||
assertThat(terms.get(5).bytes().utf8ToString(), equalTo("_term6"));
|
||||
}
|
||||
|
||||
public void testExtractQueryMetadata_multiPhraseQuery_pre6dot1() {
|
||||
MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder()
|
||||
.add(new Term("_field", "_long_term"))
|
||||
.add(new Term[] {new Term("_field", "_long_term"), new Term("_field", "_term")})
|
||||
.add(new Term[] {new Term("_field", "_long_term"), new Term("_field", "_very_long_term")})
|
||||
.add(new Term[] {new Term("_field", "_very_long_term")})
|
||||
.build();
|
||||
Result result = analyze(multiPhraseQuery, Collections.emptyMap());
|
||||
Result result = analyze(multiPhraseQuery, Version.V_6_0_0);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
|
||||
assertThat(terms.size(), equalTo(1));
|
||||
assertThat(terms.get(0).field(), equalTo("_field"));
|
||||
|
@ -131,6 +163,39 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testExtractQueryMetadata_booleanQuery() {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
TermQuery termQuery1 = new TermQuery(new Term("_field", "term0"));
builder.add(termQuery1, BooleanClause.Occur.SHOULD);
PhraseQuery phraseQuery = new PhraseQuery("_field", "term1", "term2");
builder.add(phraseQuery, BooleanClause.Occur.SHOULD);

BooleanQuery.Builder subBuilder = new BooleanQuery.Builder();
TermQuery termQuery2 = new TermQuery(new Term("_field1", "term4"));
subBuilder.add(termQuery2, BooleanClause.Occur.MUST);
TermQuery termQuery3 = new TermQuery(new Term("_field3", "term5"));
subBuilder.add(termQuery3, BooleanClause.Occur.MUST);
builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD);

BooleanQuery booleanQuery = builder.build();
Result result = analyze(booleanQuery, Version.CURRENT);
assertThat("Should clause with phrase query isn't verified, so entire query can't be verified", result.verified, is(false));
assertThat(result.minimumShouldMatch, equalTo(1));
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
terms.sort(Comparator.comparing(qt -> qt.term));
assertThat(terms.size(), equalTo(5));
assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field()));
assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes()));
assertThat(terms.get(1).field(), equalTo(phraseQuery.getTerms()[0].field()));
assertThat(terms.get(1).bytes(), equalTo(phraseQuery.getTerms()[0].bytes()));
assertThat(terms.get(2).field(), equalTo(phraseQuery.getTerms()[1].field()));
assertThat(terms.get(2).bytes(), equalTo(phraseQuery.getTerms()[1].bytes()));
assertThat(terms.get(3).field(), equalTo(termQuery2.getTerm().field()));
assertThat(terms.get(3).bytes(), equalTo(termQuery2.getTerm().bytes()));
assertThat(terms.get(4).field(), equalTo(termQuery3.getTerm().field()));
assertThat(terms.get(4).bytes(), equalTo(termQuery3.getTerm().bytes()));
}

public void testExtractQueryMetadata_booleanQuery_pre6dot1() {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
TermQuery termQuery1 = new TermQuery(new Term("_field", "_term"));
builder.add(termQuery1, BooleanClause.Occur.SHOULD);

@@ -145,8 +210,9 @@ public class QueryAnalyzerTests extends ESTestCase {
builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD);

BooleanQuery booleanQuery = builder.build();
Result result = analyze(booleanQuery, Collections.emptyMap());
Result result = analyze(booleanQuery, Version.V_6_0_0);
assertThat("Should clause with phrase query isn't verified, so entire query can't be verified", result.verified, is(false));
assertThat(result.minimumShouldMatch, equalTo(1));
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
terms.sort(Comparator.comparing(qt -> qt.term));
assertThat(terms.size(), equalTo(3));

@@ -158,6 +224,50 @@ public class QueryAnalyzerTests extends ESTestCase {
assertThat(terms.get(2).bytes(), equalTo(termQuery3.getTerm().bytes()));
}

public void testExtractQueryMetadata_booleanQuery_msm() {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setMinimumNumberShouldMatch(2);
TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1"));
builder.add(termQuery1, BooleanClause.Occur.SHOULD);
TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2"));
builder.add(termQuery2, BooleanClause.Occur.SHOULD);
TermQuery termQuery3 = new TermQuery(new Term("_field", "_term3"));
builder.add(termQuery3, BooleanClause.Occur.SHOULD);

BooleanQuery booleanQuery = builder.build();
Result result = analyze(booleanQuery, Version.CURRENT);
assertThat(result.verified, is(true));
assertThat(result.minimumShouldMatch, equalTo(2));
List<QueryExtraction> extractions = new ArrayList<>(result.extractions);
extractions.sort(Comparator.comparing(extraction -> extraction.term));
assertThat(extractions.size(), equalTo(3));
assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term1")));
assertThat(extractions.get(1).term, equalTo(new Term("_field", "_term2")));
assertThat(extractions.get(2).term, equalTo(new Term("_field", "_term3")));
}

public void testExtractQueryMetadata_booleanQuery_msm_pre6dot1() {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setMinimumNumberShouldMatch(2);
TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1"));
builder.add(termQuery1, BooleanClause.Occur.SHOULD);
TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2"));
builder.add(termQuery2, BooleanClause.Occur.SHOULD);
TermQuery termQuery3 = new TermQuery(new Term("_field", "_term3"));
builder.add(termQuery3, BooleanClause.Occur.SHOULD);

BooleanQuery booleanQuery = builder.build();
Result result = analyze(booleanQuery, Version.V_6_0_0);
assertThat(result.verified, is(false));
assertThat(result.minimumShouldMatch, equalTo(1));
List<QueryExtraction> extractions = new ArrayList<>(result.extractions);
extractions.sort(Comparator.comparing(extraction -> extraction.term));
assertThat(extractions.size(), equalTo(3));
assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term1")));
assertThat(extractions.get(1).term, equalTo(new Term("_field", "_term2")));
assertThat(extractions.get(2).term, equalTo(new Term("_field", "_term3")));
}

public void testExtractQueryMetadata_booleanQuery_onlyShould() {
|
||||
BooleanQuery.Builder builder = new BooleanQuery.Builder();
|
||||
TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1"));
|
||||
|
@ -173,8 +283,9 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD);
|
||||
|
||||
BooleanQuery booleanQuery = builder.build();
|
||||
Result result = analyze(booleanQuery, Collections.emptyMap());
|
||||
Result result = analyze(booleanQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
List<QueryAnalyzer.QueryExtraction> terms = new ArrayList<>(result.extractions);
|
||||
terms.sort(Comparator.comparing(qt -> qt.term));
|
||||
assertThat(terms.size(), equalTo(4));
|
||||
|
@ -196,12 +307,16 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
builder.add(phraseQuery, BooleanClause.Occur.SHOULD);
|
||||
|
||||
BooleanQuery booleanQuery = builder.build();
|
||||
Result result = analyze(booleanQuery, Collections.emptyMap());
|
||||
Result result = analyze(booleanQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(2));
|
||||
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
|
||||
assertThat(terms.size(), equalTo(1));
|
||||
assertThat(terms.size(), equalTo(2));
|
||||
terms.sort(Comparator.comparing(qt -> qt.term));
|
||||
assertThat(terms.get(0).field(), equalTo(phraseQuery.getTerms()[0].field()));
|
||||
assertThat(terms.get(0).bytes(), equalTo(phraseQuery.getTerms()[0].bytes()));
|
||||
assertThat(terms.get(1).field(), equalTo(phraseQuery.getTerms()[1].field()));
|
||||
assertThat(terms.get(1).bytes(), equalTo(phraseQuery.getTerms()[1].bytes()));
|
||||
}
|
||||
|
||||
public void testExactMatch_booleanQuery() {
|
||||
|
@ -210,59 +325,119 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
builder.add(termQuery1, BooleanClause.Occur.SHOULD);
|
||||
TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2"));
|
||||
builder.add(termQuery2, BooleanClause.Occur.SHOULD);
|
||||
Result result = analyze(builder.build(), Collections.emptyMap());
|
||||
Result result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("All clauses are exact, so candidate matches are verified", result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(termQuery1, BooleanClause.Occur.SHOULD);
|
||||
PhraseQuery phraseQuery1 = new PhraseQuery("_field", "_term1", "_term2");
|
||||
builder.add(phraseQuery1, BooleanClause.Occur.SHOULD);
|
||||
result = analyze(builder.build(), Collections.emptyMap());
|
||||
result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("Clause isn't exact, so candidate matches are not verified", result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(phraseQuery1, BooleanClause.Occur.SHOULD);
|
||||
PhraseQuery phraseQuery2 = new PhraseQuery("_field", "_term3", "_term4");
|
||||
builder.add(phraseQuery2, BooleanClause.Occur.SHOULD);
|
||||
result = analyze(builder.build(), Collections.emptyMap());
|
||||
result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("No clause is exact, so candidate matches are not verified", result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(2));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(termQuery1, BooleanClause.Occur.MUST_NOT);
|
||||
builder.add(termQuery2, BooleanClause.Occur.SHOULD);
|
||||
result = analyze(builder.build(), Collections.emptyMap());
|
||||
result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("There is a must_not clause, so candidate matches are not verified", result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.setMinimumNumberShouldMatch(randomIntBetween(2, 32));
|
||||
builder.add(termQuery1, BooleanClause.Occur.SHOULD);
|
||||
builder.add(termQuery2, BooleanClause.Occur.SHOULD);
|
||||
result = analyze(builder.build(), Collections.emptyMap());
|
||||
assertThat("Minimum match is >= 1, so candidate matches are not verified", result.verified, is(false));
|
||||
result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("Minimum match has not impact on whether the result is verified", result.verified, is(true));
|
||||
assertThat("msm is at least two so result.minimumShouldMatch should 2 too", result.minimumShouldMatch, equalTo(2));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER);
|
||||
result = analyze(builder.build(), Collections.emptyMap());
|
||||
assertThat("Single required clause, so candidate matches are verified", result.verified, is(false));
|
||||
result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("Also required clauses are taken into account whether the result is verified", result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER);
|
||||
builder.add(termQuery2, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER);
|
||||
result = analyze(builder.build(), Collections.emptyMap());
|
||||
assertThat("Two or more required clauses, so candidate matches are not verified", result.verified, is(false));
|
||||
result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("Also required clauses are taken into account whether the result is verified", result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(2));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER);
|
||||
builder.add(termQuery2, BooleanClause.Occur.MUST_NOT);
|
||||
result = analyze(builder.build(), Collections.emptyMap());
|
||||
assertThat("Required and prohibited clauses, so candidate matches are not verified", result.verified, is(false));
|
||||
result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("Prohibited clause, so candidate matches are not verified", result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
}
|
||||
|
||||
public void testBooleanQueryWithMustAndShouldClauses() {
|
||||
BooleanQuery.Builder builder = new BooleanQuery.Builder();
|
||||
TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1"));
|
||||
builder.add(termQuery1, BooleanClause.Occur.SHOULD);
|
||||
TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2"));
|
||||
builder.add(termQuery2, BooleanClause.Occur.SHOULD);
|
||||
TermQuery termQuery3 = new TermQuery(new Term("_field", "_term3"));
|
||||
builder.add(termQuery3, BooleanClause.Occur.MUST);
|
||||
Result result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("Must clause is exact, so this is a verified candidate match", result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertThat(result.extractions.size(), equalTo(1));
|
||||
List<QueryExtraction> extractions = new ArrayList<>(result.extractions);
|
||||
assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term3")));
|
||||
|
||||
builder.setMinimumNumberShouldMatch(1);
|
||||
result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("Must clause is exact, but m_s_m is 1 so one should clause must match too", result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertThat(result.extractions.size(), equalTo(1));
|
||||
extractions = new ArrayList<>(result.extractions);
|
||||
assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term3")));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
BooleanQuery.Builder innerBuilder = new BooleanQuery.Builder();
|
||||
innerBuilder.setMinimumNumberShouldMatch(2);
|
||||
innerBuilder.add(termQuery1, BooleanClause.Occur.SHOULD);
|
||||
innerBuilder.add(termQuery2, BooleanClause.Occur.SHOULD);
|
||||
builder.add(innerBuilder.build(), BooleanClause.Occur.MUST);
|
||||
builder.add(termQuery3, BooleanClause.Occur.MUST);
|
||||
result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("Verified, because m_s_m is specified in an inner clause and not top level clause", result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(3));
|
||||
assertThat(result.extractions.size(), equalTo(3));
|
||||
extractions = new ArrayList<>(result.extractions);
|
||||
extractions.sort(Comparator.comparing(key -> key.term));
|
||||
assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term1")));
|
||||
assertThat(extractions.get(1).term, equalTo(new Term("_field", "_term2")));
|
||||
assertThat(extractions.get(2).term, equalTo(new Term("_field", "_term3")));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(innerBuilder.build(), BooleanClause.Occur.SHOULD);
|
||||
builder.add(termQuery3, BooleanClause.Occur.MUST);
|
||||
result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat("Verified, because m_s_m is specified in an inner clause and not top level clause", result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertThat(result.extractions.size(), equalTo(1));
|
||||
extractions = new ArrayList<>(result.extractions);
|
||||
assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term3")));
|
||||
}
|
||||
|
||||
public void testExtractQueryMetadata_constantScoreQuery() {
|
||||
TermQuery termQuery1 = new TermQuery(new Term("_field", "_term"));
|
||||
ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(termQuery1);
|
||||
Result result = analyze(constantScoreQuery, Collections.emptyMap());
|
||||
Result result = analyze(constantScoreQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
|
||||
assertThat(terms.size(), equalTo(1));
|
||||
assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field()));
|
||||
|
@ -272,8 +447,9 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
public void testExtractQueryMetadata_boostQuery() {
|
||||
TermQuery termQuery1 = new TermQuery(new Term("_field", "_term"));
|
||||
BoostQuery constantScoreQuery = new BoostQuery(termQuery1, 1f);
|
||||
Result result = analyze(constantScoreQuery, Collections.emptyMap());
|
||||
Result result = analyze(constantScoreQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
|
||||
assertThat(terms.size(), equalTo(1));
|
||||
assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field()));
|
||||
|
@ -284,11 +460,13 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
CommonTermsQuery commonTermsQuery = new CommonTermsQuery(BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD, 100);
|
||||
commonTermsQuery.add(new Term("_field", "_term1"));
|
||||
commonTermsQuery.add(new Term("_field", "_term2"));
|
||||
Result result = analyze(commonTermsQuery, Collections.emptyMap());
|
||||
Result result = analyze(commonTermsQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
|
||||
terms.sort(Comparator.comparing(qt -> qt.term));
|
||||
assertThat(terms.size(), equalTo(2));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertThat(terms.get(0).field(), equalTo("_field"));
|
||||
assertThat(terms.get(0).text(), equalTo("_term1"));
|
||||
assertThat(terms.get(1).field(), equalTo("_field"));
|
||||
|
@ -298,8 +476,9 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
public void testExtractQueryMetadata_blendedTermQuery() {
|
||||
Term[] termsArr = new Term[]{new Term("_field", "_term1"), new Term("_field", "_term2")};
|
||||
BlendedTermQuery commonTermsQuery = BlendedTermQuery.dismaxBlendedQuery(termsArr, 1.0f);
|
||||
Result result = analyze(commonTermsQuery, Collections.emptyMap());
|
||||
Result result = analyze(commonTermsQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
List<QueryAnalyzer.QueryExtraction> terms = new ArrayList<>(result.extractions);
|
||||
terms.sort(Comparator.comparing(qt -> qt.term));
|
||||
assertThat(terms.size(), equalTo(2));
|
||||
|
@ -322,8 +501,9 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
// 4) FieldMaskingSpanQuery is a tricky query so we shouldn't optimize this
|
||||
|
||||
SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term"));
|
||||
Result result = analyze(spanTermQuery1, Collections.emptyMap());
|
||||
Result result = analyze(spanTermQuery1, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, spanTermQuery1.getTerm());
|
||||
}
|
||||
|
||||
|
@ -333,8 +513,21 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true)
|
||||
.addClause(spanTermQuery1).addClause(spanTermQuery2).build();
|
||||
|
||||
Result result = analyze(spanNearQuery, Collections.emptyMap());
|
||||
Result result = analyze(spanNearQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(2));
|
||||
assertTermsEqual(result.extractions, spanTermQuery1.getTerm(), spanTermQuery2.getTerm());
|
||||
}
|
||||
|
||||
public void testExtractQueryMetadata_spanNearQuery_pre6dot1() {
|
||||
SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term"));
|
||||
SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term"));
|
||||
SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true)
|
||||
.addClause(spanTermQuery1).addClause(spanTermQuery2).build();
|
||||
|
||||
Result result = analyze(spanNearQuery, Version.V_6_0_0);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, spanTermQuery2.getTerm());
|
||||
}
|
||||
|
||||
|
@ -342,16 +535,18 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term"));
|
||||
SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term"));
|
||||
SpanOrQuery spanOrQuery = new SpanOrQuery(spanTermQuery1, spanTermQuery2);
|
||||
Result result = analyze(spanOrQuery, Collections.emptyMap());
|
||||
Result result = analyze(spanOrQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, spanTermQuery1.getTerm(), spanTermQuery2.getTerm());
|
||||
}
|
||||
|
||||
public void testExtractQueryMetadata_spanFirstQuery() {
|
||||
SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term"));
|
||||
SpanFirstQuery spanFirstQuery = new SpanFirstQuery(spanTermQuery1, 20);
|
||||
Result result = analyze(spanFirstQuery, Collections.emptyMap());
|
||||
Result result = analyze(spanFirstQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, spanTermQuery1.getTerm());
|
||||
}
|
||||
|
||||
|
@ -359,47 +554,54 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term"));
|
||||
SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term"));
|
||||
SpanNotQuery spanNotQuery = new SpanNotQuery(spanTermQuery1, spanTermQuery2);
|
||||
Result result = analyze(spanNotQuery, Collections.emptyMap());
|
||||
Result result = analyze(spanNotQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, spanTermQuery1.getTerm());
|
||||
}
|
||||
|
||||
public void testExtractQueryMetadata_matchNoDocsQuery() {
|
||||
Result result = analyze(new MatchNoDocsQuery("sometimes there is no reason at all"), Collections.emptyMap());
|
||||
Result result = analyze(new MatchNoDocsQuery("sometimes there is no reason at all"), Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertEquals(0, result.extractions.size());
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
|
||||
BooleanQuery.Builder bq = new BooleanQuery.Builder();
|
||||
bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.MUST);
|
||||
bq.add(new MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.MUST);
|
||||
result = analyze(bq.build(), Collections.emptyMap());
|
||||
assertThat(result.verified, is(false));
|
||||
assertEquals(0, result.extractions.size());
|
||||
result = analyze(bq.build(), Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertEquals(1, result.extractions.size());
|
||||
assertThat(result.minimumShouldMatch, equalTo(2));
|
||||
assertTermsEqual(result.extractions, new Term("field", "value"));
|
||||
|
||||
bq = new BooleanQuery.Builder();
|
||||
bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.SHOULD);
|
||||
bq.add(new MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.SHOULD);
|
||||
result = analyze(bq.build(), Collections.emptyMap());
|
||||
result = analyze(bq.build(), Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, new Term("field", "value"));
|
||||
|
||||
DisjunctionMaxQuery disjunctionMaxQuery = new DisjunctionMaxQuery(
|
||||
Arrays.asList(new TermQuery(new Term("field", "value")), new MatchNoDocsQuery("sometimes there is no reason at all")),
|
||||
1f
|
||||
);
|
||||
result = analyze(disjunctionMaxQuery, Collections.emptyMap());
|
||||
result = analyze(disjunctionMaxQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, new Term("field", "value"));
|
||||
}
|
||||
|
||||
public void testExtractQueryMetadata_matchAllDocsQuery() {
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(new MatchAllDocsQuery(), Collections.emptyMap()));
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(new MatchAllDocsQuery(), Version.CURRENT));
|
||||
|
||||
BooleanQuery.Builder builder = new BooleanQuery.Builder();
|
||||
builder.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.MUST);
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
|
||||
Result result = analyze(builder.build(), Collections.emptyMap());
|
||||
Result result = analyze(builder.build(), Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, new Term("field", "value"));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
|
@ -407,40 +609,40 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
|
||||
BooleanQuery bq1 = builder.build();
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(bq1, Collections.emptyMap()));
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(bq1, Version.CURRENT));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST_NOT);
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
|
||||
BooleanQuery bq2 = builder.build();
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(bq2, Collections.emptyMap()));
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(bq2, Version.CURRENT));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
|
||||
BooleanQuery bq3 = builder.build();
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(bq3, Collections.emptyMap()));
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(bq3, Version.CURRENT));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST_NOT);
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
|
||||
BooleanQuery bq4 = builder.build();
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(bq4, Collections.emptyMap()));
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(bq4, Version.CURRENT));
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.SHOULD);
|
||||
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
|
||||
BooleanQuery bq5 = builder.build();
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(bq5, Collections.emptyMap()));
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(bq5, Version.CURRENT));
|
||||
}
|
||||
|
||||
public void testExtractQueryMetadata_unsupportedQuery() {
|
||||
TermRangeQuery termRangeQuery = new TermRangeQuery("_field", null, null, true, false);
|
||||
UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class,
|
||||
() -> analyze(termRangeQuery, Collections.emptyMap()));
|
||||
() -> analyze(termRangeQuery, Version.CURRENT));
|
||||
assertThat(e.getUnsupportedQuery(), sameInstance(termRangeQuery));
|
||||
|
||||
TermQuery termQuery1 = new TermQuery(new Term("_field", "_term"));
|
||||
|
@ -449,7 +651,7 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
builder.add(termRangeQuery, BooleanClause.Occur.SHOULD);
|
||||
BooleanQuery bq = builder.build();
|
||||
|
||||
e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq, Collections.emptyMap()));
|
||||
e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq, Version.CURRENT));
|
||||
assertThat(e.getUnsupportedQuery(), sameInstance(termRangeQuery));
|
||||
}
|
||||
|
||||
|
@ -462,8 +664,9 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
builder.add(unsupportedQuery, BooleanClause.Occur.MUST);
|
||||
BooleanQuery bq1 = builder.build();
|
||||
|
||||
Result result = analyze(bq1, Collections.emptyMap());
|
||||
Result result = analyze(bq1, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, termQuery1.getTerm());
|
||||
|
||||
TermQuery termQuery2 = new TermQuery(new Term("_field", "_longer_term"));
|
||||
|
@ -472,15 +675,16 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
builder.add(termQuery2, BooleanClause.Occur.MUST);
|
||||
builder.add(unsupportedQuery, BooleanClause.Occur.MUST);
|
||||
bq1 = builder.build();
|
||||
result = analyze(bq1, Collections.emptyMap());
|
||||
result = analyze(bq1, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertTermsEqual(result.extractions, termQuery2.getTerm());
|
||||
assertThat(result.minimumShouldMatch, equalTo(2));
|
||||
assertTermsEqual(result.extractions, termQuery1.getTerm(), termQuery2.getTerm());
|
||||
|
||||
builder = new BooleanQuery.Builder();
|
||||
builder.add(unsupportedQuery, BooleanClause.Occur.MUST);
|
||||
builder.add(unsupportedQuery, BooleanClause.Occur.MUST);
|
||||
BooleanQuery bq2 = builder.build();
|
||||
UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq2, Collections.emptyMap()));
|
||||
UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq2, Version.CURRENT));
|
||||
assertThat(e.getUnsupportedQuery(), sameInstance(unsupportedQuery));
|
||||
}
|
||||
|
||||
|
@ -493,8 +697,9 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
Arrays.asList(termQuery1, termQuery2, termQuery3, termQuery4), 0.1f
|
||||
);
|
||||
|
||||
Result result = analyze(disjunctionMaxQuery, Collections.emptyMap());
|
||||
Result result = analyze(disjunctionMaxQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
List<QueryAnalyzer.QueryExtraction> terms = new ArrayList<>(result.extractions);
|
||||
terms.sort(Comparator.comparing(qt -> qt.term));
|
||||
assertThat(terms.size(), equalTo(4));
|
||||
|
@ -511,8 +716,9 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
Arrays.asList(termQuery1, termQuery2, termQuery3, new PhraseQuery("_field", "_term4")), 0.1f
|
||||
);
|
||||
|
||||
result = analyze(disjunctionMaxQuery, Collections.emptyMap());
|
||||
result = analyze(disjunctionMaxQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
terms = new ArrayList<>(result.extractions);
|
||||
terms.sort(Comparator.comparing(qt -> qt.term));
|
||||
assertThat(terms.size(), equalTo(4));
|
||||
|
@ -528,148 +734,91 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
|
||||
public void testSynonymQuery() {
|
||||
SynonymQuery query = new SynonymQuery();
|
||||
Result result = analyze(query, Collections.emptyMap());
|
||||
Result result = analyze(query, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertThat(result.extractions.isEmpty(), is(true));
|
||||
|
||||
query = new SynonymQuery(new Term("_field", "_value1"), new Term("_field", "_value2"));
|
||||
result = analyze(query, Collections.emptyMap());
|
||||
result = analyze(query, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, new Term("_field", "_value1"), new Term("_field", "_value2"));
|
||||
}
|
||||
|
||||
public void testFunctionScoreQuery() {
|
||||
TermQuery termQuery = new TermQuery(new Term("_field", "_value"));
|
||||
FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(termQuery, new RandomScoreFunction(0, 0, null));
|
||||
Result result = analyze(functionScoreQuery, Collections.emptyMap());
|
||||
Result result = analyze(functionScoreQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(true));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, new Term("_field", "_value"));
|
||||
|
||||
functionScoreQuery = new FunctionScoreQuery(termQuery, new RandomScoreFunction(0, 0, null),
|
||||
CombineFunction.MULTIPLY, 1f, 10f);
|
||||
result = analyze(functionScoreQuery, Collections.emptyMap());
|
||||
result = analyze(functionScoreQuery, Version.CURRENT);
|
||||
assertThat(result.verified, is(false));
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertTermsEqual(result.extractions, new Term("_field", "_value"));
|
||||
}
|
||||
|
||||
public void testSelectBestExtraction() {
|
||||
Set<QueryExtraction> queryTerms1 = terms(new int[0], "12", "1234", "12345");
|
||||
Set<QueryAnalyzer.QueryExtraction> queryTerms2 = terms(new int[0], "123", "1234", "12345");
|
||||
Set<QueryExtraction> result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2);
|
||||
Set<QueryExtraction> result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms2, result);
|
||||
|
||||
queryTerms1 = terms(new int[]{1, 2, 3});
|
||||
queryTerms2 = terms(new int[]{2, 3, 4});
|
||||
result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2);
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
|
||||
queryTerms1 = terms(new int[]{4, 5, 6});
|
||||
queryTerms2 = terms(new int[]{1, 2, 3});
|
||||
result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2);
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms2, result);
|
||||
|
||||
queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456");
|
||||
queryTerms2 = terms(new int[]{2, 3, 4}, "123", "456");
|
||||
result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2);
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
|
||||
queryTerms1 = terms(new int[]{10});
|
||||
queryTerms2 = terms(new int[]{1});
|
||||
result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2);
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms2, result);
|
||||
|
||||
queryTerms1 = terms(new int[]{10}, "123");
|
||||
queryTerms2 = terms(new int[]{1});
|
||||
result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2);
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
|
||||
queryTerms1 = terms(new int[]{10}, "1", "123");
|
||||
queryTerms2 = terms(new int[]{1}, "1", "2");
|
||||
result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2);
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
|
||||
queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456");
|
||||
queryTerms2 = terms(new int[]{2, 3, 4}, "1", "456");
|
||||
result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2);
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame("Ignoring ranges, so then prefer queryTerms1, because it has the longest shortest term", queryTerms1, result);
|
||||
|
||||
queryTerms1 = terms(new int[]{});
|
||||
queryTerms2 = terms(new int[]{});
|
||||
result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2);
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame("In case query extractions are empty", queryTerms2, result);
|
||||
|
||||
queryTerms1 = terms(new int[]{1});
|
||||
queryTerms2 = terms(new int[]{});
|
||||
result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2);
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame("In case query a single extraction is empty", queryTerms1, result);
|
||||
|
||||
queryTerms1 = terms(new int[]{});
|
||||
queryTerms2 = terms(new int[]{1});
|
||||
result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2);
|
||||
result = selectBestExtraction(queryTerms1, queryTerms2);
|
||||
assertSame("In case query a single extraction is empty", queryTerms2, result);
|
||||
}
|
||||
|
||||
public void testSelectBestExtraction_boostFields() {
|
||||
Set<QueryExtraction> queryTerms1 = new HashSet<>(Arrays.asList(
|
||||
new QueryExtraction(new Term("status_field", "sold")),
|
||||
new QueryExtraction(new Term("category", "accessory"))
|
||||
));
|
||||
Set<QueryAnalyzer.QueryExtraction> queryTerms2 = new HashSet<>(Arrays.asList(
|
||||
new QueryExtraction(new Term("status_field", "instock")),
|
||||
new QueryExtraction(new Term("category", "hardware"))
|
||||
));
|
||||
Set<QueryExtraction> result = selectBestExtraction(Collections.singletonMap("status_field", 0F), queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
|
||||
byte[] interval = new byte[Long.BYTES];
|
||||
LongPoint.encodeDimension(4, interval, 0);
|
||||
queryTerms1 = new HashSet<>(Arrays.asList(
|
||||
new QueryExtraction(new Term("status_field", "sold")),
|
||||
new QueryExtraction(new QueryAnalyzer.Range("price", null, null, interval))
|
||||
));
|
||||
interval = new byte[Long.BYTES];
|
||||
LongPoint.encodeDimension(8, interval, 0);
|
||||
queryTerms2 = new HashSet<>(Arrays.asList(
|
||||
new QueryExtraction(new Term("status_field", "instock")),
|
||||
new QueryExtraction(new QueryAnalyzer.Range("price", null, null, interval))
|
||||
));
|
||||
result = selectBestExtraction(Collections.singletonMap("status_field", 0F), queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
|
||||
Map<String, Float> boostFields = new HashMap<>();
|
||||
boostFields.put("field1", 2F);
|
||||
boostFields.put("field2", 0.5F);
|
||||
boostFields.put("field4", 3F);
|
||||
boostFields.put("field5", 0.6F);
|
||||
queryTerms1 = new HashSet<>(Arrays.asList(
|
||||
new QueryExtraction(new Term("field1", "sold")),
|
||||
new QueryExtraction(new Term("field2", "accessory")),
|
||||
new QueryExtraction(new QueryAnalyzer.Range("field3", null, null, new byte[0]))
|
||||
));
|
||||
queryTerms2 = new HashSet<>(Arrays.asList(
|
||||
new QueryExtraction(new Term("field3", "sold")),
|
||||
new QueryExtraction(new Term("field4", "accessory")),
|
||||
new QueryExtraction(new QueryAnalyzer.Range("field5", null, null, new byte[0]))
|
||||
));
|
||||
result = selectBestExtraction(boostFields, queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms2, result);
|
||||
|
||||
boostFields.put("field2", 6F);
|
||||
result = selectBestExtraction(boostFields, queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
|
||||
boostFields.put("field2", 0F);
|
||||
boostFields.put("field3", 0F);
|
||||
boostFields.put("field5", 0F);
|
||||
result = selectBestExtraction(boostFields, queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms2, result);
|
||||
|
||||
boostFields = new HashMap<>();
|
||||
boostFields.put("field2", 2F);
|
||||
result = selectBestExtraction(boostFields, queryTerms1, queryTerms2);
|
||||
assertSame(queryTerms1, result);
|
||||
}
|
||||
|
||||
public void testSelectBestExtraction_random() {
|
||||
Set<QueryExtraction> terms1 = new HashSet<>();
|
||||
int shortestTerms1Length = Integer.MAX_VALUE;
|
||||
|
@ -691,7 +840,7 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
sumTermLength -= length;
|
||||
}
|
||||
|
||||
Set<QueryAnalyzer.QueryExtraction> result = selectBestExtraction(Collections.emptyMap(), terms1, terms2);
|
||||
Set<QueryAnalyzer.QueryExtraction> result = selectBestExtraction(terms1, terms2);
|
||||
Set<QueryExtraction> expected = shortestTerms1Length >= shortestTerms2Length ? terms1 : terms2;
|
||||
assertThat(result, sameInstance(expected));
|
||||
}
|
||||
|
@ -699,8 +848,9 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
public void testPointRangeQuery() {
|
||||
// int ranges get converted to long ranges:
|
||||
Query query = IntPoint.newRangeQuery("_field", 10, 20);
|
||||
Result result = analyze(query, Collections.emptyMap());
|
||||
Result result = analyze(query, Version.CURRENT);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
List<QueryAnalyzer.QueryExtraction> ranges = new ArrayList<>(result.extractions);
|
||||
assertThat(ranges.size(), equalTo(1));
|
||||
assertNull(ranges.get(0).term);
|
||||
|
@ -709,7 +859,8 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
assertDimension(ranges.get(0).range.upperPoint, bytes -> IntPoint.encodeDimension(20, bytes, 0));
|
||||
|
||||
query = LongPoint.newRangeQuery("_field", 10L, 21L);
|
||||
result = analyze(query, Collections.emptyMap());
|
||||
result = analyze(query, Version.CURRENT);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertFalse(result.verified);
|
||||
ranges = new ArrayList<>(result.extractions);
|
||||
assertThat(ranges.size(), equalTo(1));
|
||||
|
@ -720,7 +871,8 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
|
||||
// Half float ranges get converted to double ranges:
|
||||
query = HalfFloatPoint.newRangeQuery("_field", 10F, 20F);
|
||||
result = analyze(query, Collections.emptyMap());
|
||||
result = analyze(query, Version.CURRENT);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertFalse(result.verified);
|
||||
ranges = new ArrayList<>(result.extractions);
|
||||
assertThat(ranges.size(), equalTo(1));
|
||||
|
@ -731,7 +883,8 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
|
||||
// Float ranges get converted to double ranges:
|
||||
query = FloatPoint.newRangeQuery("_field", 10F, 20F);
|
||||
result = analyze(query, Collections.emptyMap());
|
||||
result = analyze(query, Version.CURRENT);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertFalse(result.verified);
|
||||
ranges = new ArrayList<>(result.extractions);
|
||||
assertThat(ranges.size(), equalTo(1));
|
||||
|
@ -741,7 +894,8 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
assertDimension(ranges.get(0).range.upperPoint, bytes -> FloatPoint.encodeDimension(20F, bytes, 0));
|
||||
|
||||
query = DoublePoint.newRangeQuery("_field", 10D, 20D);
|
||||
result = analyze(query, Collections.emptyMap());
|
||||
result = analyze(query, Version.CURRENT);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertFalse(result.verified);
|
||||
ranges = new ArrayList<>(result.extractions);
|
||||
assertThat(ranges.size(), equalTo(1));
|
||||
|
@ -752,7 +906,8 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
|
||||
query = InetAddressPoint.newRangeQuery("_field", InetAddresses.forString("192.168.1.0"),
|
||||
InetAddresses.forString("192.168.1.255"));
|
||||
result = analyze(query, Collections.emptyMap());
|
||||
result = analyze(query, Version.CURRENT);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertFalse(result.verified);
|
||||
ranges = new ArrayList<>(result.extractions);
|
||||
assertThat(ranges.size(), equalTo(1));
|
||||
|
@ -765,24 +920,26 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
public void testTooManyPointDimensions() {
|
||||
// For now no extraction support for geo queries:
|
||||
Query query1 = LatLonPoint.newBoxQuery("_field", 0, 1, 0, 1);
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(query1, Collections.emptyMap()));
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(query1, Version.CURRENT));
|
||||
|
||||
Query query2 = LongPoint.newRangeQuery("_field", new long[]{0, 0, 0}, new long[]{1, 1, 1});
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(query2, Collections.emptyMap()));
|
||||
expectThrows(UnsupportedQueryException.class, () -> analyze(query2, Version.CURRENT));
|
||||
}
|
||||
|
||||
public void testPointRangeQuery_lowerUpperReversed() {
|
||||
Query query = IntPoint.newRangeQuery("_field", 20, 10);
|
||||
Result result = analyze(query, Collections.emptyMap());
|
||||
Result result = analyze(query, Version.CURRENT);
|
||||
assertTrue(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertThat(result.extractions.size(), equalTo(0));
|
||||
}
|
||||
|
||||
public void testIndexOrDocValuesQuery() {
|
||||
Query query = new IndexOrDocValuesQuery(IntPoint.newRangeQuery("_field", 10, 20),
|
||||
SortedNumericDocValuesField.newSlowRangeQuery("_field", 10, 20));
|
||||
Result result = analyze(query, Collections.emptyMap());
|
||||
Result result = analyze(query, Version.CURRENT);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
List<QueryAnalyzer.QueryExtraction> ranges = new ArrayList<>(result.extractions);
|
||||
assertThat(ranges.size(), equalTo(1));
|
||||
assertNull(ranges.get(0).term);
|
||||
|
@ -795,8 +952,9 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
TermQuery termQuery = new TermQuery(new Term("field", "value"));
|
||||
QueryBitSetProducer queryBitSetProducer = new QueryBitSetProducer(new TermQuery(new Term("_type", "child")));
|
||||
ESToParentBlockJoinQuery query = new ESToParentBlockJoinQuery(termQuery, queryBitSetProducer, ScoreMode.None, "child");
|
||||
Result result = analyze(query, Collections.emptyMap());
|
||||
Result result = analyze(query, Version.CURRENT);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertEquals(1, result.extractions.size());
|
||||
assertNull(result.extractions.toArray(new QueryExtraction[0])[0].range);
|
||||
assertEquals(new Term("field", "value"), result.extractions.toArray(new QueryExtraction[0])[0].term);
|
||||
|
@ -806,44 +964,101 @@ public class QueryAnalyzerTests extends ESTestCase {
|
|||
BooleanQuery.Builder boolQuery = new BooleanQuery.Builder();
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
|
||||
Result result = analyze(boolQuery.build(), Collections.emptyMap());
|
||||
Result result = analyze(boolQuery.build(), Version.V_6_0_0);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertEquals(1, result.extractions.size());
|
||||
assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
|
||||
|
||||
boolQuery = new BooleanQuery.Builder();
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
|
||||
boolQuery.add(IntPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
|
||||
result = analyze(boolQuery.build(), Collections.emptyMap());
|
||||
result = analyze(boolQuery.build(), Version.V_6_0_0);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertEquals(1, result.extractions.size());
|
||||
assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
|
||||
|
||||
boolQuery = new BooleanQuery.Builder();
|
||||
boolQuery.add(DoublePoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
|
||||
boolQuery.add(DoublePoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
|
||||
result = analyze(boolQuery.build(), Collections.emptyMap());
|
||||
result = analyze(boolQuery.build(), Version.V_6_0_0);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertEquals(1, result.extractions.size());
|
||||
assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
|
||||
|
||||
boolQuery = new BooleanQuery.Builder();
|
||||
boolQuery.add(DoublePoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
|
||||
boolQuery.add(FloatPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
|
||||
result = analyze(boolQuery.build(), Collections.emptyMap());
|
||||
result = analyze(boolQuery.build(), Version.V_6_0_0);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertEquals(1, result.extractions.size());
|
||||
assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
|
||||
|
||||
boolQuery = new BooleanQuery.Builder();
|
||||
boolQuery.add(HalfFloatPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
|
||||
boolQuery.add(HalfFloatPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
|
||||
result = analyze(boolQuery.build(), Collections.emptyMap());
|
||||
result = analyze(boolQuery.build(), Version.V_6_0_0);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertEquals(1, result.extractions.size());
|
||||
assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
|
||||
}
|
||||
|
||||
public void testPointRangeQuerySelectRanges() {
|
||||
BooleanQuery.Builder boolQuery = new BooleanQuery.Builder();
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.SHOULD);
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.SHOULD);
|
||||
Result result = analyze(boolQuery.build(), Version.CURRENT);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertEquals(2, result.extractions.size());
|
||||
assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
|
||||
assertEquals("_field1", new ArrayList<>(result.extractions).get(1).range.fieldName);
|
||||
|
||||
boolQuery = new BooleanQuery.Builder();
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
|
||||
result = analyze(boolQuery.build(), Version.CURRENT);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(2));
|
||||
assertEquals(2, result.extractions.size());
|
||||
assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
|
||||
assertEquals("_field1", new ArrayList<>(result.extractions).get(1).range.fieldName);
|
||||
|
||||
boolQuery = new BooleanQuery.Builder();
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 15), BooleanClause.Occur.FILTER);
|
||||
result = analyze(boolQuery.build(), Version.CURRENT);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertEquals(2, result.extractions.size());
|
||||
assertEquals("_field1", new ArrayList<>(result.extractions).get(0).range.fieldName);
|
||||
assertEquals("_field1", new ArrayList<>(result.extractions).get(1).range.fieldName);
|
||||
|
||||
boolQuery = new BooleanQuery.Builder().setMinimumNumberShouldMatch(2);
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.SHOULD);
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.SHOULD);
|
||||
result = analyze(boolQuery.build(), Version.CURRENT);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(2));
|
||||
assertEquals(2, result.extractions.size());
|
||||
assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
|
||||
assertEquals("_field1", new ArrayList<>(result.extractions).get(1).range.fieldName);
|
||||
|
||||
boolQuery = new BooleanQuery.Builder().setMinimumNumberShouldMatch(2);
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.SHOULD);
|
||||
boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 15), BooleanClause.Occur.SHOULD);
|
||||
result = analyze(boolQuery.build(), Version.CURRENT);
|
||||
assertFalse(result.verified);
|
||||
assertThat(result.minimumShouldMatch, equalTo(1));
|
||||
assertEquals(2, result.extractions.size());
|
||||
assertEquals("_field1", new ArrayList<>(result.extractions).get(0).range.fieldName);
|
||||
assertEquals("_field1", new ArrayList<>(result.extractions).get(1).range.fieldName);
|
||||
}
|
||||
|
||||
private static void assertDimension(byte[] expected, Consumer<byte[]> consumer) {
|
||||
byte[] dest = new byte[expected.length];
|
||||
consumer.accept(dest);
@@ -27,7 +27,7 @@ grant codeBase "${codebase.elasticsearch-rest-client}" {
permission java.net.NetPermission "getProxySelector";
};

grant codeBase "${codebase.httpasyncclient-4.1.2.jar}" {
grant codeBase "${codebase.httpasyncclient}" {
// rest client uses system properties which gets the default proxy
permission java.net.NetPermission "getProxySelector";
};

@@ -17,7 +17,7 @@
* under the License.
*/

grant codeBase "${codebase.netty-common-4.1.13.Final.jar}" {
grant codeBase "${codebase.netty-common}" {
// for reading the system-wide configuration for the backlog of established sockets
permission java.io.FilePermission "/proc/sys/net/core/somaxconn", "read";

@@ -25,7 +25,7 @@ grant codeBase "${codebase.netty-common-4.1.13.Final.jar}" {
permission java.net.SocketPermission "*", "accept,connect";
};

grant codeBase "${codebase.netty-transport-4.1.13.Final.jar}" {
grant codeBase "${codebase.netty-transport}" {
// Netty NioEventLoop wants to change this, because of https://bugs.openjdk.java.net/browse/JDK-6427854
// the bug says it only happened rarely, and that its fixed, but apparently it still happens rarely!
permission java.util.PropertyPermission "sun.nio.ch.bugLevel", "write";

@@ -33,8 +33,8 @@ dependencies {
compile "org.apache.httpcomponents:httpcore:${versions.httpcore}"
compile "commons-logging:commons-logging:${versions.commonslogging}"
compile "commons-codec:commons-codec:${versions.commonscodec}"
compile 'com.fasterxml.jackson.core:jackson-databind:2.5.3'
compile 'com.fasterxml.jackson.core:jackson-annotations:2.5.0'
compile 'com.fasterxml.jackson.core:jackson-databind:2.6.7.1'
compile 'com.fasterxml.jackson.core:jackson-annotations:2.6.0'
}

dependencyLicenses {

@@ -1 +0,0 @@
a2a55a3375bc1cef830ca426d68d2ea22961190e

@@ -0,0 +1 @@
a0990e2e812ac6639b6ce955c91b13228500476e

@@ -1 +0,0 @@
c37875ff66127d93e5f672708cb2dcc14c8232ab

@@ -0,0 +1 @@
306775aeb5164835a1dcbdf3f945587045cfb3b5

@@ -38,44 +38,47 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

public final class AzureStorageSettings {

    // prefix for azure client settings
    private static final String PREFIX = "azure.client.";
    private static final String AZURE_CLIENT_PREFIX_KEY = "azure.client.";

    /** Azure account name */
    public static final AffixSetting<SecureString> ACCOUNT_SETTING =
        Setting.affixKeySetting(PREFIX, "account", key -> SecureSetting.secureString(key, null));
        Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "account", key -> SecureSetting.secureString(key, null));

    /** Azure key */
    public static final AffixSetting<SecureString> KEY_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "key",
        key -> SecureSetting.secureString(key, null));

    /** max_retries: Number of retries in case of Azure errors. Defaults to 3 (RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT). */
    private static final Setting<Integer> MAX_RETRIES_SETTING =
        Setting.affixKeySetting(PREFIX, "max_retries",
            (key) -> Setting.intSetting(key, RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT, Setting.Property.NodeScope));
        Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "max_retries",
            (key) -> Setting.intSetting(key, RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT, Setting.Property.NodeScope),
            ACCOUNT_SETTING, KEY_SETTING);
    /**
     * Azure endpoint suffix. Default to core.windows.net (CloudStorageAccount.DEFAULT_DNS).
     */
    public static final Setting<String> ENDPOINT_SUFFIX_SETTING = Setting.affixKeySetting(PREFIX, "endpoint_suffix",
        key -> Setting.simpleString(key, Property.NodeScope));
    public static final Setting<String> ENDPOINT_SUFFIX_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "endpoint_suffix",
        key -> Setting.simpleString(key, Property.NodeScope), ACCOUNT_SETTING, KEY_SETTING);

    /** Azure key */
    public static final AffixSetting<SecureString> KEY_SETTING = Setting.affixKeySetting(PREFIX, "key",
        key -> SecureSetting.secureString(key, null));

    public static final AffixSetting<TimeValue> TIMEOUT_SETTING = Setting.affixKeySetting(PREFIX, "timeout",
        (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(-1), Property.NodeScope));
    public static final AffixSetting<TimeValue> TIMEOUT_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "timeout",
        (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(-1), Property.NodeScope), ACCOUNT_SETTING, KEY_SETTING);

    /** The type of the proxy to connect to azure through. Can be direct (no proxy, default), http or socks */
    public static final AffixSetting<Proxy.Type> PROXY_TYPE_SETTING = Setting.affixKeySetting(PREFIX, "proxy.type",
        (key) -> new Setting<>(key, "direct", s -> Proxy.Type.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope));
    public static final AffixSetting<Proxy.Type> PROXY_TYPE_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "proxy.type",
        (key) -> new Setting<>(key, "direct", s -> Proxy.Type.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope)
        , ACCOUNT_SETTING, KEY_SETTING);

    /** The host name of a proxy to connect to azure through. */
    public static final Setting<String> PROXY_HOST_SETTING = Setting.affixKeySetting(PREFIX, "proxy.host",
        (key) -> Setting.simpleString(key, Property.NodeScope));
    public static final AffixSetting<String> PROXY_HOST_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "proxy.host",
        (key) -> Setting.simpleString(key, Property.NodeScope), KEY_SETTING, ACCOUNT_SETTING, PROXY_TYPE_SETTING);

    /** The port of a proxy to connect to azure through. */
    public static final Setting<Integer> PROXY_PORT_SETTING = Setting.affixKeySetting(PREFIX, "proxy.port",
        (key) -> Setting.intSetting(key, 0, 0, 65535, Setting.Property.NodeScope));
    public static final Setting<Integer> PROXY_PORT_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "proxy.port",
        (key) -> Setting.intSetting(key, 0, 0, 65535, Setting.Property.NodeScope), ACCOUNT_SETTING, KEY_SETTING, PROXY_TYPE_SETTING,
        PROXY_HOST_SETTING);

    private final String account;
    private final String key;

@@ -157,9 +160,8 @@ public final class AzureStorageSettings {
     */
    public static Map<String, AzureStorageSettings> load(Settings settings) {
        // Get the list of existing named configurations
        Set<String> clientNames = settings.getGroups(PREFIX).keySet();
        Map<String, AzureStorageSettings> storageSettings = new HashMap<>();
        for (String clientName : clientNames) {
        for (String clientName : ACCOUNT_SETTING.getNamespaces(settings)) {
            storageSettings.put(clientName, getClientSettings(settings, clientName));
        }

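The Azure hunks above switch the affix settings over to declaring explicit dependencies on the account/key settings and to namespace enumeration via getNamespaces. The following is a minimal, self-contained sketch of that same pattern, not part of this commit; the "example.client." prefix and the ACCOUNT/TIMEOUT names are hypothetical.

import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.AffixSetting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;

class AffixDependencyExample {

    // Concrete "example.client.<name>.account" settings, one per configured client name.
    static final AffixSetting<SecureString> ACCOUNT = Setting.affixKeySetting(
        "example.client.", "account", key -> SecureSetting.secureString(key, null));

    // Each concrete "example.client.<name>.timeout" declares a dependency on the matching
    // account setting, so a timeout configured for a client without credentials can be rejected.
    static final AffixSetting<TimeValue> TIMEOUT = Setting.affixKeySetting(
        "example.client.", "timeout",
        key -> Setting.timeSetting(key, TimeValue.timeValueSeconds(30), Setting.Property.NodeScope),
        ACCOUNT);

    static void listClients(Settings settings) {
        // getNamespaces(...) replaces settings.getGroups(prefix).keySet() as the way to
        // discover which client names are configured.
        for (String clientName : ACCOUNT.getNamespaces(settings)) {
            System.out.println("configured client: " + clientName);
        }
    }
}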
@@ -40,6 +40,7 @@ import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.env.Environment;

import java.io.IOException;

@@ -54,10 +55,8 @@ import java.util.Set;

interface GoogleCloudStorageService {

    String SETTINGS_PREFIX = "gcs.client.";

    /** A json credentials file loaded from secure settings. */
    Setting.AffixSetting<InputStream> CREDENTIALS_FILE_SETTING = Setting.affixKeySetting(SETTINGS_PREFIX, "credentials_file",
    Setting.AffixSetting<InputStream> CREDENTIALS_FILE_SETTING = Setting.affixKeySetting("gcs.client.", "credentials_file",
        key -> SecureSetting.secureFile(key, null));

    /**

@@ -176,16 +175,15 @@ interface GoogleCloudStorageService {

    /** Load all secure credentials from the settings. */
    static Map<String, GoogleCredential> loadClientCredentials(Settings settings) {
        Set<String> clientNames = settings.getGroups(SETTINGS_PREFIX).keySet();
        Map<String, GoogleCredential> credentials = new HashMap<>();
        for (String clientName : clientNames) {
            Setting<InputStream> concreteSetting = CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName);
        Iterable<Setting<InputStream>> iterable = CREDENTIALS_FILE_SETTING.getAllConcreteSettings(settings)::iterator;
        for (Setting<InputStream> concreteSetting : iterable) {
            try (InputStream credStream = concreteSetting.get(settings)) {
                GoogleCredential credential = GoogleCredential.fromStream(credStream);
                if (credential.createScopedRequired()) {
                    credential = credential.createScoped(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL));
                }
                credentials.put(clientName, credential);
                credentials.put(CREDENTIALS_FILE_SETTING.getNamespace(concreteSetting), credential);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }

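The loadClientCredentials change above replaces the getGroups-based lookup with iteration over the concrete affix settings and recovers the client name from each concrete instance. A small sketch of that pattern under the same Setting API, not part of this commit; FILE_SETTING and the "example.client." prefix are made-up names.

import java.io.InputStream;
import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.AffixSetting;
import org.elasticsearch.common.settings.Settings;

class AffixEnumerationExample {

    static final AffixSetting<InputStream> FILE_SETTING = Setting.affixKeySetting(
        "example.client.", "credentials_file", key -> SecureSetting.secureFile(key, null));

    static void printNamespaces(Settings settings) {
        // getAllConcreteSettings returns a Stream, so adapt it to a for-each loop
        // exactly as the patched loadClientCredentials does.
        Iterable<Setting<InputStream>> concrete = FILE_SETTING.getAllConcreteSettings(settings)::iterator;
        for (Setting<InputStream> setting : concrete) {
            // getNamespace maps a concrete "example.client.<name>.credentials_file" back to <name>.
            System.out.println("client: " + FILE_SETTING.getNamespace(setting));
        }
    }
}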
@@ -34,8 +34,8 @@ dependencies {
  compile "org.apache.httpcomponents:httpcore:${versions.httpcore}"
  compile "commons-logging:commons-logging:${versions.commonslogging}"
  compile "commons-codec:commons-codec:${versions.commonscodec}"
  compile "com.fasterxml.jackson.core:jackson-databind:2.5.3"
  compile "com.fasterxml.jackson.core:jackson-annotations:2.5.0"
  compile 'com.fasterxml.jackson.core:jackson-databind:2.6.7.1'
  compile 'com.fasterxml.jackson.core:jackson-annotations:2.6.0'

  // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here,
  // and whitelist this hack in JarHell

@@ -1 +0,0 @@
a2a55a3375bc1cef830ca426d68d2ea22961190e

@@ -0,0 +1 @@
a0990e2e812ac6639b6ce955c91b13228500476e

@@ -1 +0,0 @@
c37875ff66127d93e5f672708cb2dcc14c8232ab

@@ -0,0 +1 @@
306775aeb5164835a1dcbdf3f945587045cfb3b5

@@ -1,223 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.repositories.s3;

import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.model.AbortMultipartUploadRequest;
import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PartETag;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.model.UploadPartResult;
import com.amazonaws.util.Base64;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;

/**
 * DefaultS3OutputStream uploads data to the AWS S3 service using 2 modes: single and multi part.
 * <p>
 * When the length of the chunk is lower than buffer_size, the chunk is uploaded with a single request.
 * Otherwise multiple requests are made, each of buffer_size (except the last one which can be lower than buffer_size).
 * <p>
 * Quick facts about S3:
 * <p>
 * Maximum object size: 5 TB
 * Maximum number of parts per upload: 10,000
 * Part numbers: 1 to 10,000 (inclusive)
 * Part size: 5 MB to 5 GB, last part can be < 5 MB
 * <p>
 * See http://docs.aws.amazon.com/AmazonS3/latest/dev/qfacts.html
 * See http://docs.aws.amazon.com/AmazonS3/latest/dev/uploadobjusingmpu.html
 */
class DefaultS3OutputStream extends S3OutputStream {

    private static final ByteSizeValue MULTIPART_MAX_SIZE = new ByteSizeValue(5, ByteSizeUnit.GB);
    private static final Logger logger = Loggers.getLogger("cloud.aws");
    /**
     * Multipart Upload API data
     */
    private String multipartId;
    private int multipartChunks;
    private List<PartETag> multiparts;

    DefaultS3OutputStream(S3BlobStore blobStore, String bucketName, String blobName, int bufferSizeInBytes, boolean serverSideEncryption) {
        super(blobStore, bucketName, blobName, bufferSizeInBytes, serverSideEncryption);
    }

    @Override
    public void flush(byte[] bytes, int off, int len, boolean closing) throws IOException {
        SocketAccess.doPrivilegedIOException(() -> {
            flushPrivileged(bytes, off, len, closing);
            return null;
        });
    }

    private void flushPrivileged(byte[] bytes, int off, int len, boolean closing) throws IOException {
        if (len > MULTIPART_MAX_SIZE.getBytes()) {
            throw new IOException("Unable to upload files larger than " + MULTIPART_MAX_SIZE + " to Amazon S3");
        }

        if (!closing) {
            if (len < getBufferSize()) {
                upload(bytes, off, len);
            } else {
                if (getFlushCount() == 0) {
                    initializeMultipart();
                }
                uploadMultipart(bytes, off, len, false);
            }
        } else {
            if (multipartId != null) {
                uploadMultipart(bytes, off, len, true);
                completeMultipart();
            } else {
                upload(bytes, off, len);
            }
        }
    }

    /**
     * Upload data using a single request.
     */
    private void upload(byte[] bytes, int off, int len) throws IOException {
        try (ByteArrayInputStream is = new ByteArrayInputStream(bytes, off, len)) {
            try {
                doUpload(getBlobStore(), getBucketName(), getBlobName(), is, len, isServerSideEncryption());
            } catch (AmazonClientException e) {
                throw new IOException("Unable to upload object " + getBlobName(), e);
            }
        }
    }

    protected void doUpload(S3BlobStore blobStore, String bucketName, String blobName, InputStream is, int length,
                            boolean serverSideEncryption) throws AmazonS3Exception {
        ObjectMetadata md = new ObjectMetadata();
        if (serverSideEncryption) {
            md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
        }
        md.setContentLength(length);

        PutObjectRequest putRequest = new PutObjectRequest(bucketName, blobName, is, md)
            .withStorageClass(blobStore.getStorageClass())
            .withCannedAcl(blobStore.getCannedACL());
        blobStore.client().putObject(putRequest);

    }

    private void initializeMultipart() {
        while (multipartId == null) {
            multipartId = doInitialize(getBlobStore(), getBucketName(), getBlobName(), isServerSideEncryption());
            if (multipartId != null) {
                multipartChunks = 1;
                multiparts = new ArrayList<>();
            }
        }
    }

    protected String doInitialize(S3BlobStore blobStore, String bucketName, String blobName, boolean serverSideEncryption) {
        InitiateMultipartUploadRequest request = new InitiateMultipartUploadRequest(bucketName, blobName)
            .withCannedACL(blobStore.getCannedACL())
            .withStorageClass(blobStore.getStorageClass());

        if (serverSideEncryption) {
            ObjectMetadata md = new ObjectMetadata();
            md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
            request.setObjectMetadata(md);
        }

        return blobStore.client().initiateMultipartUpload(request).getUploadId();
    }

    private void uploadMultipart(byte[] bytes, int off, int len, boolean lastPart) throws IOException {
        try (ByteArrayInputStream is = new ByteArrayInputStream(bytes, off, len)) {
            try {
                PartETag partETag = doUploadMultipart(getBlobStore(), getBucketName(), getBlobName(), multipartId, is, len, lastPart);
                multiparts.add(partETag);
                multipartChunks++;
            } catch (AmazonClientException e) {
                abortMultipart();
                throw e;
            }
        }
    }

    protected PartETag doUploadMultipart(S3BlobStore blobStore, String bucketName, String blobName, String uploadId, InputStream is,
                                         int length, boolean lastPart) throws AmazonS3Exception {
        UploadPartRequest request = new UploadPartRequest()
            .withBucketName(bucketName)
            .withKey(blobName)
            .withUploadId(uploadId)
            .withPartNumber(multipartChunks)
            .withInputStream(is)
            .withPartSize(length)
            .withLastPart(lastPart);

        UploadPartResult response = blobStore.client().uploadPart(request);
        return response.getPartETag();

    }

    private void completeMultipart() {
        try {
            doCompleteMultipart(getBlobStore(), getBucketName(), getBlobName(), multipartId, multiparts);
            multipartId = null;
            return;
        } catch (AmazonClientException e) {
            abortMultipart();
            throw e;
        }
    }

    protected void doCompleteMultipart(S3BlobStore blobStore, String bucketName, String blobName, String uploadId, List<PartETag> parts)
        throws AmazonS3Exception {
        CompleteMultipartUploadRequest request = new CompleteMultipartUploadRequest(bucketName, blobName, uploadId, parts);
        blobStore.client().completeMultipartUpload(request);
    }

    private void abortMultipart() {
        if (multipartId != null) {
            try {
                doAbortMultipart(getBlobStore(), getBucketName(), getBlobName(), multipartId);
            } finally {
                multipartId = null;
            }
        }
    }

    protected void doAbortMultipart(S3BlobStore blobStore, String bucketName, String blobName, String uploadId)
        throws AmazonS3Exception {
        blobStore.client().abortMultipartUpload(new AbortMultipartUploadRequest(bucketName, blobName, uploadId));
    }
}

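The javadoc of the removed class quotes the S3 multipart limits (at most 10,000 parts, 5 MB to 5 GB per part, 5 TB per object). A rough illustrative calculation, not from this commit, of how the part size (the repository buffer size) bounds the largest blob a multipart upload can carry; the 100 MB figure is a made-up example.

class S3MultipartLimits {

    // S3 allows at most 10,000 parts per multipart upload (per the javadoc above).
    static final long MAX_PARTS = 10_000L;

    // The largest blob reachable with a fixed part size is simply MAX_PARTS * partSize.
    static long maxBlobSizeBytes(long partSizeBytes) {
        return MAX_PARTS * partSizeBytes;
    }

    public static void main(String[] args) {
        long partSize = 100L * 1024 * 1024; // hypothetical 100 MB buffer
        // 10,000 * 100 MB ~= 0.95 TiB, well under the 5 TB object limit
        System.out.println(maxBlobSizeBytes(partSize)); // prints 1048576000000
    }
}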
@@ -21,35 +21,48 @@ package org.elasticsearch.repositories.s3;

import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.AbortMultipartUploadRequest;
import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest;
import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PartETag;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.model.UploadPartResult;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStoreException;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.collect.Tuple;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.NoSuchFileException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.repositories.s3.S3Repository.MAX_FILE_SIZE;
import static org.elasticsearch.repositories.s3.S3Repository.MAX_FILE_SIZE_USING_MULTIPART;
import static org.elasticsearch.repositories.s3.S3Repository.MIN_PART_SIZE_USING_MULTIPART;

class S3BlobContainer extends AbstractBlobContainer {

    protected final S3BlobStore blobStore;

    protected final String keyPath;
    private final S3BlobStore blobStore;
    private final String keyPath;

    S3BlobContainer(BlobPath path, S3BlobStore blobStore) {
        super(path);

@@ -91,9 +104,15 @@ class S3BlobContainer extends AbstractBlobContainer {
        if (blobExists(blobName)) {
            throw new FileAlreadyExistsException("blob [" + blobName + "] already exists, cannot overwrite");
        }
        try (OutputStream stream = createOutput(blobName)) {
            Streams.copy(inputStream, stream);
        }

        SocketAccess.doPrivilegedIOException(() -> {
            if (blobSize <= blobStore.bufferSizeInBytes()) {
                executeSingleUpload(blobStore, buildKey(blobName), inputStream, blobSize);
            } else {
                executeMultipartUpload(blobStore, buildKey(blobName), inputStream, blobSize);
            }
            return null;
        });
    }

    @Override

@@ -109,12 +128,6 @@ class S3BlobContainer extends AbstractBlobContainer {
        }
    }

    private OutputStream createOutput(final String blobName) throws IOException {
        // UploadS3OutputStream does buffering & retry logic internally
        return new DefaultS3OutputStream(blobStore, blobStore.bucket(), buildKey(blobName),
            blobStore.bufferSizeInBytes(), blobStore.serverSideEncryption());
    }

    @Override
    public Map<String, BlobMetaData> listBlobsByPrefix(@Nullable String blobNamePrefix) throws IOException {
        return AccessController.doPrivileged((PrivilegedAction<Map<String, BlobMetaData>>) () -> {

@@ -175,7 +188,158 @@ class S3BlobContainer extends AbstractBlobContainer {
        return listBlobsByPrefix(null);
    }

    protected String buildKey(String blobName) {
    private String buildKey(String blobName) {
        return keyPath + blobName;
    }

    /**
     * Uploads a blob using a single upload request
     */
    void executeSingleUpload(final S3BlobStore blobStore,
                             final String blobName,
                             final InputStream input,
                             final long blobSize) throws IOException {

        // Extra safety checks
        if (blobSize > MAX_FILE_SIZE.getBytes()) {
            throw new IllegalArgumentException("Upload request size [" + blobSize + "] can't be larger than " + MAX_FILE_SIZE);
        }
        if (blobSize > blobStore.bufferSizeInBytes()) {
            throw new IllegalArgumentException("Upload request size [" + blobSize + "] can't be larger than buffer size");
        }

        try {
            final ObjectMetadata md = new ObjectMetadata();
            md.setContentLength(blobSize);
            if (blobStore.serverSideEncryption()) {
                md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
            }

            final PutObjectRequest putRequest = new PutObjectRequest(blobStore.bucket(), blobName, input, md);
            putRequest.setStorageClass(blobStore.getStorageClass());
            putRequest.setCannedAcl(blobStore.getCannedACL());

            blobStore.client().putObject(putRequest);
        } catch (AmazonClientException e) {
            throw new IOException("Unable to upload object [" + blobName + "] using a single upload", e);
        }
    }

    /**
     * Uploads a blob using multipart upload requests.
     */
    void executeMultipartUpload(final S3BlobStore blobStore,
                                final String blobName,
                                final InputStream input,
                                final long blobSize) throws IOException {

        if (blobSize > MAX_FILE_SIZE_USING_MULTIPART.getBytes()) {
            throw new IllegalArgumentException("Multipart upload request size [" + blobSize
                + "] can't be larger than " + MAX_FILE_SIZE_USING_MULTIPART);
        }
        if (blobSize < MIN_PART_SIZE_USING_MULTIPART.getBytes()) {
            throw new IllegalArgumentException("Multipart upload request size [" + blobSize
                + "] can't be smaller than " + MIN_PART_SIZE_USING_MULTIPART);
        }

        final long partSize = blobStore.bufferSizeInBytes();
        final Tuple<Long, Long> multiparts = numberOfMultiparts(blobSize, partSize);

        if (multiparts.v1() > Integer.MAX_VALUE) {
            throw new IllegalArgumentException("Too many multipart upload requests, maybe try a larger buffer size?");
        }

        final int nbParts = multiparts.v1().intValue();
        final long lastPartSize = multiparts.v2();
        assert blobSize == (nbParts - 1) * partSize + lastPartSize : "blobSize does not match multipart sizes";

        final SetOnce<String> uploadId = new SetOnce<>();
        final String bucketName = blobStore.bucket();
        boolean success = false;

        try {
            final InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucketName, blobName);
            initRequest.setStorageClass(blobStore.getStorageClass());
            initRequest.setCannedACL(blobStore.getCannedACL());
            if (blobStore.serverSideEncryption()) {
                final ObjectMetadata md = new ObjectMetadata();
                md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
                initRequest.setObjectMetadata(md);
            }

            uploadId.set(blobStore.client().initiateMultipartUpload(initRequest).getUploadId());
            if (Strings.isEmpty(uploadId.get())) {
                throw new IOException("Failed to initialize multipart upload " + blobName);
            }

            final List<PartETag> parts = new ArrayList<>();

            long bytesCount = 0;
            for (int i = 1; i <= nbParts; i++) {
                final UploadPartRequest uploadRequest = new UploadPartRequest();
                uploadRequest.setBucketName(bucketName);
                uploadRequest.setKey(blobName);
                uploadRequest.setUploadId(uploadId.get());
                uploadRequest.setPartNumber(i);
                uploadRequest.setInputStream(input);

                if (i < nbParts) {
                    uploadRequest.setPartSize(partSize);
                    uploadRequest.setLastPart(false);
                } else {
                    uploadRequest.setPartSize(lastPartSize);
                    uploadRequest.setLastPart(true);
                }
                bytesCount += uploadRequest.getPartSize();

                final UploadPartResult uploadResponse = blobStore.client().uploadPart(uploadRequest);
                parts.add(uploadResponse.getPartETag());
            }

            if (bytesCount != blobSize) {
                throw new IOException("Failed to execute multipart upload for [" + blobName + "], expected " + blobSize
                    + "bytes sent but got " + bytesCount);
            }

            CompleteMultipartUploadRequest complRequest = new CompleteMultipartUploadRequest(bucketName, blobName, uploadId.get(), parts);
            blobStore.client().completeMultipartUpload(complRequest);
            success = true;

        } catch (AmazonClientException e) {
            throw new IOException("Unable to upload object [" + blobName + "] using multipart upload", e);
        } finally {
            if (success == false && Strings.hasLength(uploadId.get())) {
                final AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest(bucketName, blobName, uploadId.get());
                blobStore.client().abortMultipartUpload(abortRequest);
            }
        }
    }

    /**
     * Returns the number of parts of size of {@code partSize} needed to reach {@code totalSize},
     * along with the size of the last (or unique) part.
     *
     * @param totalSize the total size
     * @param partSize  the part size
     * @return a {@link Tuple} containing the number of parts to fill {@code totalSize} and
     *         the size of the last part
     */
    static Tuple<Long, Long> numberOfMultiparts(final long totalSize, final long partSize) {
        if (partSize <= 0) {
            throw new IllegalArgumentException("Part size must be greater than zero");
        }

        if (totalSize == 0L || totalSize <= partSize) {
            return Tuple.tuple(1L, totalSize);
        }

        final long parts = totalSize / partSize;
        final long remaining = totalSize % partSize;

        if (remaining == 0) {
            return Tuple.tuple(parts, partSize);
        } else {
            return Tuple.tuple(parts + 1, remaining);
        }
    }
}

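A usage sketch of the numberOfMultiparts helper introduced above; it assumes same-package access to the package-private S3BlobContainer, and the byte counts are made-up values chosen to show the rounding behaviour.

import org.elasticsearch.common.collect.Tuple;

class NumberOfMultipartsExample {
    public static void main(String[] args) {
        // 10 bytes in parts of 3 bytes -> 4 parts, the last one holding the remaining 1 byte
        Tuple<Long, Long> parts = S3BlobContainer.numberOfMultiparts(10L, 3L);
        System.out.println(parts.v1() + " parts, last part " + parts.v2() + " bytes"); // 4 parts, last part 1 bytes

        // 9 bytes in parts of 3 bytes -> exactly 3 full parts
        parts = S3BlobContainer.numberOfMultiparts(9L, 3L);
        System.out.println(parts.v1() + " parts, last part " + parts.v2() + " bytes"); // 3 parts, last part 3 bytes
    }
}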
@@ -93,8 +93,8 @@ class S3BlobStore extends AbstractComponent implements BlobStore {
        return serverSideEncryption;
    }

    public int bufferSizeInBytes() {
        return bufferSize.bytesAsInt();
    public long bufferSizeInBytes() {
        return bufferSize.getBytes();
    }

    @Override

Some files were not shown because too many files have changed in this diff