() {
@Override
- protected NumericTokenStream initialValue() {
- return new NumericTokenStream(Integer.MAX_VALUE);
+ protected LegacyNumericTokenStream initialValue() {
+ return new LegacyNumericTokenStream(Integer.MAX_VALUE);
}
};
@@ -337,7 +337,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
}
}
- protected NumericTokenStream getCachedStream() {
+ protected LegacyNumericTokenStream getCachedStream() {
if (fieldType().numericPrecisionStep() == 4) {
return tokenStream4.get();
} else if (fieldType().numericPrecisionStep() == 8) {
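
Lucene renamed its trie-encoded numeric classes with a Legacy prefix, so the changes above are mechanical renames. A minimal sketch of the per-thread token stream cache pattern used here, with an illustrative precision step of 8:

    import org.apache.lucene.analysis.LegacyNumericTokenStream;

    class CachedStreamSketch {
        // One reusable stream per thread and precision step; avoids reallocating on every field.
        static final ThreadLocal<LegacyNumericTokenStream> TOKEN_STREAM_8 =
                new ThreadLocal<LegacyNumericTokenStream>() {
                    @Override
                    protected LegacyNumericTokenStream initialValue() {
                        return new LegacyNumericTokenStream(8); // precisionStep = 8
                    }
                };
    }
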
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java
index 027f0b1b40b..56b1e9a78f2 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java
@@ -24,11 +24,11 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.NumericRangeQuery;
+import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.LegacyNumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Explicit;
@@ -121,7 +121,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
static final class ShortFieldType extends NumberFieldType {
public ShortFieldType() {
- super(NumericType.INT);
+ super(LegacyNumericType.INT);
}
protected ShortFieldType(ShortFieldType ref) {
@@ -160,13 +160,13 @@ public class ShortFieldMapper extends NumberFieldMapper {
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
- NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
+ LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
- return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
+ return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
lowerTerm == null ? null : (int)parseValue(lowerTerm),
upperTerm == null ? null : (int)parseValue(upperTerm),
includeLower, includeUpper);
@@ -176,7 +176,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
short iValue = parseValue(value);
short iSim = fuzziness.asShort();
- return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
+ return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
@@ -184,8 +184,8 @@ public class ShortFieldMapper extends NumberFieldMapper {
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
- long minValue = NumericUtils.getMinInt(terms);
- long maxValue = NumericUtils.getMaxInt(terms);
+ long minValue = LegacyNumericUtils.getMinInt(terms);
+ long maxValue = LegacyNumericUtils.getMaxInt(terms);
return new FieldStats.Long(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
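
All short-field queries now route through LegacyNumericRangeQuery; shorts are indexed on the int trie, hence the newIntRange variant. A hedged usage sketch (field name and bounds are illustrative):

    import org.apache.lucene.search.LegacyNumericRangeQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.util.LegacyNumericUtils;

    class ShortRangeSketch {
        // Inclusive range [1, 100] over a short field indexed as an int trie.
        static Query range() {
            return LegacyNumericRangeQuery.newIntRange("my_short_field",
                    LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, 1, 100, true, true);
        }
    }
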
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java
index f881d206f0c..5e617dd6815 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java
@@ -21,13 +21,15 @@ package org.elasticsearch.index.mapper.geo;
import org.apache.lucene.document.Field;
import org.apache.lucene.spatial.util.GeoHashUtils;
-import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.LegacyNumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -56,6 +58,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
*/
public abstract class BaseGeoPointFieldMapper extends FieldMapper implements ArrayValueMapperParser {
public static final String CONTENT_TYPE = "geo_point";
+ protected static final DeprecationLogger deprecationLogger = new DeprecationLogger(Loggers.getLogger(BaseGeoPointFieldMapper.class));
public static class Names {
public static final String LAT = "lat";
public static final String LAT_SUFFIX = "." + LAT;
@@ -194,9 +197,13 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("lat_lon")) {
+ deprecationLogger.deprecated(CONTENT_TYPE + " lat_lon parameter is deprecated and will be removed "
+ + "in the next major release");
builder.enableLatLon(XContentMapValues.lenientNodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("precision_step")) {
+ deprecationLogger.deprecated(CONTENT_TYPE + " precision_step parameter is deprecated and will be removed "
+ + "in the next major release");
builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode));
iterator.remove();
} else if (propName.equals("geohash")) {
@@ -483,7 +490,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
if (includeDefaults || fieldType().isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) {
builder.field("lat_lon", fieldType().isLatLonEnabled());
}
- if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
+ if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != LegacyNumericUtils.PRECISION_STEP_DEFAULT)) {
builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
}
if (includeDefaults || fieldType().isGeoHashEnabled() != Defaults.ENABLE_GEOHASH) {
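
The new deprecationLogger routes warnings to a dedicated deprecation logger instead of failing the mapping, so existing mappings keep working while users are nudged off lat_lon and precision_step. A sketch of the pattern (class name is illustrative):

    import org.elasticsearch.common.logging.DeprecationLogger;
    import org.elasticsearch.common.logging.Loggers;

    class GeoPointParsingSketch {
        private static final DeprecationLogger DEPRECATION_LOGGER =
                new DeprecationLogger(Loggers.getLogger(GeoPointParsingSketch.class));

        void onLatLonParameter() {
            // Logged at WARN on the deprecation logger; parsing continues normally.
            DEPRECATION_LOGGER.deprecated("geo_point lat_lon parameter is deprecated and will be removed "
                    + "in the next major release");
        }
    }
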
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
index 0d84cf21812..75c082dd439 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
@@ -84,7 +84,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {
fieldType.setTokenized(false);
if (context.indexCreatedVersion().before(Version.V_2_3_0)) {
fieldType.setNumericPrecisionStep(GeoPointField.PRECISION_STEP);
- fieldType.setNumericType(FieldType.NumericType.LONG);
+ fieldType.setNumericType(FieldType.LegacyNumericType.LONG);
}
setupFieldType(context);
return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper,
@@ -95,7 +95,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {
public GeoPointFieldMapper build(BuilderContext context) {
if (context.indexCreatedVersion().before(Version.V_2_3_0)) {
fieldType.setNumericPrecisionStep(GeoPointField.PRECISION_STEP);
- fieldType.setNumericType(FieldType.NumericType.LONG);
+ fieldType.setNumericType(FieldType.LegacyNumericType.LONG);
}
return super.build(context);
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
index e90fdae0c47..57778fa8d25 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
@@ -18,9 +18,9 @@
*/
package org.elasticsearch.index.mapper.geo;
-import com.spatial4j.core.shape.Point;
-import com.spatial4j.core.shape.Shape;
-import com.spatial4j.core.shape.jts.JtsGeometry;
+import org.locationtech.spatial4j.shape.Point;
+import org.locationtech.spatial4j.shape.Shape;
+import org.locationtech.spatial4j.shape.jts.JtsGeometry;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
@@ -58,7 +58,7 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenien
/**
- * FieldMapper for indexing {@link com.spatial4j.core.shape.Shape}s.
+ * FieldMapper for indexing {@link org.locationtech.spatial4j.shape.Shape}s.
*
* Currently Shapes can only be indexed and can only be queried using
* {@link org.elasticsearch.index.query.GeoShapeQueryParser}, consequently
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java
index 18929bfd833..9a4cf70782b 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java
@@ -19,14 +19,14 @@
package org.elasticsearch.index.mapper.ip;
-import org.apache.lucene.analysis.NumericTokenStream;
+import org.apache.lucene.analysis.LegacyNumericTokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.search.NumericRangeQuery;
+import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.LegacyNumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
@@ -206,7 +206,7 @@ public class IpFieldMapper extends NumberFieldMapper {
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
- NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
+ LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@@ -242,7 +242,7 @@ public class IpFieldMapper extends NumberFieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
- return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
+ return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
@@ -257,7 +257,7 @@ public class IpFieldMapper extends NumberFieldMapper {
} catch (IllegalArgumentException e) {
iSim = fuzziness.asLong();
}
- return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
+ return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
@@ -356,11 +356,11 @@ public class IpFieldMapper extends NumberFieldMapper {
public static class NumericIpTokenizer extends NumericTokenizer {
public NumericIpTokenizer(int precisionStep, char[] buffer) throws IOException {
- super(new NumericTokenStream(precisionStep), buffer, null);
+ super(new LegacyNumericTokenStream(precisionStep), buffer, null);
}
@Override
- protected void setValue(NumericTokenStream tokenStream, String value) {
+ protected void setValue(LegacyNumericTokenStream tokenStream, String value) {
tokenStream.setLongValue(ipToLong(value));
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java
index c8d0379d701..524266420fb 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java
@@ -69,8 +69,6 @@ public final class ElasticsearchMergePolicy extends MergePolicy {
/** Return an "upgraded" view of the reader. */
static CodecReader filter(CodecReader reader) throws IOException {
- // convert 0.90.x _uid payloads to _version docvalues if needed
- reader = VersionFieldUpgrader.wrap(reader);
// TODO: remove 0.90.x/1.x freqs/prox/payloads from _uid?
// the previous code never did this, so some indexes carry around trash.
return reader;
diff --git a/core/src/main/java/org/elasticsearch/index/shard/VersionFieldUpgrader.java b/core/src/main/java/org/elasticsearch/index/shard/VersionFieldUpgrader.java
deleted file mode 100644
index 42bd5420ac3..00000000000
--- a/core/src/main/java/org/elasticsearch/index/shard/VersionFieldUpgrader.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.shard;
-
-import org.apache.lucene.codecs.DocValuesProducer;
-import org.apache.lucene.index.CodecReader;
-import org.apache.lucene.index.DocValuesType;
-import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.FieldInfos;
-import org.apache.lucene.index.FilterCodecReader;
-import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.index.PostingsEnum;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.packed.GrowableWriter;
-import org.apache.lucene.util.packed.PackedInts;
-import org.elasticsearch.common.Numbers;
-import org.elasticsearch.index.mapper.internal.UidFieldMapper;
-import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-
-/**
- * Converts 0.90.x _uid payloads to _version docvalues
- */
-class VersionFieldUpgrader extends FilterCodecReader {
- final FieldInfos infos;
-
- VersionFieldUpgrader(CodecReader in) {
- super(in);
-
- // Find a free field number
- int fieldNumber = 0;
- for (FieldInfo fi : in.getFieldInfos()) {
- fieldNumber = Math.max(fieldNumber, fi.number + 1);
- }
-
- // TODO: lots of things can wrong here...
- FieldInfo newInfo = new FieldInfo(VersionFieldMapper.NAME, // field name
- fieldNumber, // field number
- false, // store term vectors
- false, // omit norms
- false, // store payloads
- IndexOptions.NONE, // index options
- DocValuesType.NUMERIC, // docvalues
- -1, // docvalues generation
- Collections.emptyMap() // attributes
- );
- newInfo.checkConsistency(); // fail merge immediately if above code is wrong
-
- final ArrayList<FieldInfo> fieldInfoList = new ArrayList<>();
- for (FieldInfo info : in.getFieldInfos()) {
- if (!info.name.equals(VersionFieldMapper.NAME)) {
- fieldInfoList.add(info);
- }
- }
- fieldInfoList.add(newInfo);
- infos = new FieldInfos(fieldInfoList.toArray(new FieldInfo[fieldInfoList.size()]));
- }
-
- static CodecReader wrap(CodecReader reader) throws IOException {
- final FieldInfos fieldInfos = reader.getFieldInfos();
- final FieldInfo versionInfo = fieldInfos.fieldInfo(VersionFieldMapper.NAME);
- if (versionInfo != null && versionInfo.getDocValuesType() != DocValuesType.NONE) {
- // the reader is a recent one, it has versions and they are stored
- // in a numeric doc values field
- return reader;
- }
- // The segment is an old one, look at the _uid field
- final Terms terms = reader.terms(UidFieldMapper.NAME);
- if (terms == null || !terms.hasPayloads()) {
- // The segment doesn't have an _uid field or doesn't have payloads
- // don't try to do anything clever. If any other segment has versions
- // all versions of this segment will be initialized to 0
- return reader;
- }
- // convert _uid payloads -> _version docvalues
- return new VersionFieldUpgrader(reader);
- }
-
- @Override
- public FieldInfos getFieldInfos() {
- return infos;
- }
-
- @Override
- public DocValuesProducer getDocValuesReader() {
- DocValuesProducer producer = in.getDocValuesReader();
- // TODO: move this nullness stuff out
- if (producer == null) {
- producer = FilterDocValuesProducer.EMPTY;
- }
- return new UninvertedVersions(producer, this);
- }
-
- static class UninvertedVersions extends FilterDocValuesProducer {
- final CodecReader reader;
-
- UninvertedVersions(DocValuesProducer in, CodecReader reader) {
- super(in);
- this.reader = reader;
- }
-
- @Override
- public NumericDocValues getNumeric(FieldInfo field) throws IOException {
- if (VersionFieldMapper.NAME.equals(field.name)) {
- // uninvert into a packed ints and expose as docvalues
- final Terms terms = reader.terms(UidFieldMapper.NAME);
- final TermsEnum uids = terms.iterator();
- final GrowableWriter versions = new GrowableWriter(2, reader.maxDoc(), PackedInts.COMPACT);
- PostingsEnum dpe = null;
- for (BytesRef uid = uids.next(); uid != null; uid = uids.next()) {
- dpe = uids.postings(dpe, PostingsEnum.PAYLOADS);
- assert terms.hasPayloads() : "field has payloads";
- final Bits liveDocs = reader.getLiveDocs();
- for (int doc = dpe.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = dpe.nextDoc()) {
- if (liveDocs != null && liveDocs.get(doc) == false) {
- continue;
- }
- dpe.nextPosition();
- final BytesRef payload = dpe.getPayload();
- if (payload != null && payload.length == 8) {
- final long version = Numbers.bytesToLong(payload);
- versions.set(doc, version);
- break;
- }
- }
- }
- return versions;
- } else {
- return in.getNumeric(field);
- }
- }
-
- @Override
- public Bits getDocsWithField(FieldInfo field) throws IOException {
- if (VersionFieldMapper.NAME.equals(field.name)) {
- return new Bits.MatchAllBits(reader.maxDoc());
- } else {
- return in.getDocsWithField(field);
- }
- }
-
- @Override
- public DocValuesProducer getMergeInstance() throws IOException {
- return new UninvertedVersions(in.getMergeInstance(), reader);
- }
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
index e950ebda1b3..edbebe8f033 100644
--- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
+++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.similarity;
import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
import org.apache.lucene.search.similarities.Similarity;
+import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexModule;
@@ -63,6 +64,10 @@ public final class SimilarityService extends AbstractIndexComponent {
Map<String, Settings> similaritySettings = this.indexSettings.getSettings().getGroups(IndexModule.SIMILARITY_SETTINGS_PREFIX);
for (Map.Entry<String, Settings> entry : similaritySettings.entrySet()) {
String name = entry.getKey();
+ // Starting with v5.0 indices, it should no longer be possible to redefine built-in similarities
+ if (BUILT_IN.containsKey(name) && indexSettings.getIndexVersionCreated().onOrAfter(Version.V_5_0_0)) {
+ throw new IllegalArgumentException("Cannot redefine built-in Similarity [" + name + "]");
+ }
Settings settings = entry.getValue();
String typeName = settings.get("type");
if (typeName == null) {
@@ -76,9 +81,16 @@ public final class SimilarityService extends AbstractIndexComponent {
}
providers.put(name, factory.apply(name, settings));
}
- addSimilarities(similaritySettings, providers, DEFAULTS);
+ for (Map.Entry<String, SimilarityProvider> entry : addSimilarities(similaritySettings, DEFAULTS).entrySet()) {
+ // Avoid overwriting custom providers for indices older than v5.0
+ if (providers.containsKey(entry.getKey()) && indexSettings.getIndexVersionCreated().before(Version.V_5_0_0)) {
+ continue;
+ }
+ providers.put(entry.getKey(), entry.getValue());
+ }
this.similarities = providers;
- defaultSimilarity = providers.get(SimilarityService.DEFAULT_SIMILARITY).get();
+ defaultSimilarity = (providers.get("default") != null) ? providers.get("default").get()
+ : providers.get(SimilarityService.DEFAULT_SIMILARITY).get();
// Expert users can configure the base type as being different to default, but out-of-box we use default.
baseSimilarity = (providers.get("base") != null) ? providers.get("base").get() :
defaultSimilarity;
@@ -90,7 +102,9 @@ public final class SimilarityService extends AbstractIndexComponent {
defaultSimilarity;
}
- private void addSimilarities(Map<String, Settings> similaritySettings, Map<String, SimilarityProvider> providers, Map<String, BiFunction<String, Settings, SimilarityProvider>> similarities) {
+ private Map<String, SimilarityProvider> addSimilarities(Map<String, Settings> similaritySettings,
+ Map<String, BiFunction<String, Settings, SimilarityProvider>> similarities) {
+ Map<String, SimilarityProvider> providers = new HashMap<>(similarities.size());
for (Map.Entry<String, BiFunction<String, Settings, SimilarityProvider>> entry : similarities.entrySet()) {
String name = entry.getKey();
BiFunction<String, Settings, SimilarityProvider> factory = entry.getValue();
@@ -100,12 +114,17 @@ public final class SimilarityService extends AbstractIndexComponent {
}
providers.put(name, factory.apply(name, settings));
}
+ return providers;
}
public SimilarityProvider getSimilarity(String name) {
return similarities.get(name);
}
+ public SimilarityProvider getDefaultSimilarity() {
+ return similarities.get("default");
+ }
+
static class PerFieldSimilarity extends PerFieldSimilarityWrapper {
private final Similarity defaultSimilarity;
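
The net effect: an index created on or after 5.0 can no longer shadow a built-in similarity name, while older indices keep their previous behavior. Example only, settings that now fail at index creation because "BM25" is built in:

    import org.elasticsearch.common.settings.Settings;

    class SimilarityGuardSketch {
        // Trips the new guard for 5.0+ indices:
        // IllegalArgumentException: Cannot redefine built-in Similarity [BM25]
        static Settings offending() {
            return Settings.builder()
                    .put("index.similarity.BM25.type", "classic")
                    .build();
        }
    }
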
diff --git a/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java b/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java
index cfd5dc8f066..9f712c77e70 100644
--- a/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java
+++ b/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java
@@ -36,7 +36,7 @@ import java.util.Objects;
*/
public class StoreFileMetaData implements Writeable {
- public static final Version FIRST_LUCENE_CHECKSUM_VERSION = Version.LUCENE_4_8_0;
+ public static final Version FIRST_LUCENE_CHECKSUM_VERSION = Version.LUCENE_5_0_0;
private final String name;
diff --git a/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java b/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java
index cd0f94567f3..54ba8638eb2 100644
--- a/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java
+++ b/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java
@@ -22,7 +22,6 @@ import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.InputStreamDataInput;
-import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.io.Channels;
import java.io.IOException;
@@ -36,9 +35,9 @@ import java.nio.file.Path;
*/
class Checkpoint {
- static final int BUFFER_SIZE = RamUsageEstimator.NUM_BYTES_INT // ops
- + RamUsageEstimator.NUM_BYTES_LONG // offset
- + RamUsageEstimator.NUM_BYTES_LONG;// generation
+ static final int BUFFER_SIZE = Integer.BYTES // ops
+ + Long.BYTES // offset
+ + Long.BYTES;// generation
final long offset;
final int numOps;
final long generation;
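
Integer.BYTES and Long.BYTES are Java 8 constants (4 and 8) with the same values as the removed RamUsageEstimator fields, so the checkpoint layout is unchanged:

    class CheckpointSizeSketch {
        // 4 (ops) + 8 (offset) + 8 (generation) = 20 bytes, exactly as before.
        static final int BUFFER_SIZE = Integer.BYTES + Long.BYTES + Long.BYTES;
    }
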
diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java
index 5a4438f426d..31b8db03141 100644
--- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java
+++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java
@@ -418,10 +418,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
try {
final BufferedChecksumStreamOutput checksumStreamOutput = new BufferedChecksumStreamOutput(out);
final long start = out.position();
- out.skip(RamUsageEstimator.NUM_BYTES_INT);
+ out.skip(Integer.BYTES);
writeOperationNoSize(checksumStreamOutput, operation);
final long end = out.position();
- final int operationSize = (int) (end - RamUsageEstimator.NUM_BYTES_INT - start);
+ final int operationSize = (int) (end - Integer.BYTES - start);
out.seek(start);
out.writeInt(operationSize);
out.seek(end);
@@ -636,7 +636,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
@Override
public long ramBytesUsed() {
- return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT;
+ return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * Long.BYTES + Integer.BYTES;
}
@Override
@@ -1144,10 +1144,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
for (Operation op : toWrite) {
out.reset();
final long start = out.position();
- out.skip(RamUsageEstimator.NUM_BYTES_INT);
+ out.skip(Integer.BYTES);
writeOperationNoSize(checksumStreamOutput, op);
long end = out.position();
- int operationSize = (int) (out.position() - RamUsageEstimator.NUM_BYTES_INT - start);
+ int operationSize = (int) (out.position() - Integer.BYTES - start);
out.seek(start);
out.writeInt(operationSize);
out.seek(end);
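
Both call sites use the same length-prefixed framing: reserve a 4-byte slot, write the operation, then seek back and fill in the size. A self-contained analogue, with a RandomAccessFile standing in for the translog's seekable output:

    import java.io.IOException;
    import java.io.RandomAccessFile;

    class FramedWriteSketch {
        static void writeFramed(RandomAccessFile out, byte[] payload) throws IOException {
            long start = out.getFilePointer();
            out.seek(start + Integer.BYTES);      // reserve the 4-byte size slot
            out.write(payload);                   // stands in for writeOperationNoSize(...)
            long end = out.getFilePointer();
            out.seek(start);
            out.writeInt((int) (end - Integer.BYTES - start)); // backfill frame length
            out.seek(end);                        // resume after the frame
        }
    }
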
diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java
index ecc3822361c..fcb3daea796 100644
--- a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java
+++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java
@@ -26,7 +26,6 @@ import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.InputStreamDataInput;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.io.Channels;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
@@ -116,7 +115,7 @@ public class TranslogReader extends BaseTranslogReader implements Closeable {
if (uuidBytes.bytesEquals(ref) == false) {
throw new TranslogCorruptedException("expected shard UUID [" + uuidBytes + "] but got: [" + ref + "] this translog file belongs to a different translog. path:" + path);
}
- return new TranslogReader(checkpoint.generation, channel, path, ref.length + CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC) + RamUsageEstimator.NUM_BYTES_INT, checkpoint.offset, checkpoint.numOps);
+ return new TranslogReader(checkpoint.generation, channel, path, ref.length + CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC) + Integer.BYTES, checkpoint.offset, checkpoint.numOps);
default:
throw new TranslogCorruptedException("No known translog stream version: " + version + " path:" + path);
}
diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java
index a1fc708ddaf..e215669761c 100644
--- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java
+++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java
@@ -24,7 +24,6 @@ import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.OutputStreamDataOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Channels;
import org.elasticsearch.common.unit.ByteSizeValue;
@@ -76,7 +75,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
}
private static int getHeaderLength(int uuidLength) {
- return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + RamUsageEstimator.NUM_BYTES_INT;
+ return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + Integer.BYTES;
}
public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException {
diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java
index 575153c8ada..32b5f55b369 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java
@@ -228,7 +228,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo
@Override
public long ramBytesUsed() {
- return RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_LONG + value.length();
+ return RamUsageEstimator.NUM_BYTES_OBJECT_REF + Long.BYTES + value.length();
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
index f99b39ef620..5d2fb761842 100644
--- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
+++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
@@ -19,6 +19,8 @@
package org.elasticsearch.indices.analysis;
import org.apache.lucene.analysis.hunspell.Dictionary;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.component.AbstractComponent;
@@ -183,7 +185,9 @@ public class HunspellService extends AbstractComponent {
affixStream = Files.newInputStream(affixFiles[0]);
- return new Dictionary(affixStream, dicStreams, ignoreCase);
+ try (Directory tmp = new SimpleFSDirectory(env.tmpFile())) {
+ return new Dictionary(tmp, "hunspell", affixStream, dicStreams, ignoreCase);
+ }
} catch (Exception e) {
logger.error("Could not load hunspell dictionary [{}]", e, locale);
diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestService.java b/core/src/main/java/org/elasticsearch/ingest/IngestService.java
index 78a1f66fb80..b38f7470e39 100644
--- a/core/src/main/java/org/elasticsearch/ingest/IngestService.java
+++ b/core/src/main/java/org/elasticsearch/ingest/IngestService.java
@@ -20,11 +20,17 @@
package org.elasticsearch.ingest;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.ingest.core.IngestInfo;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.core.ProcessorInfo;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
/**
* Holder class for several ingest related services.
@@ -53,6 +59,15 @@ public class IngestService implements Closeable {
pipelineStore.buildProcessorFactoryRegistry(processorsRegistryBuilder, scriptService);
}
+ public IngestInfo info() {
+ Map<String, Processor.Factory> processorFactories = pipelineStore.getProcessorRegistry().getProcessorFactories();
+ List<ProcessorInfo> processorInfoList = new ArrayList<>(processorFactories.size());
+ for (Map.Entry<String, Processor.Factory> entry : processorFactories.entrySet()) {
+ processorInfoList.add(new ProcessorInfo(entry.getKey()));
+ }
+ return new IngestInfo(processorInfoList);
+ }
+
@Override
public void close() throws IOException {
pipelineStore.close();
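
info() snapshots the processor registry into an IngestInfo so a node can advertise which processor types it supports; this is what the put-pipeline validation below consumes. Usage sketch:

    import org.elasticsearch.ingest.IngestService;
    import org.elasticsearch.ingest.core.IngestInfo;

    class IngestInfoUsageSketch {
        // Example only: ask a node which processor types it has installed.
        static boolean supportsType(IngestService ingestService, String type) {
            IngestInfo info = ingestService.info();
            return info.containsProcessor(type); // e.g. "set", "remove"
        }
    }
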
diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java
index 3999f357b86..ac2df419f55 100644
--- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java
+++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java
@@ -20,6 +20,7 @@
package org.elasticsearch.ingest;
import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
@@ -31,12 +32,15 @@ import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.ingest.core.IngestInfo;
import org.elasticsearch.ingest.core.Pipeline;
import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.core.ProcessorInfo;
import org.elasticsearch.ingest.core.TemplateService;
import org.elasticsearch.script.ScriptService;
@@ -47,6 +51,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
public class PipelineStore extends AbstractComponent implements Closeable, ClusterStateListener {
@@ -130,8 +135,8 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust
pipelines.remove(request.getId());
ClusterState.Builder newState = ClusterState.builder(currentState);
newState.metaData(MetaData.builder(currentState.getMetaData())
- .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines))
- .build());
+ .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines))
+ .build());
return newState.build();
}
}
@@ -139,15 +144,9 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust
/**
* Stores the specified pipeline definition in the request.
*/
- public void put(ClusterService clusterService, PutPipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+ public void put(ClusterService clusterService, Map<DiscoveryNode, IngestInfo> ingestInfos, PutPipelineRequest request, ActionListener<WritePipelineResponse> listener) throws Exception {
// validates the pipeline and processor configuration before submitting a cluster update task:
- Map<String, Object> pipelineConfig = XContentHelper.convertToMap(request.getSource(), false).v2();
- try {
- factory.create(request.getId(), pipelineConfig, processorRegistry);
- } catch(Exception e) {
- listener.onFailure(e);
- return;
- }
+ validatePipeline(ingestInfos, request);
clusterService.submitStateUpdateTask("put-pipeline-" + request.getId(), new AckedClusterStateUpdateTask<WritePipelineResponse>(request, listener) {
@Override
@@ -162,6 +161,25 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust
});
}
+ void validatePipeline(Map<DiscoveryNode, IngestInfo> ingestInfos, PutPipelineRequest request) throws Exception {
+ if (ingestInfos.isEmpty()) {
+ throw new IllegalStateException("Ingest info is empty");
+ }
+
+ Map<String, Object> pipelineConfig = XContentHelper.convertToMap(request.getSource(), false).v2();
+ Pipeline pipeline = factory.create(request.getId(), pipelineConfig, processorRegistry);
+ List<Exception> exceptions = new ArrayList<>();
+ for (Processor processor : pipeline.flattenAllProcessors()) {
+ for (Map.Entry<DiscoveryNode, IngestInfo> entry : ingestInfos.entrySet()) {
+ if (entry.getValue().containsProcessor(processor.getType()) == false) {
+ String message = "Processor type [" + processor.getType() + "] is not installed on node [" + entry.getKey() + "]";
+ exceptions.add(new IllegalArgumentException(message));
+ }
+ }
+ }
+ ExceptionsHelper.rethrowAndSuppress(exceptions);
+ }
+
ClusterState innerPut(PutPipelineRequest request, ClusterState currentState) {
IngestMetadata currentIngestMetadata = currentState.metaData().custom(IngestMetadata.TYPE);
Map<String, PipelineConfiguration> pipelines;
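
validatePipeline rejects a put request if any node lacks a processor type used anywhere in the pipeline, including nested on_failure chains, and reports every missing (type, node) pair at once via rethrowAndSuppress. A simplified restatement, with plain maps standing in for DiscoveryNode/IngestInfo:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    class PipelineValidationSketch {
        // ingestInfos: node name -> processor types it advertises (simplified stand-ins).
        static List<String> missing(Map<String, Set<String>> ingestInfos, List<String> pipelineTypes) {
            List<String> errors = new ArrayList<>();
            for (String type : pipelineTypes) {
                for (Map.Entry<String, Set<String>> node : ingestInfos.entrySet()) {
                    if (node.getValue().contains(type) == false) {
                        errors.add("Processor type [" + type + "] is not installed on node [" + node.getKey() + "]");
                    }
                }
            }
            return errors; // the real code wraps each message in IllegalArgumentException
        }
    }
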
diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java
index bd885c578b3..e831d70702e 100644
--- a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java
+++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java
@@ -21,6 +21,7 @@ package org.elasticsearch.ingest;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.core.ProcessorInfo;
import org.elasticsearch.ingest.core.TemplateService;
import java.io.Closeable;
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java
index c784ea1c57a..ddf3781d1a6 100644
--- a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java
+++ b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java
@@ -20,6 +20,9 @@
package org.elasticsearch.ingest.core;
+import org.elasticsearch.common.util.iterable.Iterables;
+
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -56,6 +59,24 @@ public class CompoundProcessor implements Processor {
return processors;
}
+ public List<Processor> flattenProcessors() {
+ List<Processor> allProcessors = new ArrayList<>(flattenProcessors(processors));
+ allProcessors.addAll(flattenProcessors(onFailureProcessors));
+ return allProcessors;
+ }
+
+ private static List<Processor> flattenProcessors(List<Processor> processors) {
+ List<Processor> flattened = new ArrayList<>();
+ for (Processor processor : processors) {
+ if (processor instanceof CompoundProcessor) {
+ flattened.addAll(((CompoundProcessor) processor).flattenProcessors());
+ } else {
+ flattened.add(processor);
+ }
+ }
+ return flattened;
+ }
+
@Override
public String getType() {
return "compound";
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java b/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java
new file mode 100644
index 00000000000..8625e1d8884
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+
+public class IngestInfo implements Streamable, ToXContent {
+
+ private Set<ProcessorInfo> processors;
+
+ public IngestInfo() {
+ processors = Collections.emptySet();
+ }
+
+ public IngestInfo(List<ProcessorInfo> processors) {
+ this.processors = new LinkedHashSet<>(processors);
+ }
+
+ public Iterable<ProcessorInfo> getProcessors() {
+ return processors;
+ }
+
+ public boolean containsProcessor(String type) {
+ return processors.contains(new ProcessorInfo(type));
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ int size = in.readVInt();
+ Set<ProcessorInfo> processors = new LinkedHashSet<>(size);
+ for (int i = 0; i < size; i++) {
+ ProcessorInfo info = new ProcessorInfo();
+ info.readFrom(in);
+ processors.add(info);
+ }
+ this.processors = processors;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVInt(processors.size()); // vint, matching readVInt in readFrom
+ for (ProcessorInfo info : processors) {
+ info.writeTo(out);
+ }
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject("ingest");
+ builder.startArray("processors");
+ for (ProcessorInfo info : processors) {
+ info.toXContent(builder, params);
+ }
+ builder.endArray();
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ IngestInfo that = (IngestInfo) o;
+ return Objects.equals(processors, that.processors);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(processors);
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java
index 9b887ec229c..821a44c0a96 100644
--- a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java
+++ b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java
@@ -83,6 +83,14 @@ public final class Pipeline {
return compoundProcessor.getOnFailureProcessors();
}
+ /**
+ * Flattens the normal and on failure processors into a single list. The original order is lost.
+ * This can be useful for pipeline validation purposes.
+ */
+ public List<Processor> flattenAllProcessors() {
+ return compoundProcessor.flattenProcessors();
+ }
+
public final static class Factory {
public Pipeline create(String id, Map<String, Object> config, ProcessorsRegistry processorRegistry) throws Exception {
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java b/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java
new file mode 100644
index 00000000000..64c3d19719b
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+
+public class ProcessorInfo implements Streamable, ToXContent {
+
+ private String type;
+
+ ProcessorInfo() {
+ }
+
+ public ProcessorInfo(String type) {
+ this.type = type;
+ }
+
+ /**
+ * @return The unique processor type
+ */
+ public String getType() {
+ return type;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ this.type = in.readString();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(this.type);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field("type", type);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ ProcessorInfo that = (ProcessorInfo) o;
+
+ return type.equals(that.type);
+
+ }
+
+ @Override
+ public int hashCode() {
+ return type.hashCode();
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/node/service/NodeService.java b/core/src/main/java/org/elasticsearch/node/service/NodeService.java
index b5b8e8f2cb6..88b2fe48868 100644
--- a/core/src/main/java/org/elasticsearch/node/service/NodeService.java
+++ b/core/src/main/java/org/elasticsearch/node/service/NodeService.java
@@ -84,7 +84,6 @@ public class NodeService extends AbstractComponent implements Closeable {
this.transportService = transportService;
this.indicesService = indicesService;
this.discovery = discovery;
- discovery.setNodeService(this);
this.version = version;
this.pluginService = pluginService;
this.circuitBreakerService = circuitBreakerService;
@@ -132,12 +131,13 @@ public class NodeService extends AbstractComponent implements Closeable {
threadPool.info(),
transportService.info(),
httpServer == null ? null : httpServer.info(),
- pluginService == null ? null : pluginService.info()
+ pluginService == null ? null : pluginService.info(),
+ ingestService == null ? null : ingestService.info()
);
}
public NodeInfo info(boolean settings, boolean os, boolean process, boolean jvm, boolean threadPool,
- boolean transport, boolean http, boolean plugin) {
+ boolean transport, boolean http, boolean plugin, boolean ingest) {
return new NodeInfo(version, Build.CURRENT, discovery.localNode(), serviceAttributes,
settings ? settingsFilter.filter(this.settings) : null,
os ? monitorService.osService().info() : null,
@@ -146,7 +146,8 @@ public class NodeService extends AbstractComponent implements Closeable {
threadPool ? this.threadPool.info() : null,
transport ? transportService.info() : null,
http ? (httpServer == null ? null : httpServer.info()) : null,
- plugin ? (pluginService == null ? null : pluginService.info()) : null
+ plugin ? (pluginService == null ? null : pluginService.info()) : null,
+ ingest ? (ingestService == null ? null : ingestService.info()) : null
);
}
diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java
index b3208b4133c..98be7d308af 100644
--- a/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java
+++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java
@@ -115,10 +115,6 @@ final class PercolatorQuery extends Query {
@Override
public Query rewrite(IndexReader reader) throws IOException {
- if (getBoost() != 1f) {
- return super.rewrite(reader);
- }
-
Query rewritten = percolatorQueriesQuery.rewrite(reader);
if (rewritten != percolatorQueriesQuery) {
return new PercolatorQuery(rewritten, percolatorIndexSearcher, percolatorQueries);
diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
index 552e6aaf2e4..a6ea381adb4 100644
--- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
+++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
@@ -458,7 +458,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent extends BlobStoreForm
BytesReference bytes = write(obj);
try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
final String resourceDesc = "ChecksumBlobStoreFormat.writeBlob(blob=\"" + blobName + "\")";
- try (OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput(resourceDesc, byteArrayOutputStream, BUFFER_SIZE)) {
+ try (OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput(resourceDesc, blobName, byteArrayOutputStream, BUFFER_SIZE)) {
CodecUtil.writeHeader(indexOutput, codec, VERSION);
try (OutputStream indexOutputOutputStream = new IndexOutputOutputStream(indexOutput) {
@Override
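
Lucene's OutputStreamIndexOutput gained a mandatory name argument (used for resource bookkeeping and error messages); the blob name is a natural fit. Sketch under that assumption:

    import java.io.ByteArrayOutputStream;

    import org.apache.lucene.store.OutputStreamIndexOutput;

    class IndexOutputSketch {
        static OutputStreamIndexOutput open(String blobName, ByteArrayOutputStream bytes, int bufferSize) {
            String resourceDesc = "ChecksumBlobStoreFormat.writeBlob(blob=\"" + blobName + "\")";
            // Second argument is the new required name parameter.
            return new OutputStreamIndexOutput(resourceDesc, blobName, bytes, bufferSize);
        }
    }
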
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
index f11efeca87d..bd6637cb788 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
@@ -48,7 +48,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET;
public class RestNodesInfoAction extends BaseRestHandler {
private final SettingsFilter settingsFilter;
- private final static Set<String> ALLOWED_METRICS = Sets.newHashSet("http", "jvm", "os", "plugins", "process", "settings", "thread_pool", "transport");
+ private final static Set<String> ALLOWED_METRICS = Sets.newHashSet("http", "jvm", "os", "plugins", "process", "settings", "thread_pool", "transport", "ingest");
@Inject
public RestNodesInfoAction(Settings settings, RestController controller, Client client, SettingsFilter settingsFilter) {
@@ -101,6 +101,7 @@ public class RestNodesInfoAction extends BaseRestHandler {
nodesInfoRequest.transport(metrics.contains("transport"));
nodesInfoRequest.http(metrics.contains("http"));
nodesInfoRequest.plugins(metrics.contains("plugins"));
+ nodesInfoRequest.ingest(metrics.contains("ingest"));
}
settingsFilter.addFilterSettingParams(request);
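
With "ingest" added to the allowed metrics, GET /_nodes/ingest returns the new section; on the Java side the matching toggle is NodesInfoRequest.ingest(boolean), as used above. Example only:

    import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;

    class NodesIngestInfoSketch {
        // Request just the ingest section, mirroring GET /_nodes/ingest.
        static NodesInfoRequest ingestOnly() {
            NodesInfoRequest request = new NodesInfoRequest();
            request.clear();
            request.ingest(true);
            return request;
        }
    }
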
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java
index 99cdc16253a..658090bb6db 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java
@@ -52,10 +52,10 @@ public class RestCancelTasksAction extends BaseRestHandler {
TaskId parentTaskId = new TaskId(request.param("parent_task_id"));
CancelTasksRequest cancelTasksRequest = new CancelTasksRequest();
- cancelTasksRequest.taskId(taskId);
- cancelTasksRequest.nodesIds(nodesIds);
- cancelTasksRequest.actions(actions);
- cancelTasksRequest.parentTaskId(parentTaskId);
+ cancelTasksRequest.setTaskId(taskId);
+ cancelTasksRequest.setNodesIds(nodesIds);
+ cancelTasksRequest.setActions(actions);
+ cancelTasksRequest.setParentTaskId(parentTaskId);
client.admin().cluster().cancelTasks(cancelTasksRequest, new RestToXContentListener<>(channel));
}
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java
index 992267fa8a5..9a9d1991298 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java
@@ -50,13 +50,15 @@ public class RestListTasksAction extends BaseRestHandler {
TaskId taskId = new TaskId(request.param("taskId"));
String[] actions = Strings.splitStringByCommaToArray(request.param("actions"));
TaskId parentTaskId = new TaskId(request.param("parent_task_id"));
+ boolean waitForCompletion = request.paramAsBoolean("wait_for_completion", false);
ListTasksRequest listTasksRequest = new ListTasksRequest();
- listTasksRequest.taskId(taskId);
- listTasksRequest.nodesIds(nodesIds);
- listTasksRequest.detailed(detailed);
- listTasksRequest.actions(actions);
- listTasksRequest.parentTaskId(parentTaskId);
+ listTasksRequest.setTaskId(taskId);
+ listTasksRequest.setNodesIds(nodesIds);
+ listTasksRequest.setDetailed(detailed);
+ listTasksRequest.setActions(actions);
+ listTasksRequest.setParentTaskId(parentTaskId);
+ listTasksRequest.setWaitForCompletion(waitForCompletion);
client.admin().cluster().listTasks(listTasksRequest, new RestToXContentListener<>(channel));
}
}
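
The renamed setters plus the new wait_for_completion flag let callers block until matching tasks finish. Example only (the action pattern is illustrative):

    import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;

    class ListTasksSketch {
        static ListTasksRequest waitForWriteTasks() {
            ListTasksRequest request = new ListTasksRequest();
            request.setActions("indices:data/write/*"); // illustrative action pattern
            request.setWaitForCompletion(true);         // mirrors ?wait_for_completion=true
            return request;
        }
    }
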
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java
index 4e90a6a3a85..a3d0cc84559 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java
@@ -144,8 +144,12 @@ public class RestAnalyzeAction extends BaseRestHandler {
charFilters.add(parser.text());
}
analyzeRequest.charFilters(charFilters.toArray(new String[charFilters.size()]));
- } else if (parseFieldMatcher.match(currentFieldName, Fields.EXPLAIN) && token == XContentParser.Token.VALUE_BOOLEAN) {
- analyzeRequest.explain(parser.booleanValue());
+ } else if (parseFieldMatcher.match(currentFieldName, Fields.EXPLAIN)) {
+ if (parser.isBooleanValue()) {
+ analyzeRequest.explain(parser.booleanValue());
+ } else {
+ throw new IllegalArgumentException(currentFieldName + " must be either 'true' or 'false'");
+ }
} else if (parseFieldMatcher.match(currentFieldName, Fields.ATTRIBUTES) && token == XContentParser.Token.START_ARRAY){
List<String> attributes = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java
index b0479475d86..1821124473f 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java
@@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -108,7 +109,7 @@ public class SignificantStringTerms extends InternalSignificantTerms
@Override
protected void doReadFrom(StreamInput in) throws IOException {
- if (in.getVersion().onOrAfter(Version.V_1_4_0_Beta1)) {
- this.docCountError = in.readLong();
- } else {
- this.docCountError = -1;
- }
+ this.docCountError = in.readLong();
this.order = InternalOrder.Streams.readOrder(in);
this.formatter = ValueFormatterStreams.readOptional(in);
this.requiredSize = readSize(in);
@@ -218,9 +214,7 @@ public class DoubleTerms extends InternalTerms
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
- if (out.getVersion().onOrAfter(Version.V_1_4_0_Beta1)) {
- out.writeLong(docCountError);
- }
+ out.writeLong(docCountError);
InternalOrder.Streams.writeOrder(order, out);
ValueFormatterStreams.writeOptional(formatter, out);
writeSize(requiredSize, out);
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
index 91e949e190f..4377b9debbb 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
@@ -26,7 +26,6 @@ import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongBitSet;
-import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lease.Releasables;
@@ -136,7 +135,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
protected static void copy(BytesRef from, BytesRef to) {
if (to.bytes.length < from.length) {
- to.bytes = new byte[ArrayUtil.oversize(from.length, RamUsageEstimator.NUM_BYTES_BYTE)];
+ to.bytes = new byte[ArrayUtil.oversize(from.length, 1)];
}
to.offset = 0;
to.length = from.length;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java
index 0b9ebd97cf9..040768f9d3b 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java
@@ -105,7 +105,7 @@ public class StringTerms extends InternalTerms
@Override
int compareTerm(Terms.Bucket other) {
- return BytesRef.getUTF8SortedAsUnicodeComparator().compare(termBytes, ((Bucket) other).termBytes);
+ return termBytes.compareTo(((Bucket) other).termBytes);
}
@Override
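BytesRef implements Comparable<BytesRef> and compares bytes as unsigned values, which for UTF-8 encoded text matches Unicode code point order, so the explicit comparator above is redundant. For illustration:

import org.apache.lucene.util.BytesRef;

class BytesRefCompareSketch {
    public static void main(String[] args) {
        BytesRef a = new BytesRef("apple");
        BytesRef b = new BytesRef("banana");
        System.out.println(a.compareTo(b) < 0); // true: byte order matches text order
    }
}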
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java
index eee9d4cbf90..41dd0bb441e 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java
@@ -518,13 +518,13 @@ public class IncludeExclude implements Writeable, ToXContent {
if (includeValues != null) {
for (BytesRef val : includeValues) {
double dval=Double.parseDouble(val.utf8ToString());
- result.addAccept( NumericUtils.doubleToSortableLong(dval));
+ result.addAccept(NumericUtils.doubleToSortableLong(dval));
}
}
if (excludeValues != null) {
for (BytesRef val : excludeValues) {
double dval=Double.parseDouble(val.utf8ToString());
- result.addReject( NumericUtils.doubleToSortableLong(dval));
+ result.addReject(NumericUtils.doubleToSortableLong(dval));
}
}
return result;
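Beyond the whitespace cleanup above, NumericUtils.doubleToSortableLong is the load-bearing call here: it maps doubles onto longs whose natural order matches numeric double order, which is what the accept/reject sets rely on. For example:

import org.apache.lucene.util.NumericUtils;

class SortableLongSketch {
    public static void main(String[] args) {
        long a = NumericUtils.doubleToSortableLong(-1.5);
        long b = NumericUtils.doubleToSortableLong(2.25);
        System.out.println(a < b); // true, mirroring -1.5 < 2.25
    }
}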
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java
index 2e8ce4563ce..568ecdbec59 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java
@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.metrics.cardinality;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongBitSet;
-import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -67,7 +66,7 @@ public final class HyperLogLogPlusPlus implements Releasable {
*/
public static int precisionFromThreshold(long count) {
final long hashTableEntries = (long) Math.ceil(count / MAX_LOAD_FACTOR);
- int precision = PackedInts.bitsRequired(hashTableEntries * RamUsageEstimator.NUM_BYTES_INT);
+ int precision = PackedInts.bitsRequired(hashTableEntries * Integer.BYTES);
precision = Math.max(precision, MIN_PRECISION);
precision = Math.min(precision, MAX_PRECISION);
return precision;
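Integer.BYTES is the Java 8 replacement for Lucene's removed RamUsageEstimator.NUM_BYTES_INT; both equal 4. A worked example of precisionFromThreshold, assuming MAX_LOAD_FACTOR is 0.75 (an assumption for illustration; the real constant is defined in the class above):

import org.apache.lucene.util.packed.PackedInts;

class PrecisionSketch {
    public static void main(String[] args) {
        double maxLoadFactor = 0.75; // assumed value, for illustration only
        long count = 1000;
        long entries = (long) Math.ceil(count / maxLoadFactor);             // 1334
        int precision = PackedInts.bitsRequired(entries * Integer.BYTES);   // bitsRequired(5336) == 13
        System.out.println(precision); // 13, before clamping to [MIN_PRECISION, MAX_PRECISION]
    }
}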
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java
index 4687002cf12..2dfab325127 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java
@@ -167,14 +167,12 @@ public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue
}
@Override
- public InternalAggregation buildAggregation(long owningBucketOrdinal) {
- if (valuesSource == null) {
- return new InternalExtendedStats(name, 0, 0d, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0d, 0d, formatter,
- pipelineAggregators(), metaData());
+ public InternalAggregation buildAggregation(long bucket) {
+ if (valuesSource == null || bucket >= counts.size()) {
+ return buildEmptyAggregation();
}
- assert owningBucketOrdinal < counts.size();
- return new InternalExtendedStats(name, counts.get(owningBucketOrdinal), sums.get(owningBucketOrdinal),
- mins.get(owningBucketOrdinal), maxes.get(owningBucketOrdinal), sumOfSqrs.get(owningBucketOrdinal), sigma, formatter,
+ return new InternalExtendedStats(name, counts.get(bucket), sums.get(bucket),
+ mins.get(bucket), maxes.get(bucket), sumOfSqrs.get(bucket), sigma, formatter,
pipelineAggregators(), metaData());
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java
index 543c5907070..9fac5809cef 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java
@@ -158,19 +158,13 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStat
@Override
public void readOtherStatsFrom(StreamInput in) throws IOException {
sumOfSqrs = in.readDouble();
- if (in.getVersion().onOrAfter(Version.V_1_4_3)) {
- sigma = in.readDouble();
- } else {
- sigma = 2.0;
- }
+ sigma = in.readDouble();
}
@Override
protected void writeOtherStatsTo(StreamOutput out) throws IOException {
out.writeDouble(sumOfSqrs);
- if (out.getVersion().onOrAfter(Version.V_1_4_3)) {
- out.writeDouble(sigma);
- }
+ out.writeDouble(sigma);
}
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java
index 8ad24b5cb19..7a15f67dbd6 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java
@@ -78,12 +78,12 @@ public final class CustomQueryScorer extends QueryScorer {
Map<String, WeightedSpanTerm> terms) throws IOException {
if (query instanceof FunctionScoreQuery) {
query = ((FunctionScoreQuery) query).getSubQuery();
- extract(query, query.getBoost(), terms);
+ extract(query, 1F, terms);
} else if (query instanceof FiltersFunctionScoreQuery) {
query = ((FiltersFunctionScoreQuery) query).getSubQuery();
- extract(query, query.getBoost(), terms);
+ extract(query, 1F, terms);
} else if (terms.isEmpty()) {
- extractWeightedTerms(terms, query, query.getBoost());
+ extractWeightedTerms(terms, query, 1F);
}
}
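The constant 1F appears because Lucene 6 removed Query.getBoost()/setBoost(); a boost is now expressed by wrapping the query in a BoostQuery, which callers unwrap where the boost matters. For illustration:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

class BoostSketch {
    public static void main(String[] args) {
        Query boosted = new BoostQuery(new TermQuery(new Term("field", "value")), 2.0f);
        BoostQuery bq = (BoostQuery) boosted;
        System.out.println(bq.getBoost()); // 2.0
        System.out.println(bq.getQuery()); // field:value
    }
}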
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java
index b3175e6c22a..b9ae34b60b0 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java
@@ -89,23 +89,12 @@ public final class FragmentBuilderHelper {
}
if (analyzer instanceof CustomAnalyzer) {
final CustomAnalyzer a = (CustomAnalyzer) analyzer;
- if (a.tokenizerFactory() instanceof EdgeNGramTokenizerFactory
- || (a.tokenizerFactory() instanceof NGramTokenizerFactory
- && !((NGramTokenizerFactory)a.tokenizerFactory()).version().onOrAfter(Version.LUCENE_4_2))) {
- // ngram tokenizer is broken before 4.2
- return true;
- }
TokenFilterFactory[] tokenFilters = a.tokenFilters();
for (TokenFilterFactory tokenFilterFactory : tokenFilters) {
if (tokenFilterFactory instanceof WordDelimiterTokenFilterFactory
|| tokenFilterFactory instanceof EdgeNGramTokenFilterFactory) {
return true;
}
- if (tokenFilterFactory instanceof NGramTokenFilterFactory
- && !((NGramTokenFilterFactory)tokenFilterFactory).version().onOrAfter(Version.LUCENE_4_2)) {
- // ngram token filter is broken before 4.2
- return true;
- }
}
}
return false;
diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
index 6c01a27442e..8c3c19343b4 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
@@ -22,7 +22,6 @@ package org.elasticsearch.search.internal;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
@@ -233,9 +232,6 @@ public class DefaultSearchContext extends SearchContext {
Query result;
if (Queries.isConstantMatchAllQuery(query())) {
result = new ConstantScoreQuery(searchFilter);
- if (query().getBoost() != AbstractQueryBuilder.DEFAULT_BOOST) {
- result = new BoostQuery(result, query().getBoost());
- }
} else {
result = new BooleanQuery.Builder()
.add(query, Occur.MUST)
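With per-query boosts gone, a constant-score match-all no longer needs the BoostQuery wrapper. The builder chain continues past this excerpt; a hedged sketch of the usual shape of such a filtered query (names illustrative; the elided clause presumably attaches the search filter):

import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;

class FilteredQuerySketch {
    static Query filtered(Query query, Query searchFilter) {
        return new BooleanQuery.Builder()
            .add(query, Occur.MUST)          // scoring clause
            .add(searchFilter, Occur.FILTER) // non-scoring filter clause
            .build();
    }
}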
diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java
index c415fd5a70b..4f082b057da 100644
--- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java
@@ -35,8 +35,6 @@ public class FieldSortBuilder extends SortBuilder {
private Object missing;
- private Boolean ignoreUnmapped;
-
private String unmappedType;
private String sortMode;
@@ -76,17 +74,6 @@ public class FieldSortBuilder extends SortBuilder {
return this;
}
- /**
- * Sets if the field does not exists in the index, it should be ignored and not sorted by or not. Defaults
- * to false (not ignoring).
- * @deprecated Use {@link #unmappedType(String)} instead.
- */
- @Deprecated
- public FieldSortBuilder ignoreUnmapped(boolean ignoreUnmapped) {
- this.ignoreUnmapped = ignoreUnmapped;
- return this;
- }
-
/**
* Set the type to use in case the current field is not mapped in an index.
* Specifying a type tells Elasticsearch what type the sort values should have, which is important
@@ -138,9 +125,6 @@ public class FieldSortBuilder extends SortBuilder {
if (missing != null) {
builder.field("missing", missing);
}
- if (ignoreUnmapped != null) {
- builder.field(SortParseElement.IGNORE_UNMAPPED.getPreferredName(), ignoreUnmapped);
- }
if (unmappedType != null) {
builder.field(SortParseElement.UNMAPPED_TYPE.getPreferredName(), unmappedType);
}
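With the deprecated ignoreUnmapped option gone, unmappedType is the only remaining knob for sorting on fields that some indices do not map. A hedged usage sketch ("price" is a made-up field name):

// Sort on "price"; where the field is unmapped, treat the sort values as longs.
FieldSortBuilder sort = new FieldSortBuilder("price").unmappedType("long");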
diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
index a99158787d3..5349d6fc0d6 100644
--- a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
+++ b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
@@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.SearchParseElement;
@@ -55,7 +54,6 @@ public class SortParseElement implements SearchParseElement {
private static final SortField SORT_DOC = new SortField(null, SortField.Type.DOC);
private static final SortField SORT_DOC_REVERSE = new SortField(null, SortField.Type.DOC, true);
- public static final ParseField IGNORE_UNMAPPED = new ParseField("ignore_unmapped");
public static final ParseField UNMAPPED_TYPE = new ParseField("unmapped_type");
public static final String SCORE_FIELD_NAME = "_score";
@@ -156,12 +154,6 @@ public class SortParseElement implements SearchParseElement {
}
} else if ("missing".equals(innerJsonName)) {
missing = parser.textOrNull();
- } else if (context.parseFieldMatcher().match(innerJsonName, IGNORE_UNMAPPED)) {
- // backward compatibility: ignore_unmapped has been replaced with unmapped_type
- if (unmappedType == null // don't override if unmapped_type has been provided too
- && parser.booleanValue()) {
- unmappedType = LongFieldMapper.CONTENT_TYPE;
- }
} else if (context.parseFieldMatcher().match(innerJsonName, UNMAPPED_TYPE)) {
unmappedType = parser.textOrNull();
} else if ("mode".equals(innerJsonName)) {
diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
index c7f4392e56a..8b6f1198705 100644
--- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
+++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
@@ -222,7 +222,7 @@ public class ThreadPool extends AbstractComponent implements Closeable {
int halfProcMaxAt5 = Math.min(((availableProcessors + 1) / 2), 5);
int halfProcMaxAt10 = Math.min(((availableProcessors + 1) / 2), 10);
Map<String, Settings> defaultExecutorTypeSettings = new HashMap<>();
- add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GENERIC).keepAlive("30s"));
+ add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GENERIC).size(4 * availableProcessors).keepAlive("30s"));
add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.INDEX).size(availableProcessors).queueSize(200));
add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.BULK).size(availableProcessors).queueSize(50));
add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GET).size(availableProcessors).queueSize(1000));
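The GENERIC pool gains an explicit size of four times the processor count. A worked example of the sizing math above on an assumed 8-processor machine:

int availableProcessors = 8;                                         // assumption for illustration
int halfProcMaxAt5 = Math.min(((availableProcessors + 1) / 2), 5);   // min(4, 5)  == 4
int halfProcMaxAt10 = Math.min(((availableProcessors + 1) / 2), 10); // min(4, 10) == 4
int genericSize = 4 * availableProcessors;                           // 32 threads, 30s keep-alive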
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
index 608b33db0fe..4909959015b 100644
--- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
@@ -31,9 +31,12 @@ grant codeBase "${codebase.securesm-1.0.jar}" {
//// Very special jar permissions:
//// These are dangerous permissions that we don't want to grant to everything.
-grant codeBase "${codebase.lucene-core-5.5.0.jar}" {
+grant codeBase "${codebase.lucene-core-6.0.0-snapshot-bea235f.jar}" {
// needed to allow MMapDirectory's "unmap hack" (die unmap hack, die)
+ // java 8 package
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
+ // java 9 "package"
+ permission java.lang.RuntimePermission "accessClassInPackage.jdk.internal.ref";
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
// NOTE: also needed for RAMUsageEstimator size calculations
permission java.lang.RuntimePermission "accessDeclaredMembers";
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
index 856cd50e2a9..fafa57118c2 100644
--- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
@@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" {
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
-grant codeBase "${codebase.lucene-test-framework-5.5.0.jar}" {
+grant codeBase "${codebase.lucene-test-framework-6.0.0-snapshot-bea235f.jar}" {
// needed by RamUsageTester
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
diff --git a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java
index 39b4df44059..94806422c17 100644
--- a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java
+++ b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java
@@ -82,7 +82,7 @@ public class BlendedTermQueryTests extends ESTestCase {
w.addDocument(d);
}
w.commit();
- DirectoryReader reader = DirectoryReader.open(w, true);
+ DirectoryReader reader = DirectoryReader.open(w);
IndexSearcher searcher = setSimilarity(newSearcher(reader));
{
@@ -143,7 +143,7 @@ public class BlendedTermQueryTests extends ESTestCase {
w.addDocument(d);
}
w.commit();
- DirectoryReader reader = DirectoryReader.open(w, true);
+ DirectoryReader reader = DirectoryReader.open(w);
IndexSearcher searcher = setSimilarity(newSearcher(reader));
{
String[] fields = new String[]{"username", "song"};
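Lucene 6 dropped the boolean applyAllDeletes parameter; DirectoryReader.open(IndexWriter) now always applies deletes. A self-contained sketch of the near-real-time open:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.RAMDirectory;

class NrtOpenSketch {
    public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();
        IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
        DirectoryReader reader = DirectoryReader.open(writer); // was open(writer, true)
        reader.close();
        writer.close();
    }
}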
diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java
index 4669f5bc718..7824ecd39b1 100644
--- a/core/src/test/java/org/elasticsearch/VersionTests.java
+++ b/core/src/test/java/org/elasticsearch/VersionTests.java
@@ -31,8 +31,8 @@ import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
-import static org.elasticsearch.Version.V_0_20_0;
-import static org.elasticsearch.Version.V_0_90_0;
+import static org.elasticsearch.Version.V_2_2_0;
+import static org.elasticsearch.Version.V_5_0_0;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.containsString;
@@ -42,21 +42,27 @@ import static org.hamcrest.Matchers.sameInstance;
public class VersionTests extends ESTestCase {
public void testVersionComparison() throws Exception {
- assertThat(V_0_20_0.before(V_0_90_0), is(true));
- assertThat(V_0_20_0.before(V_0_20_0), is(false));
- assertThat(V_0_90_0.before(V_0_20_0), is(false));
+ assertThat(V_2_2_0.before(V_5_0_0), is(true));
+ assertThat(V_2_2_0.before(V_2_2_0), is(false));
+ assertThat(V_5_0_0.before(V_2_2_0), is(false));
- assertThat(V_0_20_0.onOrBefore(V_0_90_0), is(true));
- assertThat(V_0_20_0.onOrBefore(V_0_20_0), is(true));
- assertThat(V_0_90_0.onOrBefore(V_0_20_0), is(false));
+ assertThat(V_2_2_0.onOrBefore(V_5_0_0), is(true));
+ assertThat(V_2_2_0.onOrBefore(V_2_2_0), is(true));
+ assertThat(V_5_0_0.onOrBefore(V_2_2_0), is(false));
- assertThat(V_0_20_0.after(V_0_90_0), is(false));
- assertThat(V_0_20_0.after(V_0_20_0), is(false));
- assertThat(V_0_90_0.after(V_0_20_0), is(true));
+ assertThat(V_2_2_0.after(V_5_0_0), is(false));
+ assertThat(V_2_2_0.after(V_2_2_0), is(false));
+ assertThat(V_5_0_0.after(V_2_2_0), is(true));
+
+ assertThat(V_2_2_0.onOrAfter(V_5_0_0), is(false));
+ assertThat(V_2_2_0.onOrAfter(V_2_2_0), is(true));
+ assertThat(V_5_0_0.onOrAfter(V_2_2_0), is(true));
+
+ assertTrue(Version.fromString("5.0.0-alpha2").onOrAfter(Version.fromString("5.0.0-alpha1")));
+ assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2")));
+ assertTrue(Version.fromString("5.0.0-rc1").onOrAfter(Version.fromString("5.0.0-beta24")));
+ assertTrue(Version.fromString("5.0.0-alpha24").before(Version.fromString("5.0.0-beta0")));
- assertThat(V_0_20_0.onOrAfter(V_0_90_0), is(false));
- assertThat(V_0_20_0.onOrAfter(V_0_20_0), is(true));
- assertThat(V_0_90_0.onOrAfter(V_0_20_0), is(true));
}
public void testVersionConstantPresent() {
@@ -127,31 +133,57 @@ public class VersionTests extends ESTestCase {
public void testIndexCreatedVersion() {
// an actual index has a IndexMetaData.SETTING_INDEX_UUID
- final Version version = randomFrom(Version.V_0_18_0, Version.V_0_90_13, Version.V_1_3_0);
+ final Version version = randomFrom(Version.V_2_0_0, Version.V_2_3_0, Version.V_5_0_0);
assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build()));
}
public void testMinCompatVersion() {
assertThat(Version.V_2_0_0_beta1.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0_beta1));
- assertThat(Version.V_1_3_0.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
- assertThat(Version.V_1_2_0.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
- assertThat(Version.V_1_2_3.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
- assertThat(Version.V_1_0_0_RC2.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0_RC2));
+ assertThat(Version.V_2_1_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
+ assertThat(Version.V_2_2_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
+ assertThat(Version.V_2_3_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
+ assertThat(Version.V_5_0_0.minimumCompatibilityVersion(), equalTo(Version.V_5_0_0));
}
public void testToString() {
// with 2.0.beta we lowercase
assertEquals("2.0.0-beta1", Version.V_2_0_0_beta1.toString());
- assertEquals("1.4.0.Beta1", Version.V_1_4_0_Beta1.toString());
- assertEquals("1.4.0", Version.V_1_4_0.toString());
+ assertEquals("5.0.0", Version.V_5_0_0.toString());
+ assertEquals("2.3.0", Version.V_2_3_0.toString());
+ assertEquals("0.90.0.Beta1", Version.fromString("0.90.0.Beta1").toString());
+ assertEquals("1.0.0.Beta1", Version.fromString("1.0.0.Beta1").toString());
+ assertEquals("2.0.0-beta1", Version.fromString("2.0.0-beta1").toString());
+ assertEquals("5.0.0-beta1", Version.fromString("5.0.0-beta1").toString());
+ assertEquals("5.0.0-alpha1", Version.fromString("5.0.0-alpha1").toString());
}
public void testIsBeta() {
assertTrue(Version.V_2_0_0_beta1.isBeta());
- assertTrue(Version.V_1_4_0_Beta1.isBeta());
- assertFalse(Version.V_1_4_0.isBeta());
+ assertTrue(Version.fromString("1.0.0.Beta1").isBeta());
+ assertTrue(Version.fromString("0.90.0.Beta1").isBeta());
}
+
+ public void testIsAlpha() {
+ assertTrue(new Version(5000001, org.apache.lucene.util.Version.LUCENE_6_0_0).isAlpha());
+ assertFalse(new Version(4000002, org.apache.lucene.util.Version.LUCENE_6_0_0).isAlpha());
+ assertTrue(new Version(4000002, org.apache.lucene.util.Version.LUCENE_6_0_0).isBeta());
+ assertTrue(Version.fromString("5.0.0-alpha14").isAlpha());
+ assertEquals(5000014, Version.fromString("5.0.0-alpha14").id);
+ assertTrue(Version.fromId(5000015).isAlpha());
+
+ for (int i = 0 ; i < 25; i++) {
+ assertEquals(Version.fromString("5.0.0-alpha" + i).id, Version.fromId(5000000 + i).id);
+ assertEquals("5.0.0-alpha" + i, Version.fromId(5000000 + i).toString());
+ }
+
+ for (int i = 0 ; i < 25; i++) {
+ assertEquals(Version.fromString("5.0.0-beta" + i).id, Version.fromId(5000000 + i + 25).id);
+ assertEquals("5.0.0-beta" + i, Version.fromId(5000000 + i + 25).toString());
+ }
+ }
+
+
public void testParseVersion() {
final int iters = scaledRandomIntBetween(100, 1000);
for (int i = 0; i < iters; i++) {
@@ -162,6 +194,17 @@ public class VersionTests extends ESTestCase {
Version parsedVersion = Version.fromString(version.toString());
assertEquals(version, parsedVersion);
}
+
+ expectThrows(IllegalArgumentException.class, () -> {
+ Version.fromString("5.0.0-alph2");
+ });
+ assertSame(Version.CURRENT, Version.fromString(Version.CURRENT.toString()));
+
+ assertSame(Version.fromString("2.0.0-SNAPSHOT"), Version.fromString("2.0.0"));
+
+ expectThrows(IllegalArgumentException.class, () -> {
+ Version.fromString("5.0.0-SNAPSHOT");
+ });
}
public void testParseLenient() {
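The alpha/beta assertions above pin down the id layout for 5.0.0 prereleases: alphaN maps to 5000000 + N and betaN to 5000000 + N + 25, so alphas always sort before betas under plain integer comparison. In code:

int alpha14 = 5000000 + 14;    // 5.0.0-alpha14, per the loops above
int beta2 = 5000000 + 2 + 25;  // 5.0.0-beta2 == 5000027
assert alpha14 < beta2;        // prerelease ordering falls out of the id arithmetic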
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
index 5109ab979cf..586f178d12d 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
@@ -237,8 +237,8 @@ public class CancellableTasksTests extends TaskManagerTestCase {
// Cancel main task
CancelTasksRequest request = new CancelTasksRequest();
- request.reason("Testing Cancellation");
- request.taskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId()));
+ request.setReason("Testing Cancellation");
+ request.setTaskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId()));
// And send the cancellation request to a random node
CancelTasksResponse response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request)
.get();
@@ -270,7 +270,7 @@ public class CancellableTasksTests extends TaskManagerTestCase {
// Make sure that tasks are no longer running
ListTasksResponse listTasksResponse = testNodes[randomIntBetween(0, testNodes.length - 1)]
- .transportListTasksAction.execute(new ListTasksRequest().taskId(
+ .transportListTasksAction.execute(new ListTasksRequest().setTaskId(
new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId()))).get();
assertEquals(0, listTasksResponse.getTasks().size());
@@ -313,7 +313,7 @@ public class CancellableTasksTests extends TaskManagerTestCase {
// Make sure that tasks are running
ListTasksResponse listTasksResponse = testNodes[randomIntBetween(0, testNodes.length - 1)]
- .transportListTasksAction.execute(new ListTasksRequest().parentTaskId(new TaskId(mainNode, mainTask.getId()))).get();
+ .transportListTasksAction.execute(new ListTasksRequest().setParentTaskId(new TaskId(mainNode, mainTask.getId()))).get();
assertThat(listTasksResponse.getTasks().size(), greaterThanOrEqualTo(blockOnNodes.size()));
// Simulate the coordinating node leaving the cluster
@@ -331,8 +331,8 @@ public class CancellableTasksTests extends TaskManagerTestCase {
logger.info("--> Simulate issuing cancel request on the node that is about to leave the cluster");
// Simulate issuing cancel request on the node that is about to leave the cluster
CancelTasksRequest request = new CancelTasksRequest();
- request.reason("Testing Cancellation");
- request.taskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId()));
+ request.setReason("Testing Cancellation");
+ request.setTaskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId()));
// And send the cancellation request to a random node
CancelTasksResponse response = testNodes[0].transportCancelTasksAction.execute(request).get();
logger.info("--> Done simulating issuing cancel request on the node that is about to leave the cluster");
@@ -356,7 +356,7 @@ public class CancellableTasksTests extends TaskManagerTestCase {
// Make sure that tasks are no longer running
try {
ListTasksResponse listTasksResponse1 = testNodes[randomIntBetween(1, testNodes.length - 1)]
- .transportListTasksAction.execute(new ListTasksRequest().taskId(new TaskId(mainNode, mainTask.getId()))).get();
+ .transportListTasksAction.execute(new ListTasksRequest().setTaskId(new TaskId(mainNode, mainTask.getId()))).get();
assertEquals(0, listTasksResponse1.getTasks().size());
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
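All the task-management request mutators gain an explicit set prefix in this change. Usage after the rename (the node and task ids below are placeholders):

CancelTasksRequest request = new CancelTasksRequest();
request.setReason("Testing Cancellation");
request.setTaskId(new TaskId("nodeId", 42L));

ListTasksRequest listRequest = new ListTasksRequest();
listRequest.setActions("testAction*"); // wildcard action filter
listRequest.setDetailed(true);         // include per-task descriptions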
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
index eaa3caf9084..8c791a99018 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
@@ -18,6 +18,8 @@
*/
package org.elasticsearch.action.admin.cluster.node.tasks;
+import org.elasticsearch.ElasticsearchTimeoutException;
+import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.ListenableActionFuture;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
@@ -40,6 +42,7 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.tasks.MockTaskManager;
import org.elasticsearch.test.tasks.MockTaskManagerListener;
import org.elasticsearch.test.transport.MockTransportService;
+import org.elasticsearch.transport.ReceiveTimeoutTransportException;
import java.io.IOException;
import java.util.ArrayList;
@@ -54,8 +57,11 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Function;
+import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
+import static org.hamcrest.Matchers.either;
import static org.hamcrest.Matchers.emptyCollectionOf;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
@@ -327,6 +333,78 @@ public class TasksIT extends ESIntegTestCase {
assertEquals(0, client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size());
}
+ public void testTasksListWaitForCompletion() throws Exception {
+ // Start blocking test task
+ ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client())
+ .execute();
+
+ ListenableActionFuture<ListTasksResponse> waitResponseFuture;
+ try {
+ // Wait for the task to start on all nodes
+ assertBusy(() -> assertEquals(internalCluster().numDataAndMasterNodes(),
+ client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size()));
+
+ // Spin up a request to wait for that task to finish
+ waitResponseFuture = client().admin().cluster().prepareListTasks()
+ .setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).execute();
+ } finally {
+ // Unblock the request so the wait for completion request can finish
+ TestTaskPlugin.UnblockTestTasksAction.INSTANCE.newRequestBuilder(client()).get();
+ }
+
+ // Now that the task is unblocked the list response will come back
+ ListTasksResponse waitResponse = waitResponseFuture.get();
+ // If any tasks come back then they are the tasks we asked for - it'd be super weird if this wasn't true
+ for (TaskInfo task: waitResponse.getTasks()) {
+ assertEquals(task.getAction(), TestTaskPlugin.TestTaskAction.NAME + "[n]");
+ }
+ // See the next test to cover the timeout case
+
+ future.get();
+ }
+
+ public void testTasksListWaitForTimeout() throws Exception {
+ // Start blocking test task
+ ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client())
+ .execute();
+ try {
+ // Wait for the task to start on all nodes
+ assertBusy(() -> assertEquals(internalCluster().numDataAndMasterNodes(),
+ client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size()));
+
+ // Spin up a request that should wait for those tasks to finish
+ // It will timeout because we haven't unblocked the tasks
+ ListTasksResponse waitResponse = client().admin().cluster().prepareListTasks()
+ .setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).setTimeout(timeValueMillis(100))
+ .get();
+
+ assertFalse(waitResponse.getNodeFailures().isEmpty());
+ for (FailedNodeException failure : waitResponse.getNodeFailures()) {
+ Throwable timeoutException = failure.getCause();
+ // The exception sometimes comes back wrapped depending on the client
+ if (timeoutException.getCause() != null) {
+ timeoutException = timeoutException.getCause();
+ }
+ assertThat(timeoutException,
+ either(instanceOf(ElasticsearchTimeoutException.class)).or(instanceOf(ReceiveTimeoutTransportException.class)));
+ }
+ } finally {
+ // Now we can unblock those requests
+ TestTaskPlugin.UnblockTestTasksAction.INSTANCE.newRequestBuilder(client()).get();
+ }
+ future.get();
+ }
+
+ public void testTasksListWaitForNoTask() throws Exception {
+ // Spin up a request to wait for no matching tasks
+ ListenableActionFuture<ListTasksResponse> waitResponseFuture = client().admin().cluster().prepareListTasks()
+ .setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).setTimeout(timeValueMillis(10))
+ .execute();
+
+ // It should finish quickly and without complaint
+ assertThat(waitResponseFuture.get().getTasks(), emptyCollectionOf(TaskInfo.class));
+ }
+
@Override
public void tearDown() throws Exception {
for (Map.Entry<Tuple<String, String>, RecordingTaskManagerListener> entry : listeners.entrySet()) {
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
index 0d4372a51eb..e8dcd228e50 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
@@ -345,7 +345,10 @@ public class TestTaskPlugin extends Plugin {
public static class UnblockTestTasksRequest extends BaseTasksRequest<UnblockTestTasksRequest> {
-
+ @Override
+ public boolean match(Task task) {
+ return task instanceof TestTask && super.match(task);
+ }
}
public static class UnblockTestTasksResponse extends BaseTasksResponse {
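The match override above narrows which tasks the unblock request operates on, layered on top of the base id/action filters. The same pattern with made-up names, for illustration:

public static class UnblockMyTasksRequest extends BaseTasksRequest<UnblockMyTasksRequest> {
    @Override
    public boolean match(Task task) {
        // only this plugin's task type, after the standard request filters
        return task instanceof MyTask && super.match(task);
    }
}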
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
index e1501f9b14c..556eee238fd 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
@@ -355,7 +355,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
int testNodeNum = randomIntBetween(0, testNodes.length - 1);
TestNode testNode = testNodes[testNodeNum];
ListTasksRequest listTasksRequest = new ListTasksRequest();
- listTasksRequest.actions("testAction*"); // pick all test actions
+ listTasksRequest.setActions("testAction*"); // pick all test actions
logger.info("Listing currently running tasks using node [{}]", testNodeNum);
ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
logger.info("Checking currently running tasks");
@@ -371,7 +371,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
// Check task counts using transport with filtering
testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
listTasksRequest = new ListTasksRequest();
- listTasksRequest.actions("testAction[n]"); // only pick node actions
+ listTasksRequest.setActions("testAction[n]"); // only pick node actions
response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<DiscoveryNode, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
@@ -380,7 +380,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
}
// Check task counts using transport with detailed description
- listTasksRequest.detailed(true); // same request only with detailed description
+ listTasksRequest.setDetailed(true); // same request only with detailed description
response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<DiscoveryNode, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
@@ -389,7 +389,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
}
// Make sure that the main task on coordinating node is the task that was returned to us by execute()
- listTasksRequest.actions("testAction"); // only pick the main task
+ listTasksRequest.setActions("testAction"); // only pick the main task
response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(1, response.getTasks().size());
assertEquals(mainTask.getId(), response.getTasks().get(0).getId());
@@ -417,7 +417,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
// Get the parent task
ListTasksRequest listTasksRequest = new ListTasksRequest();
- listTasksRequest.actions("testAction");
+ listTasksRequest.setActions("testAction");
ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(1, response.getTasks().size());
String parentNode = response.getTasks().get(0).getNode().getId();
@@ -425,7 +425,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
// Find tasks with common parent
listTasksRequest = new ListTasksRequest();
- listTasksRequest.parentTaskId(new TaskId(parentNode, parentTaskId));
+ listTasksRequest.setParentTaskId(new TaskId(parentNode, parentTaskId));
response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(testNodes.length, response.getTasks().size());
for (TaskInfo task : response.getTasks()) {
@@ -451,7 +451,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
// Get the parent task
ListTasksRequest listTasksRequest = new ListTasksRequest();
- listTasksRequest.actions("testAction*");
+ listTasksRequest.setActions("testAction*");
ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(0, response.getTasks().size());
@@ -472,7 +472,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
// Check task counts using transport with filtering
TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
ListTasksRequest listTasksRequest = new ListTasksRequest();
- listTasksRequest.actions("testAction[n]"); // only pick node actions
+ listTasksRequest.setActions("testAction[n]"); // only pick node actions
ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<DiscoveryNode, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
@@ -482,7 +482,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
// Check task counts using transport with detailed description
long minimalDurationNanos = System.nanoTime() - maximumStartTimeNanos;
- listTasksRequest.detailed(true); // same request only with detailed description
+ listTasksRequest.setDetailed(true); // same request only with detailed description
response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<DiscoveryNode, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
@@ -518,9 +518,9 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
// Try to cancel main task using action name
CancelTasksRequest request = new CancelTasksRequest();
- request.nodesIds(testNodes[0].discoveryNode.getId());
- request.reason("Testing Cancellation");
- request.actions(actionName);
+ request.setNodesIds(testNodes[0].discoveryNode.getId());
+ request.setReason("Testing Cancellation");
+ request.setActions(actionName);
CancelTasksResponse response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request)
.get();
@@ -532,8 +532,8 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
// Try to cancel main task using id
request = new CancelTasksRequest();
- request.reason("Testing Cancellation");
- request.taskId(new TaskId(testNodes[0].discoveryNode.getId(), task.getId()));
+ request.setReason("Testing Cancellation");
+ request.setTaskId(new TaskId(testNodes[0].discoveryNode.getId(), task.getId()));
response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request).get();
// Shouldn't match any tasks since testAction doesn't support cancellation
@@ -544,7 +544,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
// Make sure that task is still running
ListTasksRequest listTasksRequest = new ListTasksRequest();
- listTasksRequest.actions(actionName);
+ listTasksRequest.setActions(actionName);
ListTasksResponse listResponse = testNodes[randomIntBetween(0, testNodes.length - 1)].transportListTasksAction.execute
(listTasksRequest).get();
assertEquals(1, listResponse.getPerNodeTasks().size());
@@ -617,7 +617,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
// Run task action on node tasks that are currently running
// should be successful on all nodes except one
TestTasksRequest testTasksRequest = new TestTasksRequest();
- testTasksRequest.actions("testAction[n]"); // pick all test actions
+ testTasksRequest.setActions("testAction[n]"); // pick all test actions
TestTasksResponse response = tasksActions[0].execute(testTasksRequest).get();
// Get successful responses from all nodes except one
assertEquals(testNodes.length - 1, response.tasks.size());
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java
index a2d838bc3fd..fc04de81254 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java
@@ -55,13 +55,7 @@ public class ClusterStateRequestTests extends ESTestCase {
assertThat(deserializedCSRequest.nodes(), equalTo(clusterStateRequest.nodes()));
assertThat(deserializedCSRequest.blocks(), equalTo(clusterStateRequest.blocks()));
assertThat(deserializedCSRequest.indices(), equalTo(clusterStateRequest.indices()));
-
- if (testVersion.onOrAfter(Version.V_1_5_0)) {
- assertOptionsMatch(deserializedCSRequest.indicesOptions(), clusterStateRequest.indicesOptions());
- } else {
- // versions before V_1_5_0 use IndicesOptions.lenientExpandOpen()
- assertOptionsMatch(deserializedCSRequest.indicesOptions(), IndicesOptions.lenientExpandOpen());
- }
+ assertOptionsMatch(deserializedCSRequest.indicesOptions(), clusterStateRequest.indicesOptions());
}
}
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java
index 9d8002210e7..baca9508a8b 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java
@@ -98,13 +98,7 @@ public class UpgradeIT extends ESBackcompatTestCase {
}
indexRandom(true, docs);
ensureGreen(indexName);
- if (globalCompatibilityVersion().before(Version.V_1_4_0_Beta1)) {
- // before 1.4 and the wait_if_ongoing flag, flushes could fail randomly, so we
- // need to continue to try flushing until all shards succeed
- assertTrue(awaitBusy(() -> flush(indexName).getFailedShards() == 0));
- } else {
- assertEquals(0, flush(indexName).getFailedShards());
- }
+ assertEquals(0, flush(indexName).getFailedShards());
// index more docs that won't be flushed
numDocs = scaledRandomIntBetween(100, 1000);
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java
index 45986eab00e..3c269c39004 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java
+++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java
@@ -131,7 +131,7 @@ public class BootstrapCheckTests extends ESTestCase {
}
public void testMaxNumberOfThreadsCheck() {
- final int limit = 1 << 15;
+ final int limit = 1 << 11;
final AtomicLong maxNumberOfThreads = new AtomicLong(randomIntBetween(1, limit - 1));
final BootstrapCheck.MaxNumberOfThreadsCheck check = new BootstrapCheck.MaxNumberOfThreadsCheck() {
@Override
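For the arithmetic: 1 << 15 is 32768 while 1 << 11 is 2048, so the randomized max-thread values now stay below 2048, presumably to line up with the threshold the production check enforces:

int oldLimit = 1 << 15; // 32768
int newLimit = 1 << 11; // 2048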
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java
index 9a0316050b1..9ea9b340c20 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java
@@ -104,17 +104,8 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase {
}
private String randomAnalyzer() {
- while(true) {
- PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(getRandom(), PreBuiltAnalyzers.values());
- if (preBuiltAnalyzers == PreBuiltAnalyzers.SORANI && compatibilityVersion().before(Version.V_1_3_0)) {
- continue; // SORANI was added in 1.3.0
- }
- if (preBuiltAnalyzers == PreBuiltAnalyzers.LITHUANIAN && compatibilityVersion().before(Version.V_2_1_0)) {
- continue; // LITHUANIAN was added in 2.1.0
- }
- return preBuiltAnalyzers.name().toLowerCase(Locale.ROOT);
- }
-
+ PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(getRandom(), PreBuiltAnalyzers.values());
+ return preBuiltAnalyzers.name().toLowerCase(Locale.ROOT);
}
private static final class InputOutput {
@@ -127,7 +118,5 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase {
this.input = input;
this.field = field;
}
-
-
}
}
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java
index 5b7c4fa37ba..7e46825398b 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java
@@ -188,10 +188,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
docs[i] = client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(numDocs + i));
}
indexRandom(true, docs);
- if (compatibilityVersion().before(Version.V_1_3_0)) {
- // issue another refresh through a new node to side step issue #6545
- assertNoFailures(backwardsCluster().internalCluster().dataNodeClient().admin().indices().prepareRefresh().setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get());
- }
numDocs *= 2;
}
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
index b13cee98565..8e3dbd5f563 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
@@ -446,7 +446,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
// #10067: create-bwc-index.py deleted any doc with long_sort:[10-20]
void assertDeleteByQueryWorked(String indexName, Version version) throws Exception {
- if (version.onOrBefore(Version.V_1_0_0_Beta2) || version.onOrAfter(Version.V_2_0_0_beta1)) {
+ if (version.onOrAfter(Version.V_2_0_0_beta1)) {
// TODO: remove this once #10262 is fixed
return;
}
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java
index ec73edd493f..483040209d0 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java
@@ -194,14 +194,11 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase {
assertThat(template.settings().getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1), equalTo(1));
assertThat(template.mappings().size(), equalTo(1));
assertThat(template.mappings().get("type1").string(), equalTo("{\"type1\":{\"_source\":{\"enabled\":false}}}"));
- if (Version.fromString(version).onOrAfter(Version.V_1_1_0)) {
- // Support for aliases in templates was added in v1.1.0
- assertThat(template.aliases().size(), equalTo(3));
- assertThat(template.aliases().get("alias1"), notNullValue());
- assertThat(template.aliases().get("alias2").filter().string(), containsString(version));
- assertThat(template.aliases().get("alias2").indexRouting(), equalTo("kimchy"));
- assertThat(template.aliases().get("{index}-alias"), notNullValue());
- }
+ assertThat(template.aliases().size(), equalTo(3));
+ assertThat(template.aliases().get("alias1"), notNullValue());
+ assertThat(template.aliases().get("alias2").filter().string(), containsString(version));
+ assertThat(template.aliases().get("alias2").indexRouting(), equalTo("kimchy"));
+ assertThat(template.aliases().get("{index}-alias"), notNullValue());
logger.info("--> cleanup");
cluster().wipeIndices(restoreInfo.indices().toArray(new String[restoreInfo.indices().size()]));
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java
index a43da9e53fa..0d8784834fa 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java
@@ -83,7 +83,7 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase {
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_CREATION_DATE, 1)
.put(IndexMetaData.SETTING_INDEX_UUID, "BOOM")
- .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_0_18_1_ID)
+ .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_2_0_0_beta1)
.put(indexSettings)
.build();
IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build();
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java
index d911a1175c7..94336d23623 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java
@@ -109,7 +109,12 @@ public class PrimaryAllocationIT extends ESIntegTestCase {
logger.info("--> check that old primary shard does not get promoted to primary again");
// kick reroute and wait for all shard states to be fetched
client(master).admin().cluster().prepareReroute().get();
- assertBusy(() -> assertThat(internalCluster().getInstance(GatewayAllocator.class, master).getNumberOfInFlightFetch(), equalTo(0)));
+ assertBusy(new Runnable() {
+ @Override
+ public void run() {
+ assertThat(internalCluster().getInstance(GatewayAllocator.class, master).getNumberOfInFlightFetch(), equalTo(0));
+ }
+ });
// kick reroute a second time and check that all shards are unassigned
assertThat(client(master).admin().cluster().prepareReroute().get().getState().getRoutingNodes().unassigned().size(), equalTo(2));
}
diff --git a/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java b/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java
index f4b8747ccdc..3770cd25c10 100644
--- a/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java
+++ b/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java
@@ -20,8 +20,7 @@ package org.elasticsearch.common;
import org.elasticsearch.test.ESTestCase;
-import java.util.EnumSet;
-
+import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.sameInstance;
@@ -33,38 +32,29 @@ public class ParseFieldTests extends ESTestCase {
String[] deprecated = new String[]{"barFoo", "bar_foo"};
ParseField withDeprecations = field.withDeprecation("Foobar", randomFrom(deprecated));
assertThat(field, not(sameInstance(withDeprecations)));
- assertThat(field.match(randomFrom(values), ParseField.EMPTY_FLAGS), is(true));
- assertThat(field.match("foo bar", ParseField.EMPTY_FLAGS), is(false));
- assertThat(field.match(randomFrom(deprecated), ParseField.EMPTY_FLAGS), is(false));
- assertThat(field.match("barFoo", ParseField.EMPTY_FLAGS), is(false));
+ assertThat(field.match(randomFrom(values), false), is(true));
+ assertThat(field.match("foo bar", false), is(false));
+ assertThat(field.match(randomFrom(deprecated), false), is(false));
+ assertThat(field.match("barFoo", false), is(false));
- assertThat(withDeprecations.match(randomFrom(values), ParseField.EMPTY_FLAGS), is(true));
- assertThat(withDeprecations.match("foo bar", ParseField.EMPTY_FLAGS), is(false));
- assertThat(withDeprecations.match(randomFrom(deprecated), ParseField.EMPTY_FLAGS), is(true));
- assertThat(withDeprecations.match("barFoo", ParseField.EMPTY_FLAGS), is(true));
+ assertThat(withDeprecations.match(randomFrom(values), false), is(true));
+ assertThat(withDeprecations.match("foo bar", false), is(false));
+ assertThat(withDeprecations.match(randomFrom(deprecated), false), is(true));
+ assertThat(withDeprecations.match("barFoo", false), is(true));
// now with strict mode
- EnumSet<ParseField.Flag> flags = EnumSet.of(ParseField.Flag.STRICT);
- assertThat(field.match(randomFrom(values), flags), is(true));
- assertThat(field.match("foo bar", flags), is(false));
- assertThat(field.match(randomFrom(deprecated), flags), is(false));
- assertThat(field.match("barFoo", flags), is(false));
+ assertThat(field.match(randomFrom(values), true), is(true));
+ assertThat(field.match("foo bar", true), is(false));
+ assertThat(field.match(randomFrom(deprecated), true), is(false));
+ assertThat(field.match("barFoo", true), is(false));
- assertThat(withDeprecations.match(randomFrom(values), flags), is(true));
- assertThat(withDeprecations.match("foo bar", flags), is(false));
- try {
- withDeprecations.match(randomFrom(deprecated), flags);
- fail();
- } catch (IllegalArgumentException ex) {
-
- }
-
- try {
- withDeprecations.match("barFoo", flags);
- fail();
- } catch (IllegalArgumentException ex) {
-
- }
+ assertThat(withDeprecations.match(randomFrom(values), true), is(true));
+ assertThat(withDeprecations.match("foo bar", true), is(false));
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+ () -> withDeprecations.match(randomFrom(deprecated), true));
+ assertThat(e.getMessage(), containsString("used, expected [foo_bar] instead"));
+ e = expectThrows(IllegalArgumentException.class, () -> withDeprecations.match("barFoo", true));
+ assertThat(e.getMessage(), containsString("Deprecated field [barFoo] used, expected [foo_bar] instead"));
}
public void testAllDeprecated() {
@@ -72,30 +62,29 @@ public class ParseFieldTests extends ESTestCase {
boolean withDeprecatedNames = randomBoolean();
String[] deprecated = new String[]{"text", "same_as_text"};
- String[] allValues = values;
+ String[] allValues;
if (withDeprecatedNames) {
- String[] newArray = new String[allValues.length + deprecated.length];
- System.arraycopy(allValues, 0, newArray, 0, allValues.length);
- System.arraycopy(deprecated, 0, newArray, allValues.length, deprecated.length);
+ String[] newArray = new String[values.length + deprecated.length];
+ System.arraycopy(values, 0, newArray, 0, values.length);
+ System.arraycopy(deprecated, 0, newArray, values.length, deprecated.length);
allValues = newArray;
+ } else {
+ allValues = values;
}
- ParseField field = new ParseField(randomFrom(values));
+ ParseField field;
if (withDeprecatedNames) {
- field = field.withDeprecation(deprecated);
+ field = new ParseField(randomFrom(values)).withDeprecation(deprecated).withAllDeprecated("like");
+ } else {
+ field = new ParseField(randomFrom(values)).withAllDeprecated("like");
}
- field = field.withAllDeprecated("like");
// strict mode off
- assertThat(field.match(randomFrom(allValues), ParseField.EMPTY_FLAGS), is(true));
- assertThat(field.match("not a field name", ParseField.EMPTY_FLAGS), is(false));
+ assertThat(field.match(randomFrom(allValues), false), is(true));
+ assertThat(field.match("not a field name", false), is(false));
// now with strict mode
- EnumSet<ParseField.Flag> flags = EnumSet.of(ParseField.Flag.STRICT);
- try {
- field.match(randomFrom(allValues), flags);
- fail();
- } catch (IllegalArgumentException ex) {
- }
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> field.match(randomFrom(allValues), true));
+ assertThat(e.getMessage(), containsString(" used, replaced by [like]"));
}
}
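The rewritten assertions use expectThrows, which runs the lambda, asserts the exception type, and returns the caught exception so its message can be inspected, replacing the try/fail/catch idiom deleted above. The shape of the idiom:

// expectThrows is inherited from the Lucene/Elasticsearch test case base class.
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
    throw new IllegalArgumentException("Deprecated field [barFoo] used");
});
assertThat(e.getMessage(), containsString("barFoo"));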
diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
index 6e4d3867fde..566d2148cae 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
@@ -19,13 +19,13 @@
package org.elasticsearch.common.geo;
-import com.spatial4j.core.exception.InvalidShapeException;
-import com.spatial4j.core.shape.Circle;
-import com.spatial4j.core.shape.Rectangle;
-import com.spatial4j.core.shape.Shape;
-import com.spatial4j.core.shape.ShapeCollection;
-import com.spatial4j.core.shape.jts.JtsGeometry;
-import com.spatial4j.core.shape.jts.JtsPoint;
+import org.locationtech.spatial4j.exception.InvalidShapeException;
+import org.locationtech.spatial4j.shape.Circle;
+import org.locationtech.spatial4j.shape.Rectangle;
+import org.locationtech.spatial4j.shape.Shape;
+import org.locationtech.spatial4j.shape.ShapeCollection;
+import org.locationtech.spatial4j.shape.jts.JtsGeometry;
+import org.locationtech.spatial4j.shape.jts.JtsPoint;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
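
The import changes in this file and the geo test files below reflect Spatial4j's move to LocationTech: the package prefix changes from com.spatial4j.core to org.locationtech.spatial4j while the class names stay the same. A minimal sketch using the relocated classes (the coordinates are arbitrary):

    import org.locationtech.spatial4j.context.SpatialContext;
    import org.locationtech.spatial4j.shape.Point;

    SpatialContext ctx = SpatialContext.GEO;
    Point point = ctx.makePoint(-71.06, 42.36); // x = longitude, y = latitude
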
diff --git a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
index 06fadffc806..abbd6ce40aa 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
@@ -19,12 +19,12 @@
package org.elasticsearch.common.geo;
-import com.spatial4j.core.exception.InvalidShapeException;
-import com.spatial4j.core.shape.Circle;
-import com.spatial4j.core.shape.Point;
-import com.spatial4j.core.shape.Rectangle;
-import com.spatial4j.core.shape.Shape;
-import com.spatial4j.core.shape.impl.PointImpl;
+import org.locationtech.spatial4j.exception.InvalidShapeException;
+import org.locationtech.spatial4j.shape.Circle;
+import org.locationtech.spatial4j.shape.Point;
+import org.locationtech.spatial4j.shape.Rectangle;
+import org.locationtech.spatial4j.shape.Shape;
+import org.locationtech.spatial4j.shape.impl.PointImpl;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.Polygon;
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java
index 305e57fbaf1..881db868ef9 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java
@@ -19,7 +19,7 @@
package org.elasticsearch.common.geo.builders;
-import com.spatial4j.core.shape.Rectangle;
+import org.locationtech.spatial4j.shape.Rectangle;
import com.vividsolutions.jts.geom.Coordinate;
import org.elasticsearch.test.geo.RandomShapeGenerator;
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java
deleted file mode 100644
index a4a5972e45b..00000000000
--- a/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.lucene;
-
-import org.apache.lucene.document.Document;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.MultiReader;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.Explanation;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.LRUQueryCache;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryCachingPolicy;
-import org.apache.lucene.search.QueryUtils;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.Weight;
-import org.apache.lucene.store.Directory;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-import java.util.Set;
-
-public class IndexCacheableQueryTests extends ESTestCase {
-
- static class DummyIndexCacheableQuery extends IndexCacheableQuery {
- @Override
- public String toString(String field) {
- return "DummyIndexCacheableQuery";
- }
-
- @Override
- public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
- return new Weight(this) {
-
- @Override
-            public void extractTerms(Set<Term> terms) {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public Explanation explain(LeafReaderContext context, int doc) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public float getValueForNormalization() throws IOException {
- return 0;
- }
-
- @Override
- public void normalize(float norm, float topLevelBoost) {
- }
-
- @Override
- public Scorer scorer(LeafReaderContext context) throws IOException {
- return null;
- }
-
- };
- }
- }
-
- public void testBasics() throws IOException {
- DummyIndexCacheableQuery query = new DummyIndexCacheableQuery();
- QueryUtils.check(query);
-
- Query rewritten = query.rewrite(new MultiReader(new IndexReader[0]));
- QueryUtils.check(rewritten);
- QueryUtils.checkUnequal(query, rewritten);
-
- Query rewritten2 = query.rewrite(new MultiReader(new IndexReader[0]));
- QueryUtils.check(rewritten2);
- QueryUtils.checkUnequal(rewritten, rewritten2);
- }
-
- public void testCache() throws IOException {
- Directory dir = newDirectory();
- LRUQueryCache cache = new LRUQueryCache(10000, Long.MAX_VALUE);
- QueryCachingPolicy policy = QueryCachingPolicy.ALWAYS_CACHE;
- RandomIndexWriter writer = new RandomIndexWriter(getRandom(), dir);
- for (int i = 0; i < 10; ++i) {
- writer.addDocument(new Document());
- }
-
- IndexReader reader = writer.getReader();
- IndexSearcher searcher = newSearcher(reader);
- reader = searcher.getIndexReader(); // reader might be wrapped
- searcher.setQueryCache(cache);
- searcher.setQueryCachingPolicy(policy);
-
- assertEquals(0, cache.getCacheSize());
- DummyIndexCacheableQuery query = new DummyIndexCacheableQuery();
- searcher.count(query);
- int expectedCacheSize = reader.leaves().size();
- assertEquals(expectedCacheSize, cache.getCacheSize());
- searcher.count(query);
- assertEquals(expectedCacheSize, cache.getCacheSize());
-
- writer.addDocument(new Document());
-
- IndexReader reader2 = writer.getReader();
- searcher = newSearcher(reader2);
- reader2 = searcher.getIndexReader(); // reader might be wrapped
- searcher.setQueryCache(cache);
- searcher.setQueryCachingPolicy(policy);
-
- // since the query is only cacheable at the index level, it has to be recomputed on all leaves
- expectedCacheSize += reader2.leaves().size();
- searcher.count(query);
- assertEquals(expectedCacheSize, cache.getCacheSize());
- searcher.count(query);
- assertEquals(expectedCacheSize, cache.getCacheSize());
-
- reader.close();
- reader2.close();
- writer.close();
- assertEquals(0, cache.getCacheSize());
- dir.close();
- }
-
-}
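
IndexCacheableQueryTests is deleted outright, presumably because Lucene's per-segment query caching (LRUQueryCache plus a QueryCachingPolicy set on the searcher) makes an index-level caching abstraction unnecessary. A minimal sketch of the cache wiring the deleted test exercised:

    IndexSearcher searcher = new IndexSearcher(reader);
    searcher.setQueryCache(new LRUQueryCache(10_000, 50 * 1024 * 1024)); // maxSize, maxRamBytesUsed
    searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
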
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
index 484b88f096f..8df6f5c78cc 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
@@ -84,7 +84,6 @@ public class LuceneTests extends ESTestCase {
// now shadow engine should try to be created
latch.countDown();
- dir.setEnableVirusScanner(false);
IndexWriterConfig iwc = newIndexWriterConfig();
iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
iwc.setMergePolicy(NoMergePolicy.INSTANCE);
@@ -104,7 +103,6 @@ public class LuceneTests extends ESTestCase {
public void testCleanIndex() throws IOException {
MockDirectoryWrapper dir = newMockDirectory();
- dir.setEnableVirusScanner(false);
IndexWriterConfig iwc = newIndexWriterConfig();
iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
iwc.setMergePolicy(NoMergePolicy.INSTANCE);
@@ -130,7 +128,7 @@ public class LuceneTests extends ESTestCase {
writer.deleteDocuments(new Term("id", "2"));
writer.commit();
- try (DirectoryReader open = DirectoryReader.open(writer, true)) {
+ try (DirectoryReader open = DirectoryReader.open(writer)) {
assertEquals(3, open.numDocs());
assertEquals(1, open.numDeletedDocs());
assertEquals(4, open.maxDoc());
@@ -158,7 +156,6 @@ public class LuceneTests extends ESTestCase {
public void testPruneUnreferencedFiles() throws IOException {
MockDirectoryWrapper dir = newMockDirectory();
- dir.setEnableVirusScanner(false);
IndexWriterConfig iwc = newIndexWriterConfig();
iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
iwc.setMergePolicy(NoMergePolicy.INSTANCE);
@@ -186,7 +183,7 @@ public class LuceneTests extends ESTestCase {
writer.deleteDocuments(new Term("id", "2"));
writer.commit();
- DirectoryReader open = DirectoryReader.open(writer, true);
+ DirectoryReader open = DirectoryReader.open(writer);
assertEquals(3, open.numDocs());
assertEquals(1, open.numDeletedDocs());
assertEquals(4, open.maxDoc());
@@ -215,7 +212,6 @@ public class LuceneTests extends ESTestCase {
public void testFiles() throws IOException {
MockDirectoryWrapper dir = newMockDirectory();
- dir.setEnableVirusScanner(false);
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setMergePolicy(NoMergePolicy.INSTANCE);
iwc.setMaxBufferedDocs(2);
@@ -279,7 +275,6 @@ public class LuceneTests extends ESTestCase {
public void testNumDocs() throws IOException {
MockDirectoryWrapper dir = newMockDirectory();
- dir.setEnableVirusScanner(false);
IndexWriterConfig iwc = newIndexWriterConfig();
IndexWriter writer = new IndexWriter(dir, iwc);
Document doc = new Document();
@@ -369,6 +364,6 @@ public class LuceneTests extends ESTestCase {
*/
public void testMMapHackSupported() throws Exception {
// add assume's here if needed for certain platforms, but we should know if it does not work.
- assertTrue(MMapDirectory.UNMAP_SUPPORTED);
+ assertTrue("MMapDirectory does not support unmapping: " + MMapDirectory.UNMAP_NOT_SUPPORTED_REASON, MMapDirectory.UNMAP_SUPPORTED);
}
}
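
The recurring DirectoryReader.open(writer, true) → DirectoryReader.open(writer) change in this and the following files tracks Lucene 6 dropping the applyAllDeletes flag from the IndexWriter overload; the single-argument form applies deletes. Note that call sites that previously passed false (e.g. in VersionLookupTests and VersionsTests below) now get deletes applied as well. A minimal before/after sketch:

    // before (Lucene 5.x): DirectoryReader.open(writer, true /* applyAllDeletes */);
    DirectoryReader reader = DirectoryReader.open(writer); // Lucene 6: deletes always applied
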
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java
index 9e7f54b8323..f2dc53e44cd 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java
@@ -152,7 +152,7 @@ public class SimpleAllTests extends ESTestCase {
indexWriter.addDocument(doc);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
Query query = new AllTermQuery(new Term("_all", "else"));
@@ -198,7 +198,7 @@ public class SimpleAllTests extends ESTestCase {
indexWriter.addDocument(doc);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
// this one is boosted. so the second doc is more relevant
@@ -244,7 +244,7 @@ public class SimpleAllTests extends ESTestCase {
indexWriter.addDocument(doc);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
assertEquals(2, reader.leaves().size());
IndexSearcher searcher = new IndexSearcher(reader);
@@ -280,7 +280,7 @@ public class SimpleAllTests extends ESTestCase {
indexWriter.addDocument(doc);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
@@ -330,7 +330,7 @@ public class SimpleAllTests extends ESTestCase {
indexWriter.addDocument(doc);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
@@ -368,7 +368,7 @@ public class SimpleAllTests extends ESTestCase {
indexWriter.addDocument(doc);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs docs = searcher.search(new MatchAllDocsQuery(), 10);
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java
index 7fb3ec0c2e9..817dabfece3 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java
@@ -55,7 +55,7 @@ public class ESDirectoryReaderTests extends ESTestCase {
// open reader
ShardId shardId = new ShardId("fake", "_na_", 1);
- DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw, true), shardId);
+ DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw), shardId);
assertEquals(2, ir.numDocs());
assertEquals(1, ir.leaves().size());
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java
index 51d2ba77ec5..3d1b0fdb842 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java
@@ -137,7 +137,7 @@ public class FreqTermsEnumTests extends ESTestCase {
}
// now go over each doc, build the relevant references and filter
- reader = DirectoryReader.open(iw, true);
+ reader = DirectoryReader.open(iw);
        List<BytesRef> filterTerms = new ArrayList<>();
for (int docId = 0; docId < reader.maxDoc(); docId++) {
Document doc = reader.document(docId);
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java
index 9098289847e..23b6939fe7a 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java
@@ -27,15 +27,12 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
-import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.instanceOf;
public class MultiPhrasePrefixQueryTests extends ESTestCase {
public void testSimple() throws Exception {
@@ -43,7 +40,7 @@ public class MultiPhrasePrefixQueryTests extends ESTestCase {
Document doc = new Document();
doc.add(new Field("field", "aaa bbb ccc ddd", TextField.TYPE_NOT_STORED));
writer.addDocument(doc);
- IndexReader reader = DirectoryReader.open(writer, true);
+ IndexReader reader = DirectoryReader.open(writer);
IndexSearcher searcher = new IndexSearcher(reader);
MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery();
@@ -66,22 +63,4 @@ public class MultiPhrasePrefixQueryTests extends ESTestCase {
query.add(new Term("field", "xxx"));
assertThat(searcher.count(query), equalTo(0));
}
-
- public void testBoost() throws Exception {
- IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
- Document doc = new Document();
- doc.add(new Field("field", "aaa bbb", TextField.TYPE_NOT_STORED));
- writer.addDocument(doc);
- doc = new Document();
- doc.add(new Field("field", "ccc ddd", TextField.TYPE_NOT_STORED));
- writer.addDocument(doc);
- IndexReader reader = DirectoryReader.open(writer, true);
- MultiPhrasePrefixQuery multiPhrasePrefixQuery = new MultiPhrasePrefixQuery();
- multiPhrasePrefixQuery.add(new Term[]{new Term("field", "aaa"), new Term("field", "bb")});
- multiPhrasePrefixQuery.setBoost(randomFloat());
- Query query = multiPhrasePrefixQuery.rewrite(reader);
- assertThat(query, instanceOf(BoostQuery.class));
- BoostQuery boostQuery = (BoostQuery) query;
- assertThat(boostQuery.getBoost(), equalTo(multiPhrasePrefixQuery.getBoost()));
- }
}
\ No newline at end of file
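
testBoost is removed, presumably because Lucene 6 queries no longer carry a mutable boost: Query.setBoost is gone, and boosting is expressed by wrapping the query in a BoostQuery, as the deleted assertions themselves demonstrated. A minimal sketch of the replacement idiom:

    // instead of mutating the query with setBoost(2f):
    Query boosted = new BoostQuery(new TermQuery(new Term("field", "aaa")), 2.0f);
    float boost = ((BoostQuery) boosted).getBoost(); // 2.0f
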
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java
index b0e2ea873c4..0dcce74c1d2 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java
@@ -54,7 +54,7 @@ public class MoreLikeThisQueryTests extends ESTestCase {
document.add(new TextField("text", "lucene release", Field.Store.YES));
indexWriter.addDocument(document);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
MoreLikeThisQuery mltQuery = new MoreLikeThisQuery("lucene", new String[]{"text"}, Lucene.STANDARD_ANALYZER);
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java
index 1340d11616c..72b6b2b5eec 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java
@@ -51,7 +51,7 @@ public class VersionLookupTests extends ESTestCase {
doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE));
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
writer.addDocument(doc);
- DirectoryReader reader = DirectoryReader.open(writer, false);
+ DirectoryReader reader = DirectoryReader.open(writer);
LeafReaderContext segment = reader.leaves().get(0);
PerThreadIDAndVersionLookup lookup = new PerThreadIDAndVersionLookup(segment.reader());
// found doc
@@ -79,7 +79,7 @@ public class VersionLookupTests extends ESTestCase {
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
writer.addDocument(doc);
writer.addDocument(doc);
- DirectoryReader reader = DirectoryReader.open(writer, false);
+ DirectoryReader reader = DirectoryReader.open(writer);
LeafReaderContext segment = reader.leaves().get(0);
PerThreadIDAndVersionLookup lookup = new PerThreadIDAndVersionLookup(segment.reader());
// return the last doc when there are duplicates
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java
index 1221445e8a6..7f405ea0531 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java
@@ -78,7 +78,7 @@ public class VersionsTests extends ESTestCase {
public void testVersions() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
- DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+ DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND));
Document doc = new Document();
@@ -145,7 +145,7 @@ public class VersionsTests extends ESTestCase {
docs.add(doc);
writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
- DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+ DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(5L));
assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(5L));
@@ -170,7 +170,7 @@ public class VersionsTests extends ESTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
- DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+ DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND));
Document doc = new Document();
@@ -305,7 +305,7 @@ public class VersionsTests extends ESTestCase {
doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE));
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
writer.addDocument(doc);
- DirectoryReader reader = DirectoryReader.open(writer, false);
+ DirectoryReader reader = DirectoryReader.open(writer);
// should increase cache size by 1
assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
assertEquals(size+1, Versions.lookupStates.size());
@@ -330,7 +330,7 @@ public class VersionsTests extends ESTestCase {
doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE));
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
writer.addDocument(doc);
- DirectoryReader reader = DirectoryReader.open(writer, false);
+ DirectoryReader reader = DirectoryReader.open(writer);
assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
assertEquals(size+1, Versions.lookupStates.size());
// now wrap the reader
diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
index 6cc9912924d..a190de5b702 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
@@ -216,6 +216,13 @@ public class ScopedSettingsTests extends ESTestCase {
} catch (IllegalArgumentException e) {
assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage());
}
+
+ try {
+ settings.validate("index.similarity.classic.type", Settings.builder().put("index.similarity.classic.type", "mine").build());
+ fail();
+ } catch (IllegalArgumentException e) {
+ assertEquals("illegal value for [index.similarity.classic] cannot redefine built-in similarity", e.getMessage());
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java b/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java
index 9c702acb2c4..1455b397e74 100644
--- a/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java
+++ b/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java
@@ -22,7 +22,7 @@ package org.elasticsearch.deps.lucene;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.IntField;
+import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
@@ -51,7 +51,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.LegacyNumericUtils;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.test.ESTestCase;
@@ -74,9 +74,9 @@ public class SimpleLuceneTests extends ESTestCase {
document.add(new SortedDocValuesField("str", new BytesRef(text)));
indexWriter.addDocument(document);
}
- IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter, true));
+ IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter));
IndexSearcher searcher = new IndexSearcher(reader);
- TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), null, 10, new Sort(new SortField("str", SortField.Type.STRING)));
+ TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("str", SortField.Type.STRING)));
for (int i = 0; i < 10; i++) {
FieldDoc fieldDoc = (FieldDoc) docs.scoreDocs[i];
assertThat((BytesRef) fieldDoc.fields[0], equalTo(new BytesRef(new String(new char[]{(char) (97 + i), (char) (97 + i)}))));
@@ -89,10 +89,10 @@ public class SimpleLuceneTests extends ESTestCase {
Document document = new Document();
document.add(new TextField("_id", "1", Field.Store.YES));
- document.add(new IntField("test", 2, IntField.TYPE_STORED));
+ document.add(new LegacyIntField("test", 2, LegacyIntField.TYPE_STORED));
indexWriter.addDocument(document);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
Document doc = searcher.doc(topDocs.scoreDocs[0].doc);
@@ -100,7 +100,7 @@ public class SimpleLuceneTests extends ESTestCase {
assertThat(f.stringValue(), equalTo("2"));
BytesRefBuilder bytes = new BytesRefBuilder();
- NumericUtils.intToPrefixCoded(2, 0, bytes);
+ LegacyNumericUtils.intToPrefixCoded(2, 0, bytes);
topDocs = searcher.search(new TermQuery(new Term("test", bytes.get())), 1);
doc = searcher.doc(topDocs.scoreDocs[0].doc);
f = doc.getField("test");
@@ -123,7 +123,7 @@ public class SimpleLuceneTests extends ESTestCase {
document.add(new TextField("#id", "1", Field.Store.YES));
indexWriter.addDocument(document);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
        final ArrayList<String> fieldsOrder = new ArrayList<>();
@@ -162,7 +162,7 @@ public class SimpleLuceneTests extends ESTestCase {
indexWriter.addDocument(document);
}
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
TermQuery query = new TermQuery(new Term("value", "value"));
TopDocs topDocs = searcher.search(query, 100);
@@ -179,7 +179,7 @@ public class SimpleLuceneTests extends ESTestCase {
public void testNRTSearchOnClosedWriter() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
- DirectoryReader reader = DirectoryReader.open(indexWriter, true);
+ DirectoryReader reader = DirectoryReader.open(indexWriter);
for (int i = 0; i < 100; i++) {
Document document = new Document();
@@ -205,26 +205,26 @@ public class SimpleLuceneTests extends ESTestCase {
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
Document doc = new Document();
- FieldType type = IntField.TYPE_NOT_STORED;
- IntField field = new IntField("int1", 1, type);
+ FieldType type = LegacyIntField.TYPE_NOT_STORED;
+ LegacyIntField field = new LegacyIntField("int1", 1, type);
doc.add(field);
- type = new FieldType(IntField.TYPE_NOT_STORED);
+ type = new FieldType(LegacyIntField.TYPE_NOT_STORED);
type.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
type.freeze();
- field = new IntField("int1", 1, type);
+ field = new LegacyIntField("int1", 1, type);
doc.add(field);
- field = new IntField("int2", 1, type);
+ field = new LegacyIntField("int2", 1, type);
doc.add(field);
- field = new IntField("int2", 1, type);
+ field = new LegacyIntField("int2", 1, type);
doc.add(field);
indexWriter.addDocument(doc);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
LeafReader atomicReader = SlowCompositeReaderWrapper.wrap(reader);
Terms terms = atomicReader.terms("int1");
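
The IntField → LegacyIntField and NumericUtils → LegacyNumericUtils renames above follow Lucene 6 moving trie-encoded numerics into Legacy* classes; this test deliberately keeps the legacy API since that is what it exercises. For contrast, a minimal sketch of the points-based replacement Lucene 6 recommends for new code:

    import org.apache.lucene.document.IntPoint;

    Document pointDoc = new Document();
    pointDoc.add(new IntPoint("test", 2));               // indexed as a 1-D point
    Query exact = IntPoint.newExactQuery("test", 2);     // exact match, no prefix coding
    Query range = IntPoint.newRangeQuery("test", 1, 3);  // inclusive bounds
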
diff --git a/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java b/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java
index 66dc0542678..fbb5115903c 100644
--- a/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java
+++ b/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.deps.lucene;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
@@ -53,10 +54,14 @@ public class VectorHighlighterTests extends ESTestCase {
Document document = new Document();
document.add(new TextField("_id", "1", Field.Store.YES));
- document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+ FieldType vectorsType = new FieldType(TextField.TYPE_STORED);
+ vectorsType.setStoreTermVectors(true);
+ vectorsType.setStoreTermVectorPositions(true);
+ vectorsType.setStoreTermVectorOffsets(true);
+ document.add(new Field("content", "the big bad dog", vectorsType));
indexWriter.addDocument(document);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
@@ -75,10 +80,14 @@ public class VectorHighlighterTests extends ESTestCase {
Document document = new Document();
document.add(new TextField("_id", "1", Field.Store.YES));
- document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+ FieldType vectorsType = new FieldType(TextField.TYPE_STORED);
+ vectorsType.setStoreTermVectors(true);
+ vectorsType.setStoreTermVectorPositions(true);
+ vectorsType.setStoreTermVectorOffsets(true);
+ document.add(new Field("content", "the big bad dog", vectorsType));
indexWriter.addDocument(document);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
@@ -87,12 +96,12 @@ public class VectorHighlighterTests extends ESTestCase {
FastVectorHighlighter highlighter = new FastVectorHighlighter();
PrefixQuery prefixQuery = new PrefixQuery(new Term("content", "ba"));
- assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_FILTER_REWRITE.getClass().getName()));
+ assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_REWRITE.getClass().getName()));
String fragment = highlighter.getBestFragment(highlighter.getFieldQuery(prefixQuery),
reader, topDocs.scoreDocs[0].doc, "content", 30);
assertThat(fragment, nullValue());
- prefixQuery.setRewriteMethod(PrefixQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+ prefixQuery.setRewriteMethod(PrefixQuery.SCORING_BOOLEAN_REWRITE);
Query rewriteQuery = prefixQuery.rewrite(reader);
fragment = highlighter.getBestFragment(highlighter.getFieldQuery(rewriteQuery),
reader, topDocs.scoreDocs[0].doc, "content", 30);
@@ -100,7 +109,7 @@ public class VectorHighlighterTests extends ESTestCase {
// now check with the custom field query
prefixQuery = new PrefixQuery(new Term("content", "ba"));
- assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_FILTER_REWRITE.getClass().getName()));
+ assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_REWRITE.getClass().getName()));
fragment = highlighter.getBestFragment(new CustomFieldQuery(prefixQuery, reader, highlighter),
reader, topDocs.scoreDocs[0].doc, "content", 30);
assertThat(fragment, notNullValue());
@@ -112,10 +121,14 @@ public class VectorHighlighterTests extends ESTestCase {
Document document = new Document();
document.add(new TextField("_id", "1", Field.Store.YES));
- document.add(new Field("content", "the big bad dog", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+ FieldType vectorsType = new FieldType(TextField.TYPE_NOT_STORED);
+ vectorsType.setStoreTermVectors(true);
+ vectorsType.setStoreTermVectorPositions(true);
+ vectorsType.setStoreTermVectorOffsets(true);
+ document.add(new Field("content", "the big bad dog", vectorsType));
indexWriter.addDocument(document);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
@@ -133,10 +146,10 @@ public class VectorHighlighterTests extends ESTestCase {
Document document = new Document();
document.add(new TextField("_id", "1", Field.Store.YES));
- document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
+ document.add(new TextField("content", "the big bad dog", Field.Store.YES));
indexWriter.addDocument(document);
- IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexReader reader = DirectoryReader.open(indexWriter);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
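
The rewrite-method constants asserted above were renamed in Lucene: CONSTANT_SCORE_FILTER_REWRITE became CONSTANT_SCORE_REWRITE and SCORING_BOOLEAN_QUERY_REWRITE became SCORING_BOOLEAN_REWRITE, both defined on MultiTermQuery and inherited by PrefixQuery. A minimal sketch:

    PrefixQuery prefixQuery = new PrefixQuery(new Term("content", "ba"));
    // default is the constant-score rewrite; switch to scoring boolean rewrite:
    prefixQuery.setRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE);
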
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
index ee92945c4ff..9ad10cc3888 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
@@ -275,10 +275,10 @@ public class ZenDiscoveryIT extends ESIntegTestCase {
Settings nodeSettings = Settings.settingsBuilder()
.put("discovery.type", "zen") // <-- To override the local setting if set externally
.build();
- String nodeName = internalCluster().startNode(nodeSettings, Version.V_2_0_0_beta1);
+ String nodeName = internalCluster().startNode(nodeSettings, Version.V_5_0_0);
ZenDiscovery zenDiscovery = (ZenDiscovery) internalCluster().getInstance(Discovery.class, nodeName);
ClusterService clusterService = internalCluster().getInstance(ClusterService.class, nodeName);
- DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), Version.V_1_6_0);
+ DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), Version.V_2_0_0);
        final AtomicReference<IllegalStateException> holder = new AtomicReference<>();
zenDiscovery.handleJoinRequest(node, clusterService.state(), new MembershipAction.JoinCallback() {
@Override
@@ -292,16 +292,16 @@ public class ZenDiscoveryIT extends ESIntegTestCase {
});
assertThat(holder.get(), notNullValue());
- assertThat(holder.get().getMessage(), equalTo("Can't handle join request from a node with a version [1.6.0] that is lower than the minimum compatible version [" + Version.V_2_0_0_beta1.minimumCompatibilityVersion() + "]"));
+ assertThat(holder.get().getMessage(), equalTo("Can't handle join request from a node with a version [2.0.0] that is lower than the minimum compatible version [" + Version.V_5_0_0.minimumCompatibilityVersion() + "]"));
}
public void testJoinElectedMaster_incompatibleMinVersion() {
- ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_2_0_0_beta1);
+ ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_5_0_0);
- DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_2_0_0_beta1);
+ DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_5_0_0);
assertThat(electMasterService.electMaster(Collections.singletonList(node)), sameInstance(node));
- node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_1_6_0);
- assertThat("Can't join master because version 1.6.0 is lower than the minimum compatable version 2.0.0 can support", electMasterService.electMaster(Collections.singletonList(node)), nullValue());
+ node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_2_0_0);
+ assertThat("Can't join master because version 2.0.0 is lower than the minimum compatable version 5.0.0 can support", electMasterService.electMaster(Collections.singletonList(node)), nullValue());
}
public void testDiscoveryStats() throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java
index b247dad069e..88d375699a1 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java
@@ -33,7 +33,6 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.discovery.zen.ping.PingContextProvider;
import org.elasticsearch.discovery.zen.ping.ZenPing;
-import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -82,11 +81,6 @@ public class UnicastZenPingIT extends ESTestCase {
return DiscoveryNodes.builder().put(nodeA).localNodeId("UZP_A").build();
}
- @Override
- public NodeService nodeService() {
- return null;
- }
-
@Override
public boolean nodeHasJoinedClusterOnce() {
return false;
@@ -101,11 +95,6 @@ public class UnicastZenPingIT extends ESTestCase {
return DiscoveryNodes.builder().put(nodeB).localNodeId("UZP_B").build();
}
- @Override
- public NodeService nodeService() {
- return null;
- }
-
@Override
public boolean nodeHasJoinedClusterOnce() {
return true;
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
index 224ecbdf619..7e31f6055de 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
@@ -43,7 +43,6 @@ import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.zen.DiscoveryNodesProvider;
import org.elasticsearch.node.Node;
-import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.transport.MockTransportService;
@@ -134,11 +133,6 @@ public class PublishClusterStateActionTests extends ESTestCase {
return clusterState.nodes();
}
- @Override
- public NodeService nodeService() {
- assert false;
- throw new UnsupportedOperationException("Shouldn't be here");
- }
}
public MockNode createMockNode(final String name) throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java
index 68a4df685be..64d293e8bd0 100644
--- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java
+++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java
@@ -65,11 +65,6 @@ import static org.hamcrest.Matchers.startsWith;
public class GetActionIT extends ESIntegTestCase {
- @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
- return pluginList(InternalSettingsPlugin.class); // uses index.version.created
- }
-
public void testSimpleGet() {
assertAcked(prepareCreate("test")
.setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))
@@ -324,128 +319,6 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2"));
}
- public void testThatGetFromTranslogShouldWorkWithExcludeBackcompat() throws Exception {
- String index = "test";
- String type = "type1";
-
- String mapping = jsonBuilder()
- .startObject()
- .startObject(type)
- .startObject("_source")
- .array("excludes", "excluded")
- .endObject()
- .endObject()
- .endObject()
- .string();
-
- assertAcked(prepareCreate(index)
- .addMapping(type, mapping)
- .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id));
-
- client().prepareIndex(index, type, "1")
- .setSource(jsonBuilder().startObject().field("field", "1", "2").field("excluded", "should not be seen").endObject())
- .get();
-
- GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").get();
- client().admin().indices().prepareFlush(index).get();
- GetResponse responseAfterFlush = client().prepareGet(index, type, "1").get();
-
- assertThat(responseBeforeFlush.isExists(), is(true));
- assertThat(responseAfterFlush.isExists(), is(true));
- assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("field"));
- assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded")));
- assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
- }
-
- public void testThatGetFromTranslogShouldWorkWithIncludeBackcompat() throws Exception {
- String index = "test";
- String type = "type1";
-
- String mapping = jsonBuilder()
- .startObject()
- .startObject(type)
- .startObject("_source")
- .array("includes", "included")
- .endObject()
- .endObject()
- .endObject()
- .string();
-
- assertAcked(prepareCreate(index)
- .addMapping(type, mapping)
- .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id));
-
- client().prepareIndex(index, type, "1")
- .setSource(jsonBuilder().startObject().field("field", "1", "2").field("included", "should be seen").endObject())
- .get();
-
- GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").get();
- flush();
- GetResponse responseAfterFlush = client().prepareGet(index, type, "1").get();
-
- assertThat(responseBeforeFlush.isExists(), is(true));
- assertThat(responseAfterFlush.isExists(), is(true));
- assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field")));
- assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included"));
- assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
- }
-
- @SuppressWarnings("unchecked")
- public void testThatGetFromTranslogShouldWorkWithIncludeExcludeAndFieldsBackcompat() throws Exception {
- String index = "test";
- String type = "type1";
-
- String mapping = jsonBuilder()
- .startObject()
- .startObject(type)
- .startObject("_source")
- .array("includes", "included")
- .array("excludes", "excluded")
- .endObject()
- .endObject()
- .endObject()
- .string();
-
- assertAcked(prepareCreate(index)
- .addMapping(type, mapping)
- .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id));
-
- client().prepareIndex(index, type, "1")
- .setSource(jsonBuilder().startObject()
- .field("field", "1", "2")
- .startObject("included").field("field", "should be seen").field("field2", "extra field to remove").endObject()
- .startObject("excluded").field("field", "should not be seen").field("field2", "should not be seen").endObject()
- .endObject())
- .get();
-
- GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").setFields("_source", "included.field", "excluded.field").get();
- assertThat(responseBeforeFlush.isExists(), is(true));
- assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded")));
- assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field")));
- assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included"));
-
- // now tests that extra source filtering works as expected
- GetResponse responseBeforeFlushWithExtraFilters = client().prepareGet(index, type, "1").setFields("included.field", "excluded.field")
- .setFetchSource(new String[]{"field", "*.field"}, new String[]{"*.field2"}).get();
- assertThat(responseBeforeFlushWithExtraFilters.isExists(), is(true));
- assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("excluded")));
- assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("field")));
- assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), hasKey("included"));
- assertThat((Map) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), hasKey("field"));
- assertThat((Map) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), not(hasKey("field2")));
-
- flush();
- GetResponse responseAfterFlush = client().prepareGet(index, type, "1").setFields("_source", "included.field", "excluded.field").get();
- GetResponse responseAfterFlushWithExtraFilters = client().prepareGet(index, type, "1").setFields("included.field", "excluded.field")
- .setFetchSource("*.field", "*.field2").get();
-
- assertThat(responseAfterFlush.isExists(), is(true));
- assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
-
- assertThat(responseAfterFlushWithExtraFilters.isExists(), is(true));
- assertThat(responseBeforeFlushWithExtraFilters.getSourceAsString(), is(responseAfterFlushWithExtraFilters.getSourceAsString()));
- }
-
public void testGetWithVersion() {
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
.setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)));
@@ -1002,12 +875,11 @@ public class GetActionIT extends ESIntegTestCase {
void indexSingleDocumentWithStringFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) {
- String storedString = stored ? "yes" : "no";
+ String storedString = stored ? "true" : "false";
String createIndexSource = "{\n" +
" \"settings\": {\n" +
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
- " \"refresh_interval\": \"-1\",\n" +
- " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
+ " \"refresh_interval\": \"-1\"\n" +
" },\n" +
" \"mappings\": {\n" +
" \"doc\": {\n" +
@@ -1054,12 +926,11 @@ public class GetActionIT extends ESIntegTestCase {
}
void indexSingleDocumentWithNumericFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) {
- String storedString = stored ? "yes" : "no";
+ String storedString = stored ? "true" : "false";
String createIndexSource = "{\n" +
" \"settings\": {\n" +
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
- " \"refresh_interval\": \"-1\",\n" +
- " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
+ " \"refresh_interval\": \"-1\"\n" +
" },\n" +
" \"mappings\": {\n" +
" \"doc\": {\n" +
diff --git a/core/src/test/java/org/elasticsearch/index/IndexTests.java b/core/src/test/java/org/elasticsearch/index/IndexTests.java
new file mode 100644
index 00000000000..6ce38c6acba
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/IndexTests.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index;
+
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.test.ESTestCase;
+
+import static org.apache.lucene.util.TestUtil.randomSimpleString;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.not;
+
+public class IndexTests extends ESTestCase {
+ public void testToString() {
+ assertEquals("[name/uuid]", new Index("name", "uuid").toString());
+ assertEquals("[name]", new Index("name", ClusterState.UNKNOWN_UUID).toString());
+
+ Index random = new Index(randomSimpleString(random(), 1, 100),
+ usually() ? Strings.randomBase64UUID(random()) : ClusterState.UNKNOWN_UUID);
+ assertThat(random.toString(), containsString(random.getName()));
+ if (ClusterState.UNKNOWN_UUID.equals(random.getUUID())) {
+ assertThat(random.toString(), not(containsString(random.getUUID())));
+ } else {
+ assertThat(random.toString(), containsString(random.getUUID()));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
index 8fd6876b4b2..e3676366511 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
@@ -20,7 +20,7 @@
package org.elasticsearch.index;
import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.document.IntField;
+import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.document.StringField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.not;
public class IndexingSlowLogTests extends ESTestCase {
public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException {
BytesReference source = JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject().bytes();
- ParsedDocument pd = new ParsedDocument(new StringField("uid", "test:id", Store.YES), new IntField("version", 1, Store.YES), "id",
+ ParsedDocument pd = new ParsedDocument(new StringField("uid", "test:id", Store.YES), new LegacyIntField("version", 1, Store.YES), "id",
"test", null, 0, -1, null, source, null);
// Turning off document logging doesn't log source[]
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
index 1eb1e93f09c..6468fae9397 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
@@ -28,6 +28,8 @@ import org.apache.lucene.analysis.fa.PersianNormalizationFilter;
import org.apache.lucene.analysis.hunspell.Dictionary;
import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.SimpleFSDirectory;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.ModuleTestCase;
@@ -106,7 +108,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
Settings settings2 = settingsBuilder()
.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0)
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0)
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings2);
AnalysisService analysisService2 = getAnalysisService(newRegistry, settings2);
@@ -119,8 +121,8 @@ public class AnalysisModuleTests extends ModuleTestCase {
// analysis service has the expected version
assertThat(analysisService2.analyzer("standard").analyzer(), is(instanceOf(StandardAnalyzer.class)));
- assertEquals(Version.V_0_90_0.luceneVersion, analysisService2.analyzer("standard").analyzer().getVersion());
- assertEquals(Version.V_0_90_0.luceneVersion, analysisService2.analyzer("thai").analyzer().getVersion());
+ assertEquals(Version.V_2_0_0.luceneVersion, analysisService2.analyzer("standard").analyzer().getVersion());
+ assertEquals(Version.V_2_0_0.luceneVersion, analysisService2.analyzer("thai").analyzer().getVersion());
assertThat(analysisService2.analyzer("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class)));
assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), analysisService2.analyzer("custom7").analyzer().getVersion());
@@ -268,45 +270,6 @@ public class AnalysisModuleTests extends ModuleTestCase {
}
}
- public void testBackwardCompatible() throws IOException {
- Settings settings = settingsBuilder()
- .put("index.analysis.analyzer.custom1.tokenizer", "standard")
- .put("index.analysis.analyzer.custom1.position_offset_gap", "128")
- .put("index.analysis.analyzer.custom2.tokenizer", "standard")
- .put("index.analysis.analyzer.custom2.position_increment_gap", "256")
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
- Version.V_1_7_1))
- .build();
- AnalysisService analysisService = getAnalysisService(settings);
-
- Analyzer custom1 = analysisService.analyzer("custom1").analyzer();
- assertThat(custom1, instanceOf(CustomAnalyzer.class));
- assertThat(custom1.getPositionIncrementGap("custom1"), equalTo(128));
-
- Analyzer custom2 = analysisService.analyzer("custom2").analyzer();
- assertThat(custom2, instanceOf(CustomAnalyzer.class));
- assertThat(custom2.getPositionIncrementGap("custom2"), equalTo(256));
- }
-
- public void testWithBothSettings() throws IOException {
- Settings settings = settingsBuilder()
- .put("index.analysis.analyzer.custom.tokenizer", "standard")
- .put("index.analysis.analyzer.custom.position_offset_gap", "128")
- .put("index.analysis.analyzer.custom.position_increment_gap", "256")
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
- Version.V_1_7_1))
- .build();
- try {
- getAnalysisService(settings);
- fail("Analyzer has both position_offset_gap and position_increment_gap should fail");
- } catch (IllegalArgumentException e) {
- assertThat(e.getMessage(), equalTo("Custom Analyzer [custom] defined both [position_offset_gap] and [position_increment_gap]" +
- ", use only [position_increment_gap]"));
- }
- }
-
public void testDeprecatedPositionOffsetGap() throws IOException {
Settings settings = settingsBuilder()
.put("index.analysis.analyzer.custom.tokenizer", "standard")
@@ -328,11 +291,14 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
- AnalysisModule module = new AnalysisModule(new Environment(settings));
+ Environment environment = new Environment(settings);
+ AnalysisModule module = new AnalysisModule(environment);
InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff");
InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic");
- Dictionary dictionary = new Dictionary(aff, dic);
- module.registerHunspellDictionary("foo", dictionary);
- assertInstanceBinding(module, HunspellService.class, (x) -> x.getDictionary("foo") == dictionary);
+ try (Directory tmp = new SimpleFSDirectory(environment.tmpFile())) {
+ Dictionary dictionary = new Dictionary(tmp, "hunspell", aff, dic);
+ module.registerHunspellDictionary("foo", dictionary);
+ assertInstanceBinding(module, HunspellService.class, (x) -> x.getDictionary("foo") == dictionary);
+ }
}
}
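
The Hunspell change above reflects the Lucene 6 Dictionary constructor growing two parameters, a temporary Directory and a temp-file-name prefix, used while the dictionary FSTs are built. A minimal sketch of loading a dictionary under the new signature (tempPath, affixPath, and dicPath are placeholders):

    try (Directory tmp = new SimpleFSDirectory(tempPath);      // any writable scratch directory
         InputStream aff = Files.newInputStream(affixPath);    // the *.aff file
         InputStream dic = Files.newInputStream(dicPath)) {    // the *.dic file
        Dictionary dictionary = new Dictionary(tmp, "hunspell", aff, dic);
    }
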
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java
deleted file mode 100644
index a163d9e42b4..00000000000
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.analysis;
-
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.test.ESTokenStreamTestCase;
-
-import java.io.IOException;
-
-import static com.carrotsearch.randomizedtesting.RandomizedTest.scaledRandomIntBetween;
-import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED;
-
-public class AnalyzerBackwardsCompatTests extends ESTokenStreamTestCase {
-
- private void assertNoStopwordsAfter(org.elasticsearch.Version noStopwordVersion, String type) throws IOException {
- final int iters = scaledRandomIntBetween(10, 100);
- org.elasticsearch.Version version = org.elasticsearch.Version.CURRENT;
- for (int i = 0; i < iters; i++) {
- Settings.Builder builder = Settings.settingsBuilder().put("index.analysis.filter.my_stop.type", "stop");
- if (version.onOrAfter(noStopwordVersion)) {
- if (random().nextBoolean()) {
- builder.put(SETTING_VERSION_CREATED, version);
- }
- } else {
- builder.put(SETTING_VERSION_CREATED, version);
- }
- builder.put("index.analysis.analyzer.foo.type", type);
- builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString());
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build());
- NamedAnalyzer analyzer = analysisService.analyzer("foo");
- assertNotNull(analyzer);
- if (version.onOrAfter(noStopwordVersion)) {
- assertAnalyzesTo(analyzer, "this is bogus", new String[]{"this", "is", "bogus"});
- } else {
- assertAnalyzesTo(analyzer, "this is bogus", new String[]{"bogus"});
- }
- version = randomVersion();
- }
- }
-
- public void testPatternAnalyzer() throws IOException {
- assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_RC1, "pattern");
- }
-
- public void testStandardHTMLStripAnalyzer() throws IOException {
- assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_RC1, "standard_html_strip");
- }
-
- public void testStandardAnalyzer() throws IOException {
- assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_Beta1, "standard");
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java
index 9d8efb1de4b..5e1cf2e8179 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java
@@ -23,7 +23,6 @@ import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
-import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenFilter;
import org.apache.lucene.analysis.reverse.ReverseStringFilter;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -120,45 +119,20 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase {
final Index index = new Index("test", "_na_");
final String name = "ngr";
Version v = randomVersion(random());
- if (v.onOrAfter(Version.V_0_90_2)) {
- Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3);
- boolean compatVersion = false;
- if ((compatVersion = random().nextBoolean())) {
- builder.put("version", "4." + random().nextInt(3));
- }
- boolean reverse = random().nextBoolean();
- if (reverse) {
- builder.put("side", "back");
- }
- Settings settings = builder.build();
- Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, v.id).build();
- Tokenizer tokenizer = new MockTokenizer();
- tokenizer.setReader(new StringReader("foo bar"));
- TokenStream edgeNGramTokenFilter = new EdgeNGramTokenFilterFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings).create(tokenizer);
- if (reverse) {
- assertThat(edgeNGramTokenFilter, instanceOf(ReverseStringFilter.class));
- } else if (compatVersion) {
- assertThat(edgeNGramTokenFilter, instanceOf(Lucene43EdgeNGramTokenFilter.class));
- } else {
- assertThat(edgeNGramTokenFilter, instanceOf(EdgeNGramTokenFilter.class));
- }
-
+ Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3);
+ boolean reverse = random().nextBoolean();
+ if (reverse) {
+ builder.put("side", "back");
+ }
+ Settings settings = builder.build();
+ Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, v.id).build();
+ Tokenizer tokenizer = new MockTokenizer();
+ tokenizer.setReader(new StringReader("foo bar"));
+ TokenStream edgeNGramTokenFilter = new EdgeNGramTokenFilterFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings).create(tokenizer);
+ if (reverse) {
+ assertThat(edgeNGramTokenFilter, instanceOf(ReverseStringFilter.class));
} else {
- Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3);
- boolean reverse = random().nextBoolean();
- if (reverse) {
- builder.put("side", "back");
- }
- Settings settings = builder.build();
- Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, v.id).build();
- Tokenizer tokenizer = new MockTokenizer();
- tokenizer.setReader(new StringReader("foo bar"));
- TokenStream edgeNGramTokenFilter = new EdgeNGramTokenFilterFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings).create(tokenizer);
- if (reverse) {
- assertThat(edgeNGramTokenFilter, instanceOf(ReverseStringFilter.class));
- } else {
- assertThat(edgeNGramTokenFilter, instanceOf(Lucene43EdgeNGramTokenFilter.class));
- }
+ assertThat(edgeNGramTokenFilter, instanceOf(EdgeNGramTokenFilter.class));
}
}
}
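
With the Lucene43 filters gone, the factory always builds the current EdgeNGramTokenFilter; reversal for side=back is handled by the surrounding ReverseStringFilter wrapping. A small self-contained sketch of what the non-reverse path produces:

import java.io.StringReader;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class EdgeNGramSketch {
    public static void main(String[] args) throws Exception {
        Tokenizer tokenizer = new WhitespaceTokenizer();
        tokenizer.setReader(new StringReader("foo bar"));
        // min_gram=2, max_gram=3, front side only
        TokenStream ts = new EdgeNGramTokenFilter(tokenizer, 2, 3);
        CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
        ts.reset();
        while (ts.incrementToken()) {
            System.out.println(term); // fo, foo, ba, bar
        }
        ts.end();
        ts.close();
    }
}
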
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java
index 89940558d51..10d3d3554dd 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java
@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
-import org.apache.lucene.analysis.NumericTokenStream;
-import org.apache.lucene.analysis.NumericTokenStream.NumericTermAttribute;
+import org.apache.lucene.analysis.LegacyNumericTokenStream;
+import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.elasticsearch.test.ESTestCase;
@@ -37,10 +37,10 @@ public class NumericAnalyzerTests extends ESTestCase {
NumericDoubleAnalyzer analyzer = new NumericDoubleAnalyzer(precisionStep);
final TokenStream ts1 = analyzer.tokenStream("dummy", String.valueOf(value));
- final NumericTokenStream ts2 = new NumericTokenStream(precisionStep);
+ final LegacyNumericTokenStream ts2 = new LegacyNumericTokenStream(precisionStep);
ts2.setDoubleValue(value);
- final NumericTermAttribute numTerm1 = ts1.addAttribute(NumericTermAttribute.class);
- final NumericTermAttribute numTerm2 = ts2.addAttribute(NumericTermAttribute.class);
+ final LegacyNumericTermAttribute numTerm1 = ts1.addAttribute(LegacyNumericTermAttribute.class);
+ final LegacyNumericTermAttribute numTerm2 = ts2.addAttribute(LegacyNumericTermAttribute.class);
final PositionIncrementAttribute posInc1 = ts1.addAttribute(PositionIncrementAttribute.class);
final PositionIncrementAttribute posInc2 = ts2.addAttribute(PositionIncrementAttribute.class);
ts1.reset();
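
For reference, a minimal sketch of driving the renamed LegacyNumericTokenStream directly, mirroring what the test compares against (precision step and value are arbitrary):

import org.apache.lucene.analysis.LegacyNumericTokenStream;
import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute;

public class LegacyNumericSketch {
    public static void main(String[] args) throws Exception {
        // precisionStep 4: one token per 4-bit shift of the encoded value
        try (LegacyNumericTokenStream ts = new LegacyNumericTokenStream(4)) {
            ts.setDoubleValue(2.5d);
            LegacyNumericTermAttribute numTerm = ts.addAttribute(LegacyNumericTermAttribute.class);
            ts.reset();
            while (ts.incrementToken()) {
                System.out.println(numTerm.getShift() + " -> " + numTerm.getRawValue());
            }
            ts.end();
        }
    }
}
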
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactoryTests.java
deleted file mode 100644
index 2cb8f99e7b8..00000000000
--- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactoryTests.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.analysis;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.indices.analysis.PreBuiltAnalyzers;
-import org.elasticsearch.test.ESTestCase;
-
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.not;
-
-/**
- *
- */
-public class PreBuiltAnalyzerProviderFactoryTests extends ESTestCase {
- public void testVersioningInFactoryProvider() throws Exception {
- PreBuiltAnalyzerProviderFactory factory = new PreBuiltAnalyzerProviderFactory("default", AnalyzerScope.INDEX, PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT));
-
- AnalyzerProvider former090AnalyzerProvider = factory.create("default", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build());
- AnalyzerProvider currentAnalyzerProviderReference = factory.create("default", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
-
- // would love to access the version inside of the lucene analyzer, but that is not possible...
- assertThat(currentAnalyzerProviderReference, is(not(former090AnalyzerProvider)));
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java
index fbb69ea1eb0..06a242c8277 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java
@@ -59,20 +59,18 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {
public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() {
assertThat(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.CURRENT),
- is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_0_18_0)));
+ is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_2_0_0)));
}
public void testThatInstancesAreCachedAndReused() {
- assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT),
- is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT)));
- assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_0_18_0),
- is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_0_18_0)));
- }
+ assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT),
+ PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT));
+ // same lucene version should be cached
+ assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0),
+ PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_1));
- public void testThatInstancesWithSameLuceneVersionAreReused() {
- // both are lucene 4.4 and should return the same instance
- assertThat(PreBuiltAnalyzers.CATALAN.getAnalyzer(Version.V_0_90_4),
- is(PreBuiltAnalyzers.CATALAN.getAnalyzer(Version.V_0_90_5)));
+ assertNotSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0),
+ PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_2_0));
}
public void testThatAnalyzersAreUsedInMapping() throws IOException {
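
The rewritten assertions pin down the caching contract: one analyzer instance per bundled Lucene version, so 2.0.0 and 2.0.1 share an instance while 2.2.0 gets its own. An illustrative sketch of that strategy (not the actual ES class; it only assumes the public Version.luceneVersion field):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import org.apache.lucene.analysis.Analyzer;

public class PerLuceneVersionCacheSketch {
    private final Map<org.apache.lucene.util.Version, Analyzer> cache = new ConcurrentHashMap<>();

    public Analyzer get(org.elasticsearch.Version esVersion,
                        Function<org.apache.lucene.util.Version, Analyzer> factory) {
        // key on the Lucene version an ES version ships with, so distinct ES
        // versions that bundle the same Lucene share one analyzer instance
        return cache.computeIfAbsent(esVersion.luceneVersion, factory);
    }
}
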
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactoryTests.java
deleted file mode 100644
index 39de728a484..00000000000
--- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactoryTests.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.analysis;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.indices.analysis.PreBuiltCharFilters;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-
-import static org.hamcrest.CoreMatchers.is;
-
-/**
- *
- */
-public class PreBuiltCharFilterFactoryFactoryTests extends ESTestCase {
- public void testThatDifferentVersionsCanBeLoaded() throws IOException {
- PreBuiltCharFilterFactoryFactory factory = new PreBuiltCharFilterFactoryFactory(PreBuiltCharFilters.HTML_STRIP.getCharFilterFactory(Version.CURRENT));
-
- CharFilterFactory former090TokenizerFactory = factory.get(null, null, "html_strip", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build());
- CharFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "html_strip", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build());
- CharFilterFactory currentTokenizerFactory = factory.get(null, null, "html_strip", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
-
- assertThat(currentTokenizerFactory, is(former090TokenizerFactory));
- assertThat(currentTokenizerFactory, is(former090TokenizerFactoryCopy));
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenFilterFactoryFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenFilterFactoryFactoryTests.java
deleted file mode 100644
index 670df069926..00000000000
--- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenFilterFactoryFactoryTests.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.analysis;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.indices.analysis.PreBuiltTokenFilters;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.not;
-
-/**
- *
- */
-public class PreBuiltTokenFilterFactoryFactoryTests extends ESTestCase {
- public void testThatCachingWorksForCachingStrategyOne() throws IOException {
- PreBuiltTokenFilterFactoryFactory factory = new PreBuiltTokenFilterFactoryFactory(PreBuiltTokenFilters.WORD_DELIMITER.getTokenFilterFactory(Version.CURRENT));
-
- TokenFilterFactory former090TokenizerFactory = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build());
- TokenFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build());
- TokenFilterFactory currentTokenizerFactory = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
-
- assertThat(currentTokenizerFactory, is(former090TokenizerFactory));
- assertThat(currentTokenizerFactory, is(former090TokenizerFactoryCopy));
- }
-
- public void testThatDifferentVersionsCanBeLoaded() throws IOException {
- PreBuiltTokenFilterFactoryFactory factory = new PreBuiltTokenFilterFactoryFactory(PreBuiltTokenFilters.STOP.getTokenFilterFactory(Version.CURRENT));
-
- TokenFilterFactory former090TokenizerFactory = factory.get(null, null, "stop", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build());
- TokenFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "stop", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build());
- TokenFilterFactory currentTokenizerFactory = factory.get(null, null, "stop", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
-
- assertThat(currentTokenizerFactory, is(not(former090TokenizerFactory)));
- assertThat(former090TokenizerFactory, is(former090TokenizerFactoryCopy));
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenizerFactoryFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenizerFactoryFactoryTests.java
deleted file mode 100644
index 162dbb36424..00000000000
--- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenizerFactoryFactoryTests.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.analysis;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.indices.analysis.PreBuiltTokenizers;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.not;
-
-/**
- *
- */
-public class PreBuiltTokenizerFactoryFactoryTests extends ESTestCase {
- public void testThatDifferentVersionsCanBeLoaded() throws IOException {
- PreBuiltTokenizerFactoryFactory factory = new PreBuiltTokenizerFactoryFactory(PreBuiltTokenizers.STANDARD.getTokenizerFactory(Version.CURRENT));
-
- // different es versions, same lucene version, thus cached
- TokenizerFactory former090TokenizerFactory = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build());
- TokenizerFactory former090TokenizerFactoryCopy = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build());
- TokenizerFactory currentTokenizerFactory = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
-
- assertThat(currentTokenizerFactory, is(not(former090TokenizerFactory)));
- assertThat(currentTokenizerFactory, is(not(former090TokenizerFactoryCopy)));
- assertThat(former090TokenizerFactory, is(former090TokenizerFactoryCopy));
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java
index 37844dce69d..f0a6077b497 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java
@@ -40,10 +40,9 @@ import static org.hamcrest.Matchers.instanceOf;
*
*/
public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
- public void testEnglishBackwardsCompatibility() throws IOException {
+ public void testEnglishFilterFactory() throws IOException {
int iters = scaledRandomIntBetween(20, 100);
for (int i = 0; i < iters; i++) {
-
Version v = VersionUtils.randomVersion(random());
Settings settings = Settings.settingsBuilder()
.put("index.analysis.filter.my_english.type", "stemmer")
@@ -61,19 +60,13 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
tokenizer.setReader(new StringReader("foo bar"));
TokenStream create = tokenFilter.create(tokenizer);
NamedAnalyzer analyzer = analysisService.analyzer("my_english");
-
- if (v.onOrAfter(Version.V_1_3_0)) {
- assertThat(create, instanceOf(PorterStemFilter.class));
- assertAnalyzesTo(analyzer, "consolingly", new String[]{"consolingli"});
- } else {
- assertThat(create, instanceOf(SnowballFilter.class));
- assertAnalyzesTo(analyzer, "consolingly", new String[]{"consol"});
- }
+ assertThat(create, instanceOf(PorterStemFilter.class));
+ assertAnalyzesTo(analyzer, "consolingly", new String[]{"consolingli"});
}
}
- public void testPorter2BackwardsCompatibility() throws IOException {
+ public void testPorter2FilterFactory() throws IOException {
int iters = scaledRandomIntBetween(20, 100);
for (int i = 0; i < iters; i++) {
@@ -95,12 +88,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
TokenStream create = tokenFilter.create(tokenizer);
NamedAnalyzer analyzer = analysisService.analyzer("my_porter2");
assertThat(create, instanceOf(SnowballFilter.class));
-
- if (v.onOrAfter(Version.V_1_3_0)) {
- assertAnalyzesTo(analyzer, "possibly", new String[]{"possibl"});
- } else {
- assertAnalyzesTo(analyzer, "possibly", new String[]{"possibli"});
- }
+ assertAnalyzesTo(analyzer, "possibly", new String[]{"possibl"});
}
}
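
The two stemmers the old version check switched between are both still available; only the factory now picks PorterStemFilter for english unconditionally. A standalone sketch contrasting their output on the test word:

import java.io.StringReader;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.en.PorterStemFilter;
import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class StemmerSketch {
    static void print(TokenStream ts) throws Exception {
        CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
        ts.reset();
        while (ts.incrementToken()) {
            System.out.println(term);
        }
        ts.end();
        ts.close();
    }

    public static void main(String[] args) throws Exception {
        Tokenizer t1 = new WhitespaceTokenizer();
        t1.setReader(new StringReader("consolingly"));
        print(new PorterStemFilter(t1));          // consolingli
        Tokenizer t2 = new WhitespaceTokenizer();
        t2.setReader(new StringReader("consolingly"));
        print(new SnowballFilter(t2, "English")); // consol
    }
}
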
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
index 2804f522afa..d319ab44319 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.core.Lucene43StopFilter;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter;
@@ -57,14 +56,8 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase {
public void testCorrectPositionIncrementSetting() throws IOException {
Builder builder = Settings.settingsBuilder().put("index.analysis.filter.my_stop.type", "stop");
- int thingToDo = random().nextInt(3);
- if (thingToDo == 0) {
+ if (random().nextBoolean()) {
builder.put("index.analysis.filter.my_stop.version", Version.LATEST);
- } else if (thingToDo == 1) {
- builder.put("index.analysis.filter.my_stop.version", Version.LUCENE_4_0);
- if (random().nextBoolean()) {
- builder.put("index.analysis.filter.my_stop.enable_position_increments", true);
- }
} else {
// don't specify
}
@@ -75,27 +68,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase {
Tokenizer tokenizer = new WhitespaceTokenizer();
tokenizer.setReader(new StringReader("foo bar"));
TokenStream create = tokenFilter.create(tokenizer);
- if (thingToDo == 1) {
- assertThat(create, instanceOf(Lucene43StopFilter.class));
- } else {
- assertThat(create, instanceOf(StopFilter.class));
- }
- }
-
- public void testDeprecatedPositionIncrementSettingWithVersions() throws IOException {
- Settings settings = Settings.settingsBuilder()
- .put("index.analysis.filter.my_stop.type", "stop")
- .put("index.analysis.filter.my_stop.enable_position_increments", false)
- .put("index.analysis.filter.my_stop.version", "4.3")
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop");
- assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class));
- Tokenizer tokenizer = new WhitespaceTokenizer();
- tokenizer.setReader(new StringReader("foo bar"));
- TokenStream create = tokenFilter.create(tokenizer);
- assertThat(create, instanceOf(Lucene43StopFilter.class));
+ assertThat(create, instanceOf(StopFilter.class));
}
public void testThatSuggestStopFilterWorks() throws Exception {
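
With the Lucene 4.3 compatibility path removed, the stop filter always preserves position holes. A minimal sketch showing the position increment a consumer observes after a removed stopword (assuming the default English stopword set):

import java.io.StringReader;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;

public class StopFilterSketch {
    public static void main(String[] args) throws Exception {
        Tokenizer tokenizer = new WhitespaceTokenizer();
        tokenizer.setReader(new StringReader("foo the bar"));
        TokenStream ts = new StopFilter(tokenizer, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
        CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
        PositionIncrementAttribute posInc = ts.addAttribute(PositionIncrementAttribute.class);
        ts.reset();
        while (ts.incrementToken()) {
            // "bar" reports +2: the hole left by "the" is preserved
            System.out.println(term + " +" + posInc.getPositionIncrement());
        }
        ts.end();
        ts.close();
    }
}
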
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java
index a041694dde6..c23875f8a9a 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java
@@ -146,23 +146,4 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
tokenizer.setReader(new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
-
- /** Back compat:
- * old offset order when doing both parts and concatenation: PowerShot is a synonym of Shot */
- public void testDeprecatedPartsAndCatenate() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
- .put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
- .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true")
- .put("index.analysis.filter.my_word_delimiter.version", "4.7")
- .build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter");
- String source = "PowerShot";
- String[] expected = new String[]{"Power", "Shot", "PowerShot" };
- Tokenizer tokenizer = new WhitespaceTokenizer();
- tokenizer.setReader(new StringReader(source));
- assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
- }
-
}
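
The deleted test covered the pre-4.8 offset order; the surviving parts-and-catenate test covers current behavior. A standalone sketch of the same filter configuration (the token order in the comment reflects current Lucene; treat it as indicative):

import java.io.StringReader;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class WordDelimiterSketch {
    public static void main(String[] args) throws Exception {
        Tokenizer tokenizer = new WhitespaceTokenizer();
        tokenizer.setReader(new StringReader("PowerShot"));
        int flags = WordDelimiterFilter.GENERATE_WORD_PARTS | WordDelimiterFilter.CATENATE_WORDS;
        TokenStream ts = new WordDelimiterFilter(tokenizer, flags, null);
        CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
        ts.reset();
        while (ts.incrementToken()) {
            System.out.println(term); // parts plus the catenated form, e.g. Power, PowerShot, Shot
        }
        ts.end();
        ts.close();
    }
}
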
diff --git a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
index 18714fe61ef..e82ed61fbed 100644
--- a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
+++ b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
@@ -87,7 +87,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
writer.addDocument(document);
writer.commit();
- DirectoryReader reader = DirectoryReader.open(writer, false);
+ DirectoryReader reader = DirectoryReader.open(writer);
reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0));
IndexSearcher searcher = new IndexSearcher(reader);
@@ -112,7 +112,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
writer.forceMerge(1);
reader.close();
- reader = DirectoryReader.open(writer, false);
+ reader = DirectoryReader.open(writer);
reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0));
searcher = new IndexSearcher(reader);
@@ -138,7 +138,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
document.add(new StringField("field", "value", Field.Store.NO));
writer.addDocument(document);
writer.commit();
- final DirectoryReader writerReader = DirectoryReader.open(writer, false);
+ final DirectoryReader writerReader = DirectoryReader.open(writer);
final IndexReader reader = ElasticsearchDirectoryReader.wrap(writerReader, new ShardId("test", "_na_", 0));
final AtomicLong stats = new AtomicLong();
@@ -211,7 +211,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
newIndexWriterConfig()
);
writer.addDocument(new Document());
- DirectoryReader reader = DirectoryReader.open(writer, true);
+ DirectoryReader reader = DirectoryReader.open(writer);
writer.close();
reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test2", "_na_", 0));
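
DirectoryReader.open(IndexWriter, boolean) is gone in Lucene 6; the single-argument NRT open always applies deletes, which is what every call site above now relies on. A minimal sketch:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class NrtReaderSketch {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new RAMDirectory();
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            writer.addDocument(new Document());
            // the NRT reader sees the uncommitted doc and always applies deletes
            try (DirectoryReader reader = DirectoryReader.open(writer)) {
                System.out.println(reader.numDocs()); // 1
            }
        }
    }
}
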
diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
index 3d912d41c38..4fb31bb4ea9 100644
--- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
+++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
@@ -20,18 +20,12 @@
package org.elasticsearch.index.codec;
import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.codecs.lucene40.Lucene40Codec;
-import org.apache.lucene.codecs.lucene41.Lucene41Codec;
-import org.apache.lucene.codecs.lucene410.Lucene410Codec;
-import org.apache.lucene.codecs.lucene42.Lucene42Codec;
-import org.apache.lucene.codecs.lucene45.Lucene45Codec;
-import org.apache.lucene.codecs.lucene46.Lucene46Codec;
-import org.apache.lucene.codecs.lucene49.Lucene49Codec;
import org.apache.lucene.codecs.lucene50.Lucene50Codec;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
import org.apache.lucene.codecs.lucene53.Lucene53Codec;
import org.apache.lucene.codecs.lucene54.Lucene54Codec;
+import org.apache.lucene.codecs.lucene60.Lucene60Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
@@ -64,16 +58,10 @@ public class CodecTests extends ESTestCase {
public void testResolveDefaultCodecs() throws Exception {
CodecService codecService = createCodecService();
assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class));
- assertThat(codecService.codec("default"), instanceOf(Lucene54Codec.class));
+ assertThat(codecService.codec("default"), instanceOf(Lucene60Codec.class));
+ assertThat(codecService.codec("Lucene54"), instanceOf(Lucene54Codec.class));
assertThat(codecService.codec("Lucene53"), instanceOf(Lucene53Codec.class));
assertThat(codecService.codec("Lucene50"), instanceOf(Lucene50Codec.class));
- assertThat(codecService.codec("Lucene410"), instanceOf(Lucene410Codec.class));
- assertThat(codecService.codec("Lucene49"), instanceOf(Lucene49Codec.class));
- assertThat(codecService.codec("Lucene46"), instanceOf(Lucene46Codec.class));
- assertThat(codecService.codec("Lucene45"), instanceOf(Lucene45Codec.class));
- assertThat(codecService.codec("Lucene40"), instanceOf(Lucene40Codec.class));
- assertThat(codecService.codec("Lucene41"), instanceOf(Lucene41Codec.class));
- assertThat(codecService.codec("Lucene42"), instanceOf(Lucene42Codec.class));
}
public void testDefault() throws Exception {
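
Only codecs shipped with the current Lucene remain registered by default; the dropped names would now have to come from the backward-codecs jar via SPI. A small sketch of the name-based lookup the CodecService assertions exercise:

import org.apache.lucene.codecs.Codec;

public class CodecLookupSketch {
    public static void main(String[] args) {
        // lookup by name goes through SPI; old formats need lucene-backward-codecs on the classpath
        Codec codec = Codec.forName("Lucene60");
        System.out.println(codec.getName());      // Lucene60
        System.out.println(Codec.availableCodecs());
    }
}
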
diff --git a/core/src/test/java/org/elasticsearch/index/engine/CommitStatsTests.java b/core/src/test/java/org/elasticsearch/index/engine/CommitStatsTests.java
deleted file mode 100644
index 8d9c313a9a2..00000000000
--- a/core/src/test/java/org/elasticsearch/index/engine/CommitStatsTests.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-package org.elasticsearch.index.engine;
-
-import org.apache.lucene.index.SegmentInfos;
-import org.elasticsearch.common.io.stream.InputStreamStreamInput;
-import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-
-import static org.elasticsearch.test.VersionUtils.randomVersion;
-
-
-public class CommitStatsTests extends ESTestCase {
- public void testStreamingWithNullId() throws IOException {
- SegmentInfos segmentInfos = new SegmentInfos();
- CommitStats commitStats = new CommitStats(segmentInfos);
- org.elasticsearch.Version targetNodeVersion = randomVersion(random());
-
- ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
- OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer);
- out.setVersion(targetNodeVersion);
- commitStats.writeTo(out);
-
- ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
- InputStreamStreamInput in = new InputStreamStreamInput(inBuffer);
- in.setVersion(targetNodeVersion);
- CommitStats readCommitStats = CommitStats.readCommitStatsFrom(in);
- assertNull(readCommitStats.getId());
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java
index 37e530cc7f4..b6ae9948675 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java
@@ -170,7 +170,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes
assertValues(bytesValues, 1, one());
assertValues(bytesValues, 2, three());
- IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
+ IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer));
TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null))));
assertThat(topDocs.totalHits, equalTo(3));
assertThat(topDocs.scoreDocs.length, equalTo(3));
@@ -226,7 +226,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes
fillExtendedMvSet();
IndexFieldData indexFieldData = getForField("value");
- IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
+ IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer));
TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10,
new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null))));
assertThat(topDocs.totalHits, equalTo(8));
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
index 5c229545755..6f8b5a45df0 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
@@ -150,7 +150,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
if (readerContext != null) {
readerContext.reader().close();
}
- topLevelReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+ topLevelReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
LeafReader reader = SlowCompositeReaderWrapper.wrap(topLevelReader);
readerContext = reader.getContext();
return readerContext;
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
index 31a17a684ee..15e4790ca9d 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
@@ -265,7 +265,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
final IndexFieldData indexFieldData = getForField("value");
final String missingValue = values[1];
- IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
+ IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer));
XFieldComparatorSource comparator = indexFieldData.comparatorSource(missingValue, MultiValueMode.MIN, null);
TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(new SortField("value", comparator, reverse)));
assertEquals(numDocs, topDocs.totalHits);
@@ -319,7 +319,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
}
}
final IndexFieldData indexFieldData = getForField("value");
- IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
+ IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer));
XFieldComparatorSource comparator = indexFieldData.comparatorSource(first ? "_first" : "_last", MultiValueMode.MIN, null);
TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(new SortField("value", comparator, reverse)));
assertEquals(numDocs, topDocs.totalHits);
@@ -387,7 +387,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
writer.commit();
}
}
- DirectoryReader directoryReader = DirectoryReader.open(writer, true);
+ DirectoryReader directoryReader = DirectoryReader.open(writer);
directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0));
IndexSearcher searcher = new IndexSearcher(directoryReader);
IndexFieldData<?> fieldData = getForField("text");
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java
index 26ea97dbf15..7ad8653260e 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java
@@ -125,7 +125,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre);
duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre);
- DirectoryReader perSegment = DirectoryReader.open(writer, true);
+ DirectoryReader perSegment = DirectoryReader.open(writer);
CompositeReaderContext composite = perSegment.getContext();
List<LeafReaderContext> leaves = composite.leaves();
for (LeafReaderContext atomicReaderContext : leaves) {
@@ -203,7 +203,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
duelFieldDataLong(random, context, leftFieldData, rightFieldData);
duelFieldDataLong(random, context, rightFieldData, leftFieldData);
- DirectoryReader perSegment = DirectoryReader.open(writer, true);
+ DirectoryReader perSegment = DirectoryReader.open(writer);
CompositeReaderContext composite = perSegment.getContext();
List<LeafReaderContext> leaves = composite.leaves();
for (LeafReaderContext atomicReaderContext : leaves) {
@@ -283,7 +283,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
duelFieldDataDouble(random, context, leftFieldData, rightFieldData);
duelFieldDataDouble(random, context, rightFieldData, leftFieldData);
- DirectoryReader perSegment = DirectoryReader.open(writer, true);
+ DirectoryReader perSegment = DirectoryReader.open(writer);
CompositeReaderContext composite = perSegment.getContext();
List<LeafReaderContext> leaves = composite.leaves();
for (LeafReaderContext atomicReaderContext : leaves) {
@@ -341,7 +341,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre);
duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre);
- DirectoryReader perSegment = DirectoryReader.open(writer, true);
+ DirectoryReader perSegment = DirectoryReader.open(writer);
CompositeReaderContext composite = perSegment.getContext();
List<LeafReaderContext> leaves = composite.leaves();
for (LeafReaderContext atomicReaderContext : leaves) {
@@ -449,7 +449,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
duelFieldDataGeoPoint(random, context, leftFieldData, rightFieldData, precision);
duelFieldDataGeoPoint(random, context, rightFieldData, leftFieldData, precision);
- DirectoryReader perSegment = DirectoryReader.open(writer, true);
+ DirectoryReader perSegment = DirectoryReader.open(writer);
CompositeReaderContext composite = perSegment.getContext();
List<LeafReaderContext> leaves = composite.leaves();
for (LeafReaderContext atomicReaderContext : leaves) {
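
Each of these hunks follows the same pattern: open an NRT reader and duel the field data segment by segment. For reference, a minimal sketch of that per-segment walk (note leaves() is typed, hence the List<LeafReaderContext> declarations above):

import java.util.List;
import org.apache.lucene.index.CompositeReaderContext;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReaderContext;

public class LeavesSketch {
    static void walkSegments(IndexWriter writer) throws Exception {
        try (DirectoryReader perSegment = DirectoryReader.open(writer)) {
            CompositeReaderContext composite = perSegment.getContext();
            List<LeafReaderContext> leaves = composite.leaves(); // one context per segment
            for (LeafReaderContext leaf : leaves) {
                System.out.println(leaf.reader().maxDoc());
            }
        }
    }
}
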
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
index 101e7368353..2d204d1003a 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
@@ -114,7 +114,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
Document doc = new Document();
doc.add(new StringField("s", "thisisastring", Store.NO));
writer.addDocument(doc);
- DirectoryReader open = DirectoryReader.open(writer, true);
+ DirectoryReader open = DirectoryReader.open(writer);
final boolean wrap = randomBoolean();
final IndexReader reader = wrap ? ElasticsearchDirectoryReader.wrap(open, new ShardId("test", "_na_", 1)) : open;
final AtomicInteger onCacheCalled = new AtomicInteger();
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java
index 1e0d8ecdf00..9e1b5d9d167 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java
@@ -165,7 +165,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase {
public void testSorting() throws Exception {
IndexFieldData indexFieldData = getForField(parentType);
- IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
+ IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer));
IndexFieldData.XFieldComparatorSource comparator = indexFieldData.comparatorSource("_last", MultiValueMode.MIN, null);
TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.joinField(parentType), comparator, false)));
@@ -211,7 +211,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase {
public void testThreads() throws Exception {
final ParentChildIndexFieldData indexFieldData = getForField(childType);
- final DirectoryReader reader = DirectoryReader.open(writer, true);
+ final DirectoryReader reader = DirectoryReader.open(writer);
final IndexParentChildFieldData global = indexFieldData.loadGlobal(reader);
final AtomicReference<Exception> error = new AtomicReference<>();
final int numThreads = scaledRandomIntBetween(3, 8);
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java
index 53d5e1744eb..191ce5d477e 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java
@@ -433,19 +433,6 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
}
-
- mapping = jsonBuilder().startObject().startObject("type")
- .startObject("_all")
- .startObject("fielddata")
- .field("format", "doc_values")
- .endObject().endObject().endObject().endObject().string();
- Settings legacySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
- try {
- createIndex("test_old", legacySettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
- fail();
- } catch (MapperParsingException e) {
- assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
- }
}
public void testAutoBoost() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java
index 7bed3ce091f..74fc98fddbe 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java
@@ -85,7 +85,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
try (Directory dir = new RAMDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(getRandom())))) {
w.addDocuments(doc.docs());
- try (DirectoryReader reader = DirectoryReader.open(w, true)) {
+ try (DirectoryReader reader = DirectoryReader.open(w)) {
final LeafReader leaf = reader.leaves().get(0).reader();
// boolean fields are indexed and have doc values by default
assertEquals(new BytesRef("T"), leaf.terms("field").iterator().next());
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java
index bdb3f9762ef..8af92f266a5 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java
@@ -34,6 +34,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
import org.junit.Before;
import java.io.IOException;
+import java.util.Arrays;
import static org.hamcrest.Matchers.equalTo;
@@ -200,4 +201,35 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(1, fields.length);
assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType());
}
+
+ public void testIndexOptions() throws IOException {
+ String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ .startObject("properties").startObject("field").field("type", "keyword")
+ .field("index_options", "freqs").endObject().endObject()
+ .endObject().endObject().string();
+
+ DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
+
+ assertEquals(mapping, mapper.mappingSource().toString());
+
+ ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+ .startObject()
+ .field("field", "1234")
+ .endObject()
+ .bytes());
+
+ IndexableField[] fields = doc.rootDoc().getFields("field");
+ assertEquals(2, fields.length);
+ assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions());
+
+ for (String indexOptions : Arrays.asList("positions", "offsets")) {
+ final String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
+ .startObject("properties").startObject("field").field("type", "keyword")
+ .field("index_options", indexOptions).endObject().endObject()
+ .endObject().endObject().string();
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+ () -> parser.parse("type", new CompressedXContent(mapping2)));
+ assertEquals("The [keyword] field does not support positions, got [index_options]=" + indexOptions, e.getMessage());
+ }
+ }
}
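
The new negative cases use the expectThrows idiom from the Lucene test framework rather than try/fail/catch. A minimal self-contained sketch of that idiom (the message string here is illustrative):

import org.apache.lucene.util.LuceneTestCase;

public class ExpectThrowsSketch extends LuceneTestCase {
    public void testIdiom() {
        // expectThrows returns the caught exception so its message can be asserted
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
            throw new IllegalArgumentException("The [keyword] field does not support positions");
        });
        assertEquals("The [keyword] field does not support positions", e.getMessage());
    }
}
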
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java
index 3056b63b4c0..4f4bbc65699 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java
@@ -19,11 +19,11 @@
package org.elasticsearch.index.mapper.date;
-import org.apache.lucene.analysis.NumericTokenStream.NumericTermAttribute;
+import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.search.NumericRangeQuery;
+import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.util.Constants;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
@@ -189,7 +189,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
TokenStream tokenStream = doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null);
tokenStream.reset();
- NumericTermAttribute nta = tokenStream.addAttribute(NumericTermAttribute.class);
+ LegacyNumericTermAttribute nta = tokenStream.addAttribute(LegacyNumericTermAttribute.class);
List<Long> values = new ArrayList<>();
while(tokenStream.incrementToken()) {
values.add(nta.getRawValue());
@@ -197,7 +197,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
tokenStream = doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null);
tokenStream.reset();
- nta = tokenStream.addAttribute(NumericTermAttribute.class);
+ nta = tokenStream.addAttribute(LegacyNumericTermAttribute.class);
int pos = 0;
while(tokenStream.incrementToken()) {
assertThat(values.get(pos++), equalTo(nta.getRawValue()));
@@ -256,10 +256,10 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
.bytes());
assertThat(((LongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis())));
- NumericRangeQuery<Long> rangeQuery;
+ LegacyNumericRangeQuery<Long> rangeQuery;
try {
SearchContext.setCurrent(new TestSearchContext(null));
- rangeQuery = (NumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true).rewrite(null);
+ rangeQuery = (LegacyNumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true).rewrite(null);
} finally {
SearchContext.removeCurrent();
}
@@ -282,10 +282,10 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
.bytes());
assertThat(((LongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis())));
- NumericRangeQuery<Long> rangeQuery;
+ LegacyNumericRangeQuery<Long> rangeQuery;
try {
SearchContext.setCurrent(new TestSearchContext(null));
- rangeQuery = (NumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true).rewrite(null);
+ rangeQuery = (LegacyNumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true).rewrite(null);
} finally {
SearchContext.removeCurrent();
}
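
The cast target is now the renamed LegacyNumericRangeQuery; building one directly looks like this. A minimal sketch (precision step 16 is the old legacy default, and the bounds are the 10:00-11:00 millis from the test, both stated here as assumptions):

import org.apache.lucene.search.LegacyNumericRangeQuery;

public class LegacyRangeSketch {
    public static void main(String[] args) {
        // 36,000,000 ms = 10:00, 39,600,000 ms = 11:00
        LegacyNumericRangeQuery<Long> query = LegacyNumericRangeQuery.newLongRange(
            "date_field", 16, 36000000L, 39600000L, true, true);
        System.out.println(query);
    }
}
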
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java
index 03c14ee1a45..8c25713ce3d 100755
--- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java
@@ -19,7 +19,7 @@
package org.elasticsearch.index.mapper.externalvalues;
-import com.spatial4j.core.shape.Point;
+import org.locationtech.spatial4j.shape.Point;
import org.apache.lucene.document.Field;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
index 558e3bc83fb..9d6236234af 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
@@ -56,7 +56,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
}
public void testExternalValues() throws Exception {
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
IndexService indexService = createIndex("test", settings);
MapperRegistry mapperRegistry = new MapperRegistry(
@@ -101,7 +101,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
}
public void testExternalValuesWithMultifield() throws Exception {
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
IndexService indexService = createIndex("test", settings);
Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();
@@ -159,7 +159,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
}
public void testExternalValuesWithMultifieldTwoLevels() throws Exception {
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
IndexService indexService = createIndex("test", settings);
Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
index ed6c574a865..6b9282e2704 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
@@ -66,7 +66,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -96,7 +96,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.field("geohash", true).endObject().endObject()
.endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -116,7 +116,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -136,7 +136,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -156,7 +156,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -172,7 +172,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
}
public void testNormalizeLatLonValuesDefault() throws Exception {
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
// default to normalize
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
@@ -222,7 +222,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
}
public void testValidateLatLonValues() throws Exception {
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true);
if (version.before(Version.V_2_2_0)) {
@@ -285,7 +285,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
}
public void testNoValidateLatLonValues() throws Exception {
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true);
if (version.before(Version.V_2_2_0)) {
@@ -332,7 +332,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", true).endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -359,7 +359,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", true).endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -395,7 +395,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -419,7 +419,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", true).endObject().endObject()
.endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -445,7 +445,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", true).endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -481,7 +481,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -506,7 +506,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("point").field("match", "point*").startObject("mapping").field("type", "geo_point")
.field("lat_lon", true).endObject().endObject().endObject().endArray().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -530,7 +530,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", true).endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -556,7 +556,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", true).endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -699,7 +699,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().string();
// create index and add a test point (dr5regy6rc6z)
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings)
.addMapping("pin", mapping);
@@ -724,7 +724,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().endObject().string();
// create index and add a test point (dr5regy6rc6z)
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings)
.addMapping("pin", mapping);
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
index 5de6c517ab2..bd23817ba50 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java
@@ -57,7 +57,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false)
.endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -81,7 +81,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).endObject().endObject()
.endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -105,7 +105,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true)
.endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -126,7 +126,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true)
.field("geohash_precision", 10).endObject().endObject().endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
@@ -140,7 +140,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("geohash_precision", "5m").endObject().endObject()
.endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
@@ -154,7 +154,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
.endObject().endObject().string();
- Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java
index d171430dfff..05677d0ed8f 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java
@@ -64,7 +64,7 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
writer.addDocument(doc.rootDoc());
writer.addDocument(doc.rootDoc());
- IndexReader reader = DirectoryReader.open(writer, true);
+ IndexReader reader = DirectoryReader.open(writer);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field1").fieldType().termQuery("value1", null), 10);
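
All the DirectoryReader.open(writer, boolean) call sites in this and the following files collapse to the one-argument form: the Lucene 6 snapshot this patch targets drops the applyAllDeletes flag from that overload, and an NRT reader opened from a writer applies deletes. A minimal sketch, assuming that Lucene snapshot:

```java
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class NrtOpenExample {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new RAMDirectory();
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(null))) {
            writer.addDocument(new Document());
            // was: DirectoryReader.open(writer, true)
            try (DirectoryReader reader = DirectoryReader.open(writer)) {
                System.out.println("NRT reader sees " + reader.numDocs() + " doc(s)");
            }
        }
    }
}
```
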
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java
index 0cd6fa0e1c9..9923846da0e 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java
@@ -76,7 +76,7 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase {
// Indexing a doc in the old way
FieldType fieldType = new FieldType();
fieldType.setStored(true);
- fieldType.setNumericType(FieldType.NumericType.INT);
+ fieldType.setNumericType(FieldType.LegacyNumericType.INT);
Document doc2 = new Document();
doc2.add(new StoredField("field1", new BytesRef(Numbers.intToBytes(1))));
doc2.add(new StoredField("field2", new BytesRef(Numbers.floatToBytes(1.1f))));
@@ -85,7 +85,7 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase {
doc2.add(new StoredField("field3", new BytesRef(Numbers.longToBytes(3L))));
writer.addDocument(doc2);
- DirectoryReader reader = DirectoryReader.open(writer, true);
+ DirectoryReader reader = DirectoryReader.open(writer);
IndexSearcher searcher = new IndexSearcher(reader);
Set<String> fields = new HashSet<>(Arrays.asList("field1", "field2", "field3"));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java
index bf21f2fd6d3..09804f82919 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java
@@ -19,7 +19,7 @@
package org.elasticsearch.index.mapper.numeric;
-import org.apache.lucene.analysis.NumericTokenStream;
+import org.apache.lucene.analysis.LegacyNumericTokenStream;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DocValuesType;
@@ -623,8 +623,8 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
// check the tokenstream actually used by the indexer
TokenStream ts = field.tokenStream(null, null);
- assertThat(ts, instanceOf(NumericTokenStream.class));
- assertEquals(expected, ((NumericTokenStream)ts).getPrecisionStep());
+ assertThat(ts, instanceOf(LegacyNumericTokenStream.class));
+ assertEquals(expected, ((LegacyNumericTokenStream)ts).getPrecisionStep());
}
public void testTermVectorsBackCompat() throws Exception {
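
The rename is mechanical: Lucene 6 keeps the old trie-encoded numeric support under Legacy-prefixed names while the new points API takes over. A minimal sketch of the renamed token stream, assuming the same Lucene snapshot:

```java
import org.apache.lucene.analysis.LegacyNumericTokenStream;

public class LegacyNumericStreamExample {
    public static void main(String[] args) throws Exception {
        // was: new NumericTokenStream(8)
        try (LegacyNumericTokenStream ts = new LegacyNumericTokenStream(8)) {
            ts.setIntValue(42);
            System.out.println("precision step: " + ts.getPrecisionStep()); // prints 8
        }
    }
}
```
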
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java
index ed58bb63b65..d5efd6dcfc3 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java
@@ -414,27 +414,11 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
assertThat(request.timestamp(), is("1433239200000"));
}
- public void testThatIndicesBefore2xMustSupportUnixTimestampsInAnyDateFormat() throws Exception {
+ public void testThatIndicesAfter2_0DontSupportUnixTimestampsInAnyDateFormat() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).field("format", "dateOptionalTime").endObject()
.endObject().endObject().string();
-
BytesReference source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes();
-
- //
- // test with older versions
- Settings oldSettings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersionBetween(random(), Version.V_0_90_0, Version.V_1_6_0)).build();
- DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
-
- MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
-
- // both index request are successfully processed
- IndexRequest oldIndexDateIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1970-01-01");
- oldIndexDateIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index");
- IndexRequest oldIndexTimestampIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1234567890");
- oldIndexTimestampIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index");
-
- //
// test with 2.x
DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
MetaData newMetaData = client().admin().cluster().prepareState().get().getState().getMetaData();
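
What remains of the test covers the strict behaviour only; the removed half exercised pre-2.0 leniency. Grounded in the removed lines above and reusing the test's own variable names, the surviving path looks roughly like this sketch:

```java
// Sketch only; `source`, `currentMapper` and `newMetaData` come from the surrounding test.
// A timestamp that matches the mapping's dateOptionalTime format is accepted...
IndexRequest request = new IndexRequest("new-index", "type", "1").source(source).timestamp("1970-01-01");
request.process(newMetaData, new MappingMetaData(currentMapper), true, "new-index");
// ...while a raw unix timestamp such as "1234567890" should now be rejected for 2.x+ indices.
```
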
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java
index ab0182aa0ef..2e2f5f2446f 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java
@@ -256,7 +256,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
}
public void testTimestampParsing() throws IOException {
- IndexService indexService = createIndex("test", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build());
+ IndexService indexService = createIndex("test");
XContentBuilder indexMapping = XContentFactory.jsonBuilder();
boolean enabled = randomBoolean();
indexMapping.startObject()
diff --git a/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java
index 7ccad1ffd2a..b14d5f50776 100644
--- a/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java
@@ -22,7 +22,7 @@ package org.elasticsearch.index.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.FuzzyQuery;
-import org.apache.lucene.search.NumericRangeQuery;
+import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.unit.Fuzziness;
@@ -60,7 +60,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuilder> {
- NumericRangeQuery numericRangeQuery = (NumericRangeQuery) query;
+ LegacyNumericRangeQuery numericRangeQuery = (LegacyNumericRangeQuery) query;
assertTrue(numericRangeQuery.includesMin());
assertTrue(numericRangeQuery.includesMax());
diff --git a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java
index a4af84a8f79..238a186394d 100644
--- a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java
@@ -25,9 +25,9 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FuzzyQuery;
+import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
-import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
@@ -134,7 +134,7 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatchQueryBuilder> {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
Query query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME).toQuery(createShardContext());
- NumericRangeQuery fuzzyQuery = (NumericRangeQuery) query;
+ LegacyNumericRangeQuery fuzzyQuery = (LegacyNumericRangeQuery) query;
assertThat(fuzzyQuery.getMin().longValue(), equalTo(12L));
assertThat(fuzzyQuery.getMax().longValue(), equalTo(12L));
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
index fbb708a5d97..9f99b85a294 100644
--- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
@@ -19,7 +19,7 @@
package org.elasticsearch.index.query;
-import org.apache.lucene.search.NumericRangeQuery;
+import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.ElasticsearchParseException;
@@ -118,8 +118,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuilder> {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext());
// since age is automatically registered in data, we encode it as numeric
- assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
- NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery;
+ assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class));
+ LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery;
assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME));
assertThat(rangeQuery.getMin().intValue(), equalTo(23));
assertThat(rangeQuery.getMax().intValue(), equalTo(54));
@@ -220,15 +220,15 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuilder> {
- if (getCurrentTypes().length > 0 || shardContext.indexVersionCreated().before(Version.V_1_4_0_Beta1)) {
+ if (getCurrentTypes().length > 0) {
Query luceneQuery = queryBuilder.toQuery(shardContext);
assertThat(luceneQuery, instanceOf(TermQuery.class));
TermQuery termQuery = (TermQuery) luceneQuery;
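
As with the token stream, only the class name changes; the factory methods keep their signatures. A sketch of the renamed range query, assuming the same Lucene snapshot (field name and bounds are illustrative):

```java
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;

public class LegacyRangeQueryExample {
    public static void main(String[] args) {
        // was: NumericRangeQuery.newIntRange(...)
        Query q = LegacyNumericRangeQuery.newIntRange("age", 4, 23, 54, true, false);
        System.out.println(q); // age:[23 TO 54}
    }
}
```
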
diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java
index 4ed78e3f5ff..4b6788d463f 100644
--- a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java
@@ -276,7 +276,7 @@ public class FunctionScoreTests extends ESTestCase {
d.add(new TextField("_uid", "1", Field.Store.YES));
w.addDocument(d);
w.commit();
- reader = DirectoryReader.open(w, true);
+ reader = DirectoryReader.open(w);
searcher = newSearcher(reader);
}
@@ -634,13 +634,11 @@ public class FunctionScoreTests extends ESTestCase {
ScoreFunction otherFunciton = function == null ? new DummyScoreFunction(combineFunction) : null;
FunctionScoreQuery diffFunction = new FunctionScoreQuery(q.getSubQuery(), otherFunciton, minScore, combineFunction, maxBoost);
FunctionScoreQuery diffMaxBoost = new FunctionScoreQuery(new TermQuery(new Term("foo", "bar")), function, minScore, combineFunction, maxBoost == 1.0f ? 0.9f : 1.0f);
- q1.setBoost(3.0f);
FunctionScoreQuery[] queries = new FunctionScoreQuery[] {
diffFunction,
diffMinScore,
diffQuery,
q,
- q1,
diffMaxBoost
};
final int numIters = randomIntBetween(20, 100);
@@ -678,7 +676,6 @@ public class FunctionScoreTests extends ESTestCase {
FiltersFunctionScoreQuery diffMinScore = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost, minScore == null ? 0.9f : null, combineFunction);
FilterFunction otherFunc = new FilterFunction(new TermQuery(new Term("filter", "other_query")), scoreFunction);
FiltersFunctionScoreQuery diffFunc = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, randomBoolean() ? new FilterFunction[] {function, otherFunc} : new FilterFunction[] {otherFunc}, maxBoost, minScore, combineFunction);
- q1.setBoost(3.0f);
FiltersFunctionScoreQuery[] queries = new FiltersFunctionScoreQuery[] {
diffQuery,
@@ -687,7 +684,6 @@ public class FunctionScoreTests extends ESTestCase {
diffMode,
diffFunc,
q,
- q1,
diffCombineFunc
};
final int numIters = randomIntBetween(20, 100);
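
The dropped q1 entries relied on Query.setBoost, which Lucene 6 removes; queries become immutable and a boost is applied by wrapping instead. A minimal sketch:

```java
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class BoostWrapExample {
    public static void main(String[] args) {
        Query base = new TermQuery(new Term("foo", "bar"));
        // was: base.setBoost(3.0f);
        Query boosted = new BoostQuery(base, 3.0f);
        System.out.println(boosted); // (foo:bar)^3.0
    }
}
```
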
diff --git a/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java b/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java
index 44b91679623..d3c9975cf58 100644
--- a/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java
@@ -19,8 +19,8 @@
package org.elasticsearch.index.search.geo;
-import com.spatial4j.core.context.SpatialContext;
-import com.spatial4j.core.distance.DistanceUtils;
+import org.locationtech.spatial4j.context.SpatialContext;
+import org.locationtech.spatial4j.distance.DistanceUtils;
import org.apache.lucene.spatial.prefix.tree.Cell;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
diff --git a/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java b/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java
index 4ef84d118fd..5b5b24bbe4b 100644
--- a/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java
@@ -218,7 +218,7 @@ public abstract class AbstractNumberNestedSortingTestCase extends AbstractFieldD
writer.addDocument(document);
MultiValueMode sortMode = MultiValueMode.SUM;
- DirectoryReader directoryReader = DirectoryReader.open(writer, false);
+ DirectoryReader directoryReader = DirectoryReader.open(writer);
directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0));
IndexSearcher searcher = new IndexSearcher(directoryReader);
Query parentFilter = new TermQuery(new Term("__type", "parent"));
diff --git a/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java
index ff82b7c43ac..d5e9ff85ba3 100644
--- a/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java
@@ -87,13 +87,13 @@ public class NestedSortingTests extends AbstractFieldDataTestCase {
docs.add(parent);
writer.addDocuments(docs);
if (rarely()) { // we need to have a bit more segments than what RandomIndexWriter would do by default
- DirectoryReader.open(writer, false).close();
+ DirectoryReader.open(writer).close();
}
}
writer.commit();
MultiValueMode sortMode = randomFrom(Arrays.asList(MultiValueMode.MIN, MultiValueMode.MAX));
- DirectoryReader reader = DirectoryReader.open(writer, false);
+ DirectoryReader reader = DirectoryReader.open(writer);
reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
IndexSearcher searcher = new IndexSearcher(reader);
PagedBytesIndexFieldData indexFieldData1 = getForField("f");
@@ -278,7 +278,7 @@ public class NestedSortingTests extends AbstractFieldDataTestCase {
writer.addDocument(document);
MultiValueMode sortMode = MultiValueMode.MIN;
- DirectoryReader reader = DirectoryReader.open(writer, false);
+ DirectoryReader reader = DirectoryReader.open(writer);
reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
IndexSearcher searcher = new IndexSearcher(reader);
PagedBytesIndexFieldData indexFieldData = getForField("field2");
diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java
index cf95f22ae3b..cfadab6efb8 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java
@@ -56,7 +56,7 @@ public class IndexSearcherWrapperTests extends ESTestCase {
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
- DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+ DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
IndexSearcher searcher = new IndexSearcher(open);
assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
final AtomicInteger closeCalls = new AtomicInteger(0);
@@ -106,7 +106,7 @@ public class IndexSearcherWrapperTests extends ESTestCase {
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
- DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+ DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
IndexSearcher searcher = new IndexSearcher(open);
assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
searcher.setSimilarity(iwc.getSimilarity());
@@ -148,7 +148,7 @@ public class IndexSearcherWrapperTests extends ESTestCase {
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
- DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+ DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
IndexSearcher searcher = new IndexSearcher(open);
assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
searcher.setSimilarity(iwc.getSimilarity());
@@ -168,7 +168,7 @@ public class IndexSearcherWrapperTests extends ESTestCase {
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
- DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+ DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
IndexSearcher searcher = new IndexSearcher(open);
assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
searcher.setSimilarity(iwc.getSimilarity());
diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java
index 105179a1f53..e960622d1c1 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java
@@ -39,7 +39,7 @@ public class ShardUtilsTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
writer.commit();
ShardId id = new ShardId("foo", "_na_", random().nextInt());
- try (DirectoryReader reader = DirectoryReader.open(writer, random().nextBoolean())) {
+ try (DirectoryReader reader = DirectoryReader.open(writer)) {
ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
assertEquals(id, ShardUtils.extractShardId(wrap));
}
@@ -53,7 +53,7 @@ public class ShardUtilsTests extends ESTestCase {
}
}
- try (DirectoryReader reader = DirectoryReader.open(writer, random().nextBoolean())) {
+ try (DirectoryReader reader = DirectoryReader.open(writer)) {
ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
assertEquals(id, ShardUtils.extractShardId(wrap));
CompositeReaderContext context = wrap.getContext();
diff --git a/core/src/test/java/org/elasticsearch/index/shard/VersionFieldUpgraderTests.java b/core/src/test/java/org/elasticsearch/index/shard/VersionFieldUpgraderTests.java
deleted file mode 100644
index 2fc02fb0503..00000000000
--- a/core/src/test/java/org/elasticsearch/index/shard/VersionFieldUpgraderTests.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.shard;
-
-import org.apache.lucene.analysis.CannedTokenStream;
-import org.apache.lucene.analysis.Token;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.NumericDocValuesField;
-import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.CodecReader;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.DocValuesType;
-import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.TestUtil;
-import org.elasticsearch.common.Numbers;
-import org.elasticsearch.index.mapper.internal.UidFieldMapper;
-import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
-import org.elasticsearch.test.ESTestCase;
-
-/** Tests upgrading old document versions from _uid payloads to _version docvalues */
-public class VersionFieldUpgraderTests extends ESTestCase {
-
- /** Simple test: one doc in the old format, check that it looks correct */
- public void testUpgradeOneDocument() throws Exception {
- Directory dir = newDirectory();
- IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null));
-
- // add a document with a _uid having a payload of 3
- Document doc = new Document();
- Token token = new Token("1", 0, 1);
- token.setPayload(new BytesRef(Numbers.longToBytes(3)));
- doc.add(new TextField(UidFieldMapper.NAME, new CannedTokenStream(token)));
- iw.addDocument(doc);
- iw.commit();
-
- CodecReader reader = getOnlySegmentReader(DirectoryReader.open(iw, true));
- CodecReader upgraded = VersionFieldUpgrader.wrap(reader);
- // we need to be upgraded, should be a different instance
- assertNotSame(reader, upgraded);
-
- // make sure we can see our numericdocvalues in fieldinfos
- FieldInfo versionField = upgraded.getFieldInfos().fieldInfo(VersionFieldMapper.NAME);
- assertNotNull(versionField);
- assertEquals(DocValuesType.NUMERIC, versionField.getDocValuesType());
- // should have a value of 3, and be visible in docsWithField
- assertEquals(3, upgraded.getNumericDocValues(VersionFieldMapper.NAME).get(0));
- assertTrue(upgraded.getDocsWithField(VersionFieldMapper.NAME).get(0));
-
- // verify filterreader with checkindex
- TestUtil.checkReader(upgraded);
-
- reader.close();
- iw.close();
- dir.close();
- }
-
- /** test that we are a non-op if the segment already has the version field */
- public void testAlreadyUpgraded() throws Exception {
- Directory dir = newDirectory();
- IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null));
-
- // add a document with a _uid having a payload of 3
- Document doc = new Document();
- Token token = new Token("1", 0, 1);
- token.setPayload(new BytesRef(Numbers.longToBytes(3)));
- doc.add(new TextField(UidFieldMapper.NAME, new CannedTokenStream(token)));
- doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 3));
- iw.addDocument(doc);
- iw.commit();
-
- CodecReader reader = getOnlySegmentReader(DirectoryReader.open(iw, true));
- CodecReader upgraded = VersionFieldUpgrader.wrap(reader);
- // we already upgraded: should be same instance
- assertSame(reader, upgraded);
-
- reader.close();
- iw.close();
- dir.close();
- }
-
- /** Test upgrading two documents */
- public void testUpgradeTwoDocuments() throws Exception {
- Directory dir = newDirectory();
- IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null));
-
- // add a document with a _uid having a payload of 3
- Document doc = new Document();
- Token token = new Token("1", 0, 1);
- token.setPayload(new BytesRef(Numbers.longToBytes(3)));
- doc.add(new TextField(UidFieldMapper.NAME, new CannedTokenStream(token)));
- iw.addDocument(doc);
-
- doc = new Document();
- token = new Token("2", 0, 1);
- token.setPayload(new BytesRef(Numbers.longToBytes(4)));
- doc.add(new TextField(UidFieldMapper.NAME, new CannedTokenStream(token)));
- iw.addDocument(doc);
-
- iw.commit();
-
- CodecReader reader = getOnlySegmentReader(DirectoryReader.open(iw, true));
- CodecReader upgraded = VersionFieldUpgrader.wrap(reader);
- // we need to be upgraded, should be a different instance
- assertNotSame(reader, upgraded);
-
- // make sure we can see our numericdocvalues in fieldinfos
- FieldInfo versionField = upgraded.getFieldInfos().fieldInfo(VersionFieldMapper.NAME);
- assertNotNull(versionField);
- assertEquals(DocValuesType.NUMERIC, versionField.getDocValuesType());
- // should have a values of 3 and 4, and be visible in docsWithField
- assertEquals(3, upgraded.getNumericDocValues(VersionFieldMapper.NAME).get(0));
- assertEquals(4, upgraded.getNumericDocValues(VersionFieldMapper.NAME).get(1));
- assertTrue(upgraded.getDocsWithField(VersionFieldMapper.NAME).get(0));
- assertTrue(upgraded.getDocsWithField(VersionFieldMapper.NAME).get(1));
-
- // verify filterreader with checkindex
- TestUtil.checkReader(upgraded);
-
- reader.close();
- iw.close();
- dir.close();
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java
new file mode 100644
index 00000000000..edb337fd4e6
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.index.similarity;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.IndexSettingsModule;
+
+import java.util.Collections;
+
+public class SimilarityServiceTests extends ESTestCase {
+
+ // Tests #16594
+ public void testOverrideBuiltInSimilarity() {
+ Settings settings = Settings.builder().put("index.similarity.BM25.type", "classic").build();
+ IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings);
+ try {
+ new SimilarityService(indexSettings, Collections.emptyMap());
+ fail("can't override bm25");
+ } catch (IllegalArgumentException ex) {
+ assertEquals(ex.getMessage(), "Cannot redefine built-in Similarity [BM25]");
+ }
+ }
+
+ // Pre v3 indices could override built-in similarities
+ public void testOverrideBuiltInSimilarityPreV3() {
+ Settings settings = Settings.builder()
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0)
+ .put("index.similarity.BM25.type", "classic")
+ .build();
+ IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings);
+ SimilarityService service = new SimilarityService(indexSettings, Collections.emptyMap());
+ assertTrue(service.getSimilarity("BM25") instanceof ClassicSimilarityProvider);
+ }
+
+ // Tests #16594
+ public void testDefaultSimilarity() {
+ Settings settings = Settings.builder().put("index.similarity.default.type", "BM25").build();
+ IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings);
+ SimilarityService service = new SimilarityService(indexSettings, Collections.emptyMap());
+ assertTrue(service.getDefaultSimilarity() instanceof BM25SimilarityProvider);
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java
index 36fc5cf0717..eff41dd3ffe 100644
--- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java
+++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java
@@ -294,67 +294,6 @@ public class StoreTests extends ESTestCase {
IOUtils.close(verifyingOutput, dir);
}
- // TODO: remove this, its too fragile. just use a static old index instead.
- private static final class OldSIMockingCodec extends FilterCodec {
-
- protected OldSIMockingCodec() {
- super(new Lucene54Codec().getName(), new Lucene54Codec());
- }
-
- @Override
- public SegmentInfoFormat segmentInfoFormat() {
- final SegmentInfoFormat segmentInfoFormat = super.segmentInfoFormat();
- return new SegmentInfoFormat() {
- @Override
- public SegmentInfo read(Directory directory, String segmentName, byte[] segmentID, IOContext context) throws IOException {
- return segmentInfoFormat.read(directory, segmentName, segmentID, context);
- }
-
- // this sucks it's a full copy of Lucene50SegmentInfoFormat but hey I couldn't find a way to make it write 4_5_0 versions
- // somebody was too paranoid when implementing this. ey rmuir, was that you? - go fix it :P
- @Override
- public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
- final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene50SegmentInfoFormat.SI_EXTENSION);
- si.addFile(fileName);
-
- boolean success = false;
- try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
- CodecUtil.writeIndexHeader(output,
- "Lucene50SegmentInfo",
- 0,
- si.getId(),
- "");
- Version version = Version.LUCENE_4_5_0; // FOOOOOO!!
- // Write the Lucene version that created this segment, since 3.1
- output.writeInt(version.major);
- output.writeInt(version.minor);
- output.writeInt(version.bugfix);
- assert version.prerelease == 0;
- output.writeInt(si.maxDoc());
-
- output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
- output.writeStringStringMap(si.getDiagnostics());
- Set<String> files = si.files();
- for (String file : files) {
- if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
- throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
- }
- }
- output.writeStringSet(files);
- output.writeStringStringMap(si.getAttributes());
- CodecUtil.writeFooter(output);
- success = true;
- } finally {
- if (!success) {
- // TODO: are we doing this outside of the tracking wrapper? why must SIWriter cleanup like this?
- IOUtils.deleteFilesIgnoringExceptions(si.dir, fileName);
- }
- }
- }
- };
- }
- }
-
public void testNewChecksums() throws IOException {
final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
@@ -381,7 +320,7 @@ public class StoreTests extends ESTestCase {
}
}
if (random().nextBoolean()) {
- DirectoryReader.open(writer, random().nextBoolean()).close(); // flush
+ DirectoryReader.open(writer).close(); // flush
}
Store.MetadataSnapshot metadata;
// check before we committed
@@ -472,32 +411,12 @@ public class StoreTests extends ESTestCase {
}
- final Adler32 adler32 = new Adler32();
final long luceneChecksum;
try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) {
assertEquals(luceneFileLength, indexInput.length());
luceneChecksum = CodecUtil.retrieveChecksum(indexInput);
}
- { // positive check
- StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0);
- assertTrue(Store.checkIntegrityNoException(lucene, dir));
- }
-
- { // negative check - wrong checksum
- StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength, Store.digestToString(luceneChecksum + 1), Version.LUCENE_4_8_0);
- assertFalse(Store.checkIntegrityNoException(lucene, dir));
- }
-
- { // negative check - wrong length
- StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength + 1, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0);
- assertFalse(Store.checkIntegrityNoException(lucene, dir));
- }
-
- { // negative check - wrong file
- StoreFileMetaData lucene = new StoreFileMetaData("legacy.bin", luceneFileLength, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0);
- assertFalse(Store.checkIntegrityNoException(lucene, dir));
- }
dir.close();
}
@@ -600,8 +519,6 @@ public class StoreTests extends ESTestCase {
dir = StoreTests.newDirectory(random);
if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper) dir).setPreventDoubleWrite(preventDoubleWrite);
- // TODO: fix this test to handle virus checker
- ((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
}
this.random = random;
}
@@ -859,28 +776,6 @@ public class StoreTests extends ESTestCase {
IOUtils.close(store);
}
- public void testCleanUpWithLegacyChecksums() throws IOException {
- Map<String, StoreFileMetaData> metaDataMap = new HashMap<>();
- metaDataMap.put("segments_1", new StoreFileMetaData("segments_1", 50, "foobar", Version.LUCENE_4_8_0, new BytesRef(new byte[]{1})));
- metaDataMap.put("_0_1.del", new StoreFileMetaData("_0_1.del", 42, "foobarbaz", Version.LUCENE_4_8_0, new BytesRef()));
- Store.MetadataSnapshot snapshot = new Store.MetadataSnapshot(unmodifiableMap(metaDataMap), emptyMap(), 0);
-
- final ShardId shardId = new ShardId("index", "_na_", 1);
- DirectoryService directoryService = new LuceneManagedDirectoryService(random());
- Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
- for (String file : metaDataMap.keySet()) {
- try (IndexOutput output = store.directory().createOutput(file, IOContext.DEFAULT)) {
- BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
- output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
- CodecUtil.writeFooter(output);
- }
- }
-
- store.verifyAfterCleanup(snapshot, snapshot);
- deleteContent(store.directory());
- IOUtils.close(store);
- }
-
public void testOnCloseCallback() throws IOException {
final ShardId shardId = new ShardId(new Index(randomRealisticUnicodeOfCodepointLengthBetween(1, 10), "_na_"), randomIntBetween(0, 100));
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
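
The checksum assertions deleted above exercised the legacy (pre-Lucene 4.8) Adler32 path; with 1.x segments out of scope, every tracked file is expected to end in Lucene's CRC32 codec footer, which can be read back as sketched here (helper name is illustrative):

```java
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

public final class FooterChecksums {
    /** Reads the CRC32 stored in the file's codec footer; throws if the footer is missing or corrupt. */
    static long footerChecksum(Directory dir, String fileName) throws java.io.IOException {
        try (IndexInput in = dir.openInput(fileName, IOContext.READONCE)) {
            return CodecUtil.retrieveChecksum(in);
        }
    }
}
```
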
diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java
index d6e248f1c94..5a4aa2e6b24 100644
--- a/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java
@@ -84,7 +84,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
w.addDocument(new Document());
- DirectoryReader r = DirectoryReader.open(w, false);
+ DirectoryReader r = DirectoryReader.open(w);
w.close();
ShardId shard = new ShardId("index", "_na_", 0);
r = ElasticsearchDirectoryReader.wrap(r, shard);
@@ -154,7 +154,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
Directory dir1 = newDirectory();
IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig());
w1.addDocument(new Document());
- DirectoryReader r1 = DirectoryReader.open(w1, false);
+ DirectoryReader r1 = DirectoryReader.open(w1);
w1.close();
ShardId shard1 = new ShardId("index", "_na_", 0);
r1 = ElasticsearchDirectoryReader.wrap(r1, shard1);
@@ -164,7 +164,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
Directory dir2 = newDirectory();
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig());
w2.addDocument(new Document());
- DirectoryReader r2 = DirectoryReader.open(w2, false);
+ DirectoryReader r2 = DirectoryReader.open(w2);
w2.close();
ShardId shard2 = new ShardId("index", "_na_", 1);
r2 = ElasticsearchDirectoryReader.wrap(r2, shard2);
@@ -279,7 +279,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
Directory dir1 = newDirectory();
IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig());
w1.addDocument(new Document());
- DirectoryReader r1 = DirectoryReader.open(w1, false);
+ DirectoryReader r1 = DirectoryReader.open(w1);
w1.close();
ShardId shard1 = new ShardId("index", "_na_", 0);
r1 = ElasticsearchDirectoryReader.wrap(r1, shard1);
@@ -289,7 +289,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
Directory dir2 = newDirectory();
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig());
w2.addDocument(new Document());
- DirectoryReader r2 = DirectoryReader.open(w2, false);
+ DirectoryReader r2 = DirectoryReader.open(w2);
w2.close();
ShardId shard2 = new ShardId("index", "_na_", 1);
r2 = ElasticsearchDirectoryReader.wrap(r2, shard2);
diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java
index bd48a388f34..e36f1bca49b 100644
--- a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java
@@ -54,7 +54,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
writer.addDocument(newDoc(0, "foo"));
- DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
+ DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
AtomicBoolean indexShard = new AtomicBoolean(true);
@@ -107,7 +107,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
writer.addDocument(newDoc(0, "foo"));
- DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
+ DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0);
@@ -144,12 +144,12 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
writer.addDocument(newDoc(0, "foo"));
- DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1));
+ DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0);
writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
- DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1));
+ DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0);
// initial cache
@@ -237,13 +237,13 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
writer.addDocument(newDoc(0, "foo"));
- DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
+ DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0);
writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
- DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
+ DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0);
@@ -263,18 +263,18 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
writer.addDocument(newDoc(0, "foo"));
- DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
+ DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0);
writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
- DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
+ DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0);
writer.updateDocument(new Term("id", "0"), newDoc(0, "baz"));
- DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
+ DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, indexShard, 0);
@@ -299,18 +299,18 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
writer.addDocument(newDoc(0, "foo"));
- DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
+ DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0);
writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
- DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
+ DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0);
writer.updateDocument(new Term("id", "0"), newDoc(0, "baz"));
- DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
+ DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
AtomicBoolean differentIdentity = new AtomicBoolean(true);
TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, differentIdentity, 0);
diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java
index 09371c38dab..4597765c11c 100644
--- a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java
@@ -43,7 +43,6 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
-@ESBackcompatTestCase.CompatibilityVersion(version = Version.V_1_2_0_ID) // we throw an exception if we create an index with _field_names that is 1.3
public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
index 92b96d8e47d..23a197dbab6 100644
--- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
@@ -19,15 +19,8 @@
package org.elasticsearch.indices.analyze;
import org.elasticsearch.action.admin.indices.alias.Alias;
-import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.rest.action.admin.indices.analyze.RestAnalyzeAction;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.core.IsNull;
@@ -196,53 +189,6 @@ public class AnalyzeActionIT extends ESIntegTestCase {
return randomBoolean() ? "test" : "alias";
}
- public void testParseXContentForAnalyzeReuqest() throws Exception {
- BytesReference content = XContentFactory.jsonBuilder()
- .startObject()
- .field("text", "THIS IS A TEST")
- .field("tokenizer", "keyword")
- .array("filters", "lowercase")
- .endObject().bytes();
-
- AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
-
- RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
-
- assertThat(analyzeRequest.text().length, equalTo(1));
- assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"}));
- assertThat(analyzeRequest.tokenizer(), equalTo("keyword"));
- assertThat(analyzeRequest.tokenFilters(), equalTo(new String[]{"lowercase"}));
- }
-
- public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception {
- AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
-
- try {
- RestAnalyzeAction.buildFromContent(new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
- fail("shouldn't get here");
- } catch (Exception e) {
- assertThat(e, instanceOf(IllegalArgumentException.class));
- assertThat(e.getMessage(), equalTo("Failed to parse request body"));
- }
- }
-
- public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception {
- AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
- BytesReference invalidContent =XContentFactory.jsonBuilder()
- .startObject()
- .field("text", "THIS IS A TEST")
- .field("unknown", "keyword")
- .endObject().bytes();
-
- try {
- RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
- fail("shouldn't get here");
- } catch (Exception e) {
- assertThat(e, instanceOf(IllegalArgumentException.class));
- assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]"));
- }
- }
-
public void testAnalyzerWithMultiValues() throws Exception {
assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
ensureGreen();
diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java
index b5f744ddc23..b69d1218546 100644
--- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java
@@ -94,7 +94,7 @@ public class RecoverySourceHandlerTests extends ESTestCase {
@Override
public void close() throws IOException {
super.close();
- store.directory().sync(Collections.singleton(md.name())); // sync otherwise MDW will mess with it
+ targetStore.directory().sync(Collections.singleton(md.name())); // sync otherwise MDW will mess with it
}
};
} catch (IOException e) {
diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
index e29ad3e081a..467aa4d3309 100644
--- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
@@ -20,7 +20,6 @@
package org.elasticsearch.indices.stats;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
-import org.apache.lucene.util.Version;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
@@ -542,7 +541,6 @@ public class IndexStatsIT extends ESIntegTestCase {
assertThat(stats.getTotal().getSegments(), notNullValue());
assertThat(stats.getTotal().getSegments().getCount(), equalTo((long) test1.totalNumShards));
- assumeTrue("test doesn't work with 4.6.0", org.elasticsearch.Version.CURRENT.luceneVersion != Version.LUCENE_4_6_0);
assertThat(stats.getTotal().getSegments().getMemoryInBytes(), greaterThan(0L));
}
diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java
new file mode 100644
index 00000000000..abfe18f8c58
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.ingest.core.Pipeline;
+import org.elasticsearch.node.service.NodeService;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.test.ESIntegTestCase;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+
+@ESIntegTestCase.ClusterScope(numDataNodes = 0, numClientNodes = 0, scope = ESIntegTestCase.Scope.TEST)
+public class IngestProcessorNotInstalledOnAllNodesIT extends ESIntegTestCase {
+
+ private final BytesReference pipelineSource;
+ private volatile boolean installPlugin;
+
+ public IngestProcessorNotInstalledOnAllNodesIT() throws IOException {
+ pipelineSource = jsonBuilder().startObject()
+ .startArray("processors")
+ .startObject()
+ .startObject("test")
+ .endObject()
+ .endObject()
+ .endArray()
+ .endObject().bytes();
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return installPlugin ? pluginList(IngestClientIT.IngestPlugin.class) : Collections.emptyList();
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> getMockPlugins() {
+ return Collections.singletonList(TestSeedPlugin.class);
+ }
+
+ public void testFailPipelineCreation() throws Exception {
+ installPlugin = true;
+ internalCluster().startNode();
+ installPlugin = false;
+ internalCluster().startNode();
+
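+ // The first node has the ingest test plugin installed, the second node does not, so putting the pipeline must fail validation.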
+ try {
+ client().admin().cluster().preparePutPipeline("_id", pipelineSource).get();
+ fail("exception expected");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("Processor type [test] is not installed on node"));
+ }
+ }
+
+ public void testFailPipelineCreationProcessorNotInstalledOnMasterNode() throws Exception {
+ internalCluster().startNode();
+ installPlugin = true;
+ internalCluster().startNode();
+
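+ // Here the master (first) node lacks the processor factory, so the pipeline source cannot even be parsed there.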
+ try {
+ client().admin().cluster().preparePutPipeline("_id", pipelineSource).get();
+ fail("exception expected");
+ } catch (ElasticsearchParseException e) {
+ assertThat(e.getMessage(), equalTo("No processor type exists with name [test]"));
+ }
+ }
+
+ // If there is pipeline defined and a node joins that doesn't have the processor installed then
+ // that pipeline can't be used on this node.
+ public void testFailStartNode() throws Exception {
+ installPlugin = true;
+ String node1 = internalCluster().startNode();
+
+ WritePipelineResponse response = client().admin().cluster().preparePutPipeline("_id", pipelineSource).get();
+ assertThat(response.isAcknowledged(), is(true));
+ Pipeline pipeline = internalCluster().getInstance(NodeService.class, node1).getIngestService().getPipelineStore().get("_id");
+ assertThat(pipeline, notNullValue());
+
+ installPlugin = false;
+ String node2 = internalCluster().startNode();
+ pipeline = internalCluster().getInstance(NodeService.class, node2).getIngestService().getPipelineStore().get("_id");
+ assertThat(pipeline, nullValue());
+ }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java
index fb0605f90b5..4009e4877b9 100644
--- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java
@@ -21,24 +21,32 @@ package org.elasticsearch.ingest;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.Version;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.transport.LocalTransportAddress;
+import org.elasticsearch.ingest.core.IngestInfo;
import org.elasticsearch.ingest.core.Pipeline;
+import org.elasticsearch.ingest.core.ProcessorInfo;
+import org.elasticsearch.ingest.processor.RemoveProcessor;
import org.elasticsearch.ingest.processor.SetProcessor;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@@ -52,6 +60,7 @@ public class PipelineStoreTests extends ESTestCase {
store = new PipelineStore(Settings.EMPTY);
ProcessorsRegistry.Builder registryBuilder = new ProcessorsRegistry.Builder();
registryBuilder.registerProcessor("set", (templateService, registry) -> new SetProcessor.Factory(TestTemplateService.instance()));
+ registryBuilder.registerProcessor("remove", (templateService, registry) -> new RemoveProcessor.Factory(TestTemplateService.instance()));
store.buildProcessorFactoryRegistry(registryBuilder, null);
}
@@ -197,4 +206,38 @@ public class PipelineStoreTests extends ESTestCase {
assertThat(pipeline, nullValue());
}
+ public void testValidate() throws Exception {
+ PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}},{\"remove\" : {\"field\": \"_field\"}}]}"));
+
+ DiscoveryNode node1 = new DiscoveryNode("_node_id1", new LocalTransportAddress("_id"), Version.CURRENT);
+ DiscoveryNode node2 = new DiscoveryNode("_node_id2", new LocalTransportAddress("_id"), Version.CURRENT);
+ Map<DiscoveryNode, IngestInfo> ingestInfos = new HashMap<>();
+ ingestInfos.put(node1, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove"))));
+ ingestInfos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set"))));
+
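+ // node2 only advertises the "set" processor, so a pipeline that also uses "remove" must be rejected.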
+ try {
+ store.validatePipeline(ingestInfos, putRequest);
+ fail("exception expected");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("Processor type [remove] is not installed on node [{_node_id2}{local}{local[_id]}]"));
+ }
+
+ ingestInfos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove"))));
+ store.validatePipeline(ingestInfos, putRequest);
+ }
+
+ public void testValidateNoIngestInfo() throws Exception {
+ PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}"));
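+ // Without ingest info from any node the pipeline cannot be validated at all.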
+ try {
+ store.validatePipeline(Collections.emptyMap(), putRequest);
+ fail("exception expected");
+ } catch (IllegalStateException e) {
+ assertThat(e.getMessage(), equalTo("Ingest info is empty"));
+ }
+
+ DiscoveryNode discoveryNode = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT);
+ IngestInfo ingestInfo = new IngestInfo(Collections.singletonList(new ProcessorInfo("set")));
+ store.validatePipeline(Collections.singletonMap(discoveryNode, ingestInfo), putRequest);
+ }
+
}
diff --git a/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java
index fdf48ff4281..537d8f020e6 100644
--- a/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java
@@ -23,11 +23,14 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ingest.ProcessorsRegistry;
import org.elasticsearch.ingest.TestProcessor;
import org.elasticsearch.ingest.TestTemplateService;
+import org.elasticsearch.ingest.processor.FailProcessor;
+import org.elasticsearch.ingest.processor.SetProcessor;
import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.prefs.PreferencesFactory;
@@ -115,6 +118,15 @@ public class PipelineFactoryTests extends ESTestCase {
assertThat(pipeline.getProcessors().get(0).getType(), equalTo("compound"));
}
+ public void testFlattenProcessors() throws Exception {
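+ // Two compound processors, each wrapping the same leaf processor twice, flatten to four leaf processors.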
+ TestProcessor testProcessor = new TestProcessor(ingestDocument -> {});
+ CompoundProcessor processor1 = new CompoundProcessor(testProcessor, testProcessor);
+ CompoundProcessor processor2 = new CompoundProcessor(Collections.singletonList(testProcessor), Collections.singletonList(testProcessor));
+ Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor1, processor2));
+ List<Processor> flattened = pipeline.flattenAllProcessors();
+ assertThat(flattened.size(), equalTo(4));
+ }
+
private ProcessorsRegistry createProcessorRegistry(Map<String, Processor.Factory> processorRegistry) {
ProcessorsRegistry.Builder builder = new ProcessorsRegistry.Builder();
for (Map.Entry<String, Processor.Factory> entry : processorRegistry.entrySet()) {
diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
index 693ba4a2eba..2a845303675 100644
--- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
+++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
@@ -33,6 +33,7 @@ import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.http.HttpInfo;
+import org.elasticsearch.ingest.core.IngestInfo;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.monitor.os.DummyOsInfo;
import org.elasticsearch.monitor.os.OsInfo;
@@ -90,6 +91,7 @@ public class NodeInfoStreamingTests extends ESTestCase {
compareJsonOutput(nodeInfo.getNode(), readNodeInfo.getNode());
compareJsonOutput(nodeInfo.getOs(), readNodeInfo.getOs());
comparePluginsAndModules(nodeInfo, readNodeInfo);
+ compareJsonOutput(nodeInfo.getIngest(), readNodeInfo.getIngest());
}
private void comparePluginsAndModules(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException {
@@ -135,6 +137,7 @@ public class NodeInfoStreamingTests extends ESTestCase {
PluginsAndModules plugins = new PluginsAndModules();
plugins.addModule(DummyPluginInfo.INSTANCE);
plugins.addPlugin(DummyPluginInfo.INSTANCE);
- return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, htttpInfo, plugins);
+ IngestInfo ingestInfo = new IngestInfo();
+ return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, htttpInfo, plugins, ingestInfo);
}
}
diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java
index 170b0be30df..8bef9138567 100644
--- a/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java
@@ -147,7 +147,9 @@ public class PercolatorQueryTests extends ESTestCase {
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
- IndexSearcher shardSearcher = newSearcher(directoryReader);
+ // don't use newSearcher, which randomizes similarity. if it gets classic sim, the test eats it,
+ // as the score becomes 1 due to querynorm.
+ IndexSearcher shardSearcher = new IndexSearcher(directoryReader);
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer());
diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java
index 37a0f4e358e..04bff31057d 100644
--- a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java
+++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java
@@ -176,12 +176,12 @@ public class PluginInfoTests extends ESTestCase {
"description", "fake desc",
"name", "my_plugin",
"version", "1.0",
- "elasticsearch.version", Version.V_1_7_0.toString());
+ "elasticsearch.version", Version.V_2_0_0.toString());
try {
PluginInfo.readFromProperties(pluginDir);
fail("expected old elasticsearch version exception");
} catch (IllegalArgumentException e) {
- assertTrue(e.getMessage().contains("Was designed for version [1.7.0]"));
+ assertTrue(e.getMessage().contains("Was designed for version [2.0.0]"));
}
}
diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java
new file mode 100644
index 00000000000..34e8315372b
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.rest.action.admin.indices.analyze;
+
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
+import org.elasticsearch.common.ParseFieldMatcher;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.test.ESTestCase;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.startsWith;
+
+public class RestAnalyzeActionTests extends ESTestCase {
+
+ public void testParseXContentForAnalyzeRequest() throws Exception {
+ BytesReference content = XContentFactory.jsonBuilder()
+ .startObject()
+ .field("text", "THIS IS A TEST")
+ .field("tokenizer", "keyword")
+ .array("filters", "lowercase")
+ .endObject().bytes();
+
+ AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
+
+ RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
+
+ assertThat(analyzeRequest.text().length, equalTo(1));
+ assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"}));
+ assertThat(analyzeRequest.tokenizer(), equalTo("keyword"));
+ assertThat(analyzeRequest.tokenFilters(), equalTo(new String[]{"lowercase"}));
+ }
+
+ public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception {
+ AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
+
+ try {
+ RestAnalyzeAction.buildFromContent(new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
+ fail("shouldn't get here");
+ } catch (Exception e) {
+ assertThat(e, instanceOf(IllegalArgumentException.class));
+ assertThat(e.getMessage(), equalTo("Failed to parse request body"));
+ }
+ }
+
+ public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception {
+ AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
+ BytesReference invalidContent = XContentFactory.jsonBuilder()
+ .startObject()
+ .field("text", "THIS IS A TEST")
+ .field("unknown", "keyword")
+ .endObject().bytes();
+
+ try {
+ RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
+ fail("shouldn't get here");
+ } catch (Exception e) {
+ assertThat(e, instanceOf(IllegalArgumentException.class));
+ assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]"));
+ }
+ }
+
+ public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception {
+ AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
+ BytesReference invalidExplain = XContentFactory.jsonBuilder()
+ .startObject()
+ .field("explain", "fals")
+ .endObject().bytes();
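+ // "fals" is not a valid boolean, so parsing the explain flag must fail.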
+ try {
+ RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
+ fail("shouldn't get here");
+ } catch (Exception e) {
+ assertThat(e, instanceOf(IllegalArgumentException.class));
+ assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'"));
+ }
+ }
+
+
+}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
index 2e4a974b778..c2ac2078c06 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
@@ -18,8 +18,6 @@
*/
package org.elasticsearch.search.aggregations.bucket;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.Scorer;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.joda.DateMathParser;
@@ -28,22 +26,14 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.script.CompiledScript;
-import org.elasticsearch.script.ExecutableScript;
-import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptEngineRegistry;
-import org.elasticsearch.script.ScriptEngineService;
-import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
-import org.elasticsearch.script.SearchScript;
+import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsMockPlugin;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
-import org.elasticsearch.search.lookup.LeafSearchLookup;
-import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
@@ -55,7 +45,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
-import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -142,8 +131,7 @@ public class DateHistogramIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(
- ExtractFieldScriptPlugin.class,
- FieldValueScriptPlugin.class);
+ DateScriptsMockPlugin.class);
}
@After
@@ -466,10 +454,12 @@ public class DateHistogramIT extends ESIntegTestCase {
}
public void testSingleValuedFieldWithValueScript() throws Exception {
+ Map<String, Object> params = new HashMap<>();
+ params.put("fieldname", "date");
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo")
.field("date")
- .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))
+ .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params))
.dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet();
assertSearchResponse(response);
@@ -600,10 +590,12 @@ public class DateHistogramIT extends ESIntegTestCase {
* doc 6: [ Apr 23, May 24]
*/
public void testMultiValuedFieldWithValueScript() throws Exception {
+ Map<String, Object> params = new HashMap<>();
+ params.put("fieldname", "dates");
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo")
.field("dates")
- .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))
+ .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params))
.dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet();
assertSearchResponse(response);
@@ -652,8 +644,11 @@ public class DateHistogramIT extends ESIntegTestCase {
* Mar 23
*/
public void testScriptSingleValue() throws Exception {
+ Map<String, Object> params = new HashMap<>();
+ params.put("fieldname", "date");
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(dateHistogram("histo").script(new Script("date", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH))
+ .addAggregation(dateHistogram("histo").script(new Script(DateScriptMocks.ExtractFieldScript.NAME,
+ ScriptType.INLINE, "native", params)).dateHistogramInterval(DateHistogramInterval.MONTH))
.execute().actionGet();
assertSearchResponse(response);
@@ -687,8 +682,11 @@ public class DateHistogramIT extends ESIntegTestCase {
}
public void testScriptMultiValued() throws Exception {
+ Map<String, Object> params = new HashMap<>();
+ params.put("fieldname", "dates");
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(dateHistogram("histo").script(new Script("dates", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH))
+ .addAggregation(dateHistogram("histo").script(new Script(DateScriptMocks.ExtractFieldScript.NAME,
+ ScriptType.INLINE, "native", params)).dateHistogramInterval(DateHistogramInterval.MONTH))
.execute().actionGet();
assertSearchResponse(response);
@@ -1148,256 +1146,4 @@ public class DateHistogramIT extends ESIntegTestCase {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), greaterThan(0));
}
-
- /**
- * Mock plugin for the {@link ExtractFieldScriptEngine}
- */
- public static class ExtractFieldScriptPlugin extends Plugin {
-
- @Override
- public String name() {
- return ExtractFieldScriptEngine.NAME;
- }
-
- @Override
- public String description() {
- return "Mock script engine for " + DateHistogramIT.class;
- }
-
- public void onModule(ScriptModule module) {
- module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.TYPES));
- }
-
- }
-
- /**
- * This mock script returns the field that is specified by name in the script body
- */
- public static class ExtractFieldScriptEngine implements ScriptEngineService {
-
- public static final String NAME = "extract_field";
-
- public static final List<String> TYPES = Collections.singletonList(NAME);
-
- @Override
- public void close() throws IOException {
- }
-
- @Override
- public List<String> getTypes() {
- return TYPES;
- }
-
- @Override
- public List<String> getExtensions() {
- return TYPES;
- }
-
- @Override
- public boolean isSandboxed() {
- return true;
- }
-
- @Override
- public Object compile(String script, Map<String, String> params) {
- return script;
- }
-
- @Override
- public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
- throw new UnsupportedOperationException();
- }
- @Override
- public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
- return new SearchScript() {
-
- @Override
- public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
-
- final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
-
- return new LeafSearchScript() {
- @Override
- public void setNextVar(String name, Object value) {
- }
-
- @Override
- public Object run() {
- String fieldName = (String) compiledScript.compiled();
- return leafLookup.doc().get(fieldName);
- }
-
- @Override
- public void setScorer(Scorer scorer) {
- }
-
- @Override
- public void setSource(Map<String, Object> source) {
- }
-
- @Override
- public void setDocument(int doc) {
- if (leafLookup != null) {
- leafLookup.setDocument(doc);
- }
- }
-
- @Override
- public long runAsLong() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public float runAsFloat() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public double runAsDouble() {
- throw new UnsupportedOperationException();
- }
- };
- }
-
- @Override
- public boolean needsScores() {
- return false;
- }
- };
- }
-
- @Override
- public void scriptRemoved(CompiledScript script) {
- }
- }
-
- /**
- * Mock plugin for the {@link FieldValueScriptEngine}
- */
- public static class FieldValueScriptPlugin extends Plugin {
-
- @Override
- public String name() {
- return FieldValueScriptEngine.NAME;
- }
-
- @Override
- public String description() {
- return "Mock script engine for " + DateHistogramIT.class;
- }
-
- public void onModule(ScriptModule module) {
- module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.TYPES));
- }
-
- }
-
- /**
- * This mock script returns the field value and adds one month to the returned date
- */
- public static class FieldValueScriptEngine implements ScriptEngineService {
-
- public static final String NAME = "field_value";
-
- public static final List<String> TYPES = Collections.singletonList(NAME);
-
- @Override
- public void close() throws IOException {
- }
-
- @Override
- public List<String> getTypes() {
- return TYPES;
- }
-
- @Override
- public List<String> getExtensions() {
- return TYPES;
- }
-
- @Override
- public boolean isSandboxed() {
- return true;
- }
-
- @Override
- public Object compile(String script, Map<String, String> params) {
- return script;
- }
-
- @Override
- public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
- throw new UnsupportedOperationException();
- }
- @Override
- public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
- return new SearchScript() {
-
- private Map<String, Object> vars = new HashMap<>(2);
-
- @Override
- public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
-
- final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
-
- return new LeafSearchScript() {
-
- @Override
- public Object unwrap(Object value) {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public void setNextVar(String name, Object value) {
- vars.put(name, value);
- }
-
- @Override
- public Object run() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public void setScorer(Scorer scorer) {
- }
-
- @Override
- public void setSource(Map<String, Object> source) {
- }
-
- @Override
- public void setDocument(int doc) {
- if (leafLookup != null) {
- leafLookup.setDocument(doc);
- }
- }
-
- @Override
- public long runAsLong() {
- return new DateTime((long) vars.get("_value"), DateTimeZone.UTC).plusMonths(1).getMillis();
- }
-
- @Override
- public float runAsFloat() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public double runAsDouble() {
- return new DateTime(new Double((double) vars.get("_value")).longValue(), DateTimeZone.UTC).plusMonths(1).getMillis();
- }
- };
- }
-
- @Override
- public boolean needsScores() {
- return false;
- }
- };
- }
-
- @Override
- public void scriptRemoved(CompiledScript script) {
- }
- }
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
index cc96555c372..2200e0e30ca 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
@@ -65,13 +65,6 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
return Collections.singleton(AssertingLocalTransport.TestPlugin.class);
}
- @Override
- protected Settings nodeSettings(int nodeOrdinal) {
- return Settings.builder()
- .put(super.nodeSettings(nodeOrdinal))
- .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY.getKey(), Version.V_1_4_0_Beta1).build();
- }
-
@Before
public void beforeEachTest() throws IOException {
prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
similarity index 92%
rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java
rename to core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
index 44f7a93ade1..b1dc61a9b9e 100644
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
@@ -16,13 +16,14 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.messy.tests;
+package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
-import org.elasticsearch.script.groovy.GroovyPlugin;
+import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsMockPlugin;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
@@ -36,8 +37,9 @@ import org.joda.time.DateTimeZone;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
-import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
@@ -55,12 +57,7 @@ import static org.hamcrest.core.IsNull.nullValue;
*
*/
@ESIntegTestCase.SuiteScopeTestCase
-public class DateRangeTests extends ESIntegTestCase {
-
- @Override
- protected Collection<Class<? extends Plugin>> nodePlugins() {
- return Collections.singleton(GroovyPlugin.class);
- }
+public class DateRangeIT extends ESIntegTestCase {
private static IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception {
return client().prepareIndex("idx", "type").setSource(jsonBuilder()
@@ -72,7 +69,11 @@ public class DateRangeTests extends ESIntegTestCase {
}
private static DateTime date(int month, int day) {
- return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC);
+ return date(month, day, DateTimeZone.UTC);
+ }
+
+ private static DateTime date(int month, int day, DateTimeZone timezone) {
+ return new DateTime(2012, month, day, 0, 0, timezone);
}
private static int numDocs;
@@ -107,18 +108,26 @@ public class DateRangeTests extends ESIntegTestCase {
ensureSearchable();
}
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return Arrays.asList(
+ DateScriptsMockPlugin.class);
+ }
+
public void testDateMath() throws Exception {
+ Map<String, Object> params = new HashMap<>();
+ params.put("fieldname", "date");
DateRangeAggregatorBuilder rangeBuilder = dateRange("range");
if (randomBoolean()) {
rangeBuilder.field("date");
} else {
- rangeBuilder.script(new Script("doc['date'].value"));
+ rangeBuilder.script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params));
}
SearchResponse response = client()
.prepareSearch("idx")
.addAggregation(
rangeBuilder.addUnboundedTo("a long time ago", "now-50y").addRange("recently", "now-50y", "now-1y")
- .addUnboundedFrom("last year", "now-1y")).execute().actionGet();
+ .addUnboundedFrom("last year", "now-1y").timeZone(DateTimeZone.forID("EST"))).execute().actionGet();
assertSearchResponse(response);
@@ -286,17 +295,25 @@ public class DateRangeTests extends ESIntegTestCase {
}
public void testSingleValueFieldWithDateMath() throws Exception {
+ String[] ids = DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]);
+ DateTimeZone timezone = DateTimeZone.forID(randomFrom(ids));
+ int timeZoneOffset = timezone.getOffset(date(2, 15));
+ // if time zone is UTC (or equivalent), time zone suffix is "Z", else something like "+03:00", which we get with the "ZZ" format
+ String feb15Suffix = timeZoneOffset == 0 ? "Z" : date(2,15, timezone).toString("ZZ");
+ String mar15Suffix = timeZoneOffset == 0 ? "Z" : date(3,15, timezone).toString("ZZ");
+ long expectedFirstBucketCount = timeZoneOffset < 0 ? 3L : 2L;
+
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateRange("range")
.field("date")
.addUnboundedTo("2012-02-15")
.addRange("2012-02-15", "2012-02-15||+1M")
- .addUnboundedFrom("2012-02-15||+1M"))
+ .addUnboundedFrom("2012-02-15||+1M")
+ .timeZone(timezone))
.execute().actionGet();
assertSearchResponse(response);
-
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
@@ -305,30 +322,31 @@ public class DateRangeTests extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
- assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
+ assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + feb15Suffix));
assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC)));
assertThat(bucket.getFromAsString(), nullValue());
- assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
- assertThat(bucket.getDocCount(), equalTo(2L));
+ assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix));
+ assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
- assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
- assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
- assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
+ assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix +
+ "-2012-03-15T00:00:00.000" + mar15Suffix));
+ assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC)));
+ assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC)));
+ assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix));
+ assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix));
assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
- assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix + "-*"));
+ assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC)));
assertThat(((DateTime) bucket.getTo()), nullValue());
- assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
+ assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix));
assertThat(bucket.getToAsString(), nullValue());
- assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
+ assertThat(bucket.getDocCount(), equalTo(numDocs - 2L - expectedFirstBucketCount));
}
public void testSingleValueFieldWithCustomKey() throws Exception {
@@ -520,10 +538,12 @@ public class DateRangeTests extends ESIntegTestCase {
public void testMultiValuedFieldWithValueScript() throws Exception {
+ Map<String, Object> params = new HashMap<>();
+ params.put("fieldname", "dates");
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateRange("range")
.field("dates")
- .script(new Script("new DateTime(_value.longValue(), DateTimeZone.UTC).plusMonths(1).getMillis()"))
+ .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params))
.addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))).execute()
.actionGet();
@@ -575,9 +595,11 @@ public class DateRangeTests extends ESIntegTestCase {
*/
public void testScriptSingleValue() throws Exception {
+ Map<String, Object> params = new HashMap<>();
+ params.put("fieldname", "date");
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateRange("range")
- .script(new Script("doc['date'].value"))
+ .script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params))
.addUnboundedTo(date(2, 15))
.addRange(date(2, 15), date(3, 15))
.addUnboundedFrom(date(3, 15)))
@@ -634,11 +656,14 @@ public class DateRangeTests extends ESIntegTestCase {
*/
public void testScriptMultiValued() throws Exception {
+ Map<String, Object> params = new HashMap<>();
+ params.put("fieldname", "dates");
SearchResponse response = client()
.prepareSearch("idx")
.addAggregation(
- dateRange("range").script(new Script("doc['dates'].values")).addUnboundedTo(date(2, 15))
- .addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))).execute().actionGet();
+ dateRange("range").script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params))
+ .addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15))
+ .addUnboundedFrom(date(3, 15))).execute().actionGet();
assertSearchResponse(response);
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
index 94156fc3a5d..71b61c0e6e6 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
@@ -22,9 +22,12 @@ package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder;
+import org.joda.time.DateTimeZone;
public class DateRangeTests extends BaseAggregationTestCase<DateRangeAggregatorBuilder> {
+ private final static String[] timeZoneIds = DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]);
+
@Override
protected DateRangeAggregatorBuilder createTestAggregatorBuilder() {
int numRanges = randomIntBetween(1, 10);
@@ -56,6 +59,9 @@ public class DateRangeTests extends BaseAggregationTestCase params) {
+ return new ExtractFieldScript((String) params.get("fieldname"));
+ }
+ @Override
+ public boolean needsScores() {
+ return false;
+ }
+ }
+
+ public static class ExtractFieldScript extends AbstractSearchScript {
+
+ public static final String NAME = "extract_field";
+ private String fieldname;
+
+ public ExtractFieldScript(String fieldname) {
+ this.fieldname = fieldname;
+ }
+
+ @Override
+ public Object run() {
+ return doc().get(fieldname);
+ }
+ }
+
+ public static class PlusOneMonthScriptFactory implements NativeScriptFactory {
+
+ @Override
+ public ExecutableScript newScript(Map<String, Object> params) {
+ return new PlusOneMonthScript((String) params.get("fieldname"));
+ }
+
+ @Override
+ public boolean needsScores() {
+ return false;
+ }
+ }
+
+ /**
+ * This mock script takes date field value and adds one month to the returned date
+ */
+ public static class PlusOneMonthScript extends AbstractSearchScript {
+
+ public static final String NAME = "date_plus_1_month";
+ private String fieldname;
+
+ private Map<String, Object> vars = new HashMap<>();
+
+ public PlusOneMonthScript(String fieldname) {
+ this.fieldname = fieldname;
+ }
+
+ @Override
+ public void setNextVar(String name, Object value) {
+ vars.put(name, value);
+ }
+
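+ // "_value" is supplied per document through setNextVar by the value script mechanism; shift it one month forward.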
+ @Override
+ public long runAsLong() {
+ return new DateTime((long) vars.get("_value"), DateTimeZone.UTC).plusMonths(1).getMillis();
+ }
+
+ @Override
+ public double runAsDouble() {
+ return new DateTime(new Double((double) vars.get("_value")).longValue(), DateTimeZone.UTC).plusMonths(1).getMillis();
+ }
+
+ @Override
+ public Object run() {
+ throw new UnsupportedOperationException();
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java
index 8312f4aca04..6d2d11e2799 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java
@@ -67,7 +67,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
return pluginList(InternalSettingsPlugin.class); // uses index.version.created
}
- private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ private Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
private IndexRequestBuilder indexCity(String idx, String name, String... latLons) throws Exception {
XContentBuilder source = jsonBuilder().startObject().field("city", name);
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java
index 22413a7b319..5aa7ba44466 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java
@@ -63,7 +63,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
return pluginList(InternalSettingsPlugin.class); // uses index.version.created
}
- private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
+ private Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
static ObjectIntMap<String> expectedDocCountsForGeoHash = null;
static ObjectIntMap<String> multiValuedExpectedDocCountsForGeoHash = null;
diff --git a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java
index 60810ee4df6..7587866b144 100644
--- a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java
@@ -21,7 +21,7 @@ package org.elasticsearch.search.fetch.innerhits;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
-import org.apache.lucene.document.IntField;
+import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
@@ -66,7 +66,7 @@ public class NestedChildrenFilterTests extends ESTestCase {
Document parenDoc = new Document();
parenDoc.add(new StringField("type", "parent", Field.Store.NO));
- parenDoc.add(new IntField("num_child_docs", numChildDocs, Field.Store.YES));
+ parenDoc.add(new LegacyIntField("num_child_docs", numChildDocs, Field.Store.YES));
docs.add(parenDoc);
writer.addDocuments(docs);
}
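
The IntField to LegacyIntField rename reflects Lucene 6 moving the old trie-encoded numerics behind Legacy* names while dimensional points become the preferred encoding. A minimal sketch of both, assuming plain Lucene 6 APIs (field names are illustrative):

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.IntPoint;
    import org.apache.lucene.document.LegacyIntField;
    import org.apache.lucene.document.StoredField;

    public class LegacyNumericExample {
        public static void main(String[] args) {
            Document doc = new Document();
            // old trie-encoded numeric field, kept for index back-compat under a Legacy* name
            doc.add(new LegacyIntField("num_child_docs", 7, Field.Store.YES));
            // new dimensional points encoding; points are index-only, so store separately
            doc.add(new IntPoint("num_child_docs_points", 7));
            doc.add(new StoredField("num_child_docs_points", 7));
        }
    }
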
diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java
index e96b4d69b00..175adc27892 100644
--- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java
+++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java
@@ -31,8 +31,10 @@ import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
+import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.SearchHits;
@@ -50,6 +52,7 @@ import java.util.Locale;
import static org.elasticsearch.client.Requests.indexRequest;
import static org.elasticsearch.client.Requests.searchRequest;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.index.query.QueryBuilders.boostingQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
@@ -74,6 +77,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
return pluginList(InternalSettingsPlugin.class); // uses index.version.created
}
+ private final QueryBuilder baseQuery = constantScoreQuery(termQuery("test", "value"));
+
public void testDistanceScoreGeoLinGaussExp() throws Exception {
assertAcked(prepareCreate("test").addMapping(
"type1",
@@ -117,7 +122,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
ActionFuture<SearchResponse> response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
- searchSource().query(constantScoreQuery(termQuery("test", "value")))));
+ searchSource().query(baseQuery)));
SearchResponse sr = response.actionGet();
SearchHits sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2)));
@@ -125,7 +130,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().query(
- functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km")))));
+ functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km")))));
sr = response.actionGet();
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2)));
@@ -136,7 +141,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
- searchSource().query(constantScoreQuery(termQuery("test", "value")))));
+ searchSource().query(baseQuery)));
sr = response.actionGet();
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2)));
@@ -144,7 +149,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().query(
- functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("loc", lonlat, "1000km")))));
+ functionScoreQuery(baseQuery, linearDecayFunction("loc", lonlat, "1000km")))));
sr = response.actionGet();
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2)));
@@ -155,7 +160,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
- searchSource().query(constantScoreQuery(termQuery("test", "value")))));
+ searchSource().query(baseQuery)));
sr = response.actionGet();
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2)));
@@ -163,7 +168,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().query(
- functionScoreQuery(constantScoreQuery(termQuery("test", "value")), exponentialDecayFunction("loc", lonlat, "1000km")))));
+ functionScoreQuery(baseQuery, exponentialDecayFunction("loc", lonlat, "1000km")))));
sr = response.actionGet();
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2)));
@@ -314,30 +319,30 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
.setSource(
jsonBuilder().startObject().field("test", "value").startObject("loc").field("lat", 20).field("lon", 11).endObject()
.endObject()).setRefresh(true).get();
-
+ FunctionScoreQueryBuilder baseQuery = functionScoreQuery(constantScoreQuery(termQuery("test", "value")), ScoreFunctionBuilders.weightFactorFunction(randomIntBetween(1, 10)));
GeoPoint point = new GeoPoint(20, 11);
ActionFuture<SearchResponse> response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().query(
- functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", point, "1000km")).boostMode(
- CombineFunction.MULTIPLY))));
+ functionScoreQuery(baseQuery, gaussDecayFunction("loc", point, "1000km")).boostMode(
+ CombineFunction.REPLACE))));
SearchResponse sr = response.actionGet();
SearchHits sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
- assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5));
+ assertThat((double) sh.getAt(0).score(), closeTo(1.0, 1.e-5));
+ // this is equivalent to new GeoPoint(20, 11); the coordinates are just flipped, so the scores must be the same
float[] coords = { 11, 20 };
-
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().query(
- functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", coords, "1000km")).boostMode(
- CombineFunction.MULTIPLY))));
+ functionScoreQuery(baseQuery, gaussDecayFunction("loc", coords, "1000km")).boostMode(
+ CombineFunction.REPLACE))));
sr = response.actionGet();
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
- assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5));
+ assertThat((double) sh.getAt(0).score(), closeTo(1.0f, 1.e-5));
}
public void testCombineModes() throws Exception {
@@ -348,26 +353,25 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
ensureYellow();
client().prepareIndex().setType("type1").setId("1").setIndex("test")
- .setSource(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject()).setRefresh(true).get();
-
- // function score should return 0.5 for this function
-
+ .setSource(jsonBuilder().startObject().field("test", "value value").field("num", 1.0).endObject()).setRefresh(true).get();
+ FunctionScoreQueryBuilder baseQuery = functionScoreQuery(constantScoreQuery(termQuery("test", "value")), ScoreFunctionBuilders.weightFactorFunction(2));
+ // decay score should return 0.5 for this function and baseQuery should return 2.0f as its score
ActionFuture<SearchResponse> response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().query(
- functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost(
- 2.0f).boostMode(CombineFunction.MULTIPLY))));
+ functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5))
+ .boostMode(CombineFunction.MULTIPLY))));
SearchResponse sr = response.actionGet();
SearchHits sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
- assertThat((double) sh.getAt(0).score(), closeTo(0.153426408, 1.e-5));
+ assertThat((double) sh.getAt(0).score(), closeTo(1.0, 1.e-5));
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().query(
- functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost(
- 2.0f).boostMode(CombineFunction.REPLACE))));
+ functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5))
+ .boostMode(CombineFunction.REPLACE))));
sr = response.actionGet();
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
@@ -377,48 +381,48 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().query(
- functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost(
- 2.0f).boostMode(CombineFunction.SUM))));
+ functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5))
+ .boostMode(CombineFunction.SUM))));
sr = response.actionGet();
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
- assertThat((double) sh.getAt(0).score(), closeTo(0.30685282 + 0.5, 1.e-5));
+ assertThat((double) sh.getAt(0).score(), closeTo(2.0 + 0.5, 1.e-5));
logger.info("--> Hit[0] {} Explanation:\n {}", sr.getHits().getAt(0).id(), sr.getHits().getAt(0).explanation());
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().query(
- functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost(
- 2.0f).boostMode(CombineFunction.AVG))));
+ functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5))
+ .boostMode(CombineFunction.AVG))));
sr = response.actionGet();
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
- assertThat((double) sh.getAt(0).score(), closeTo((0.30685282 + 0.5) / 2, 1.e-5));
+ assertThat((double) sh.getAt(0).score(), closeTo((2.0 + 0.5) / 2, 1.e-5));
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().query(
- functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost(
- 2.0f).boostMode(CombineFunction.MIN))));
- sr = response.actionGet();
- sh = sr.getHits();
- assertThat(sh.getTotalHits(), equalTo((long) (1)));
- assertThat(sh.getAt(0).getId(), equalTo("1"));
- assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5));
-
- response = client().search(
- searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
- searchSource().query(
- functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost(
- 2.0f).boostMode(CombineFunction.MAX))));
+ functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5))
+ .boostMode(CombineFunction.MIN))));
sr = response.actionGet();
sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (1)));
assertThat(sh.getAt(0).getId(), equalTo("1"));
assertThat((double) sh.getAt(0).score(), closeTo(0.5, 1.e-5));
+ response = client().search(
+ searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
+ searchSource().query(
+ functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5))
+ .boostMode(CombineFunction.MAX))));
+ sr = response.actionGet();
+ sh = sr.getHits();
+ assertThat(sh.getTotalHits(), equalTo((long) (1)));
+ assertThat(sh.getAt(0).getId(), equalTo("1"));
+ assertThat((double) sh.getAt(0).score(), closeTo(2.0, 1.e-5));
+
}
public void testExceptionThrownIfScaleLE0() throws Exception {
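
The rewritten assertions in testCombineModes follow directly from the combine-mode arithmetic: the weight-factor base query scores 2.0 and the gauss decay contributes 0.5, so each boost mode combines those two numbers. A sketch of that arithmetic, matching the closeTo expectations above:

    public class CombineModeArithmetic {
        public static void main(String[] args) {
            // assumption: queryScore = 2.0 (weight-factor base query),
            // functionScore = 0.5 (gaussDecayFunction("num", 0.0, 1.0, null, 0.5) at num = 1.0)
            double queryScore = 2.0;
            double functionScore = 0.5;
            System.out.println("multiply = " + (queryScore * functionScore));        // 1.0
            System.out.println("replace  = " + functionScore);                       // 0.5
            System.out.println("sum      = " + (queryScore + functionScore));        // 2.5
            System.out.println("avg      = " + ((queryScore + functionScore) / 2));  // 1.25
            System.out.println("min      = " + Math.min(queryScore, functionScore)); // 0.5
            System.out.println("max      = " + Math.max(queryScore, functionScore)); // 2.0
        }
    }
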
@@ -509,7 +513,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
ActionFuture<SearchResponse> response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().query(
- functionScoreQuery(constantScoreQuery(termQuery("test", "value")), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
+ functionScoreQuery(baseQuery, new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num1", "2013-05-28", "+3d")),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num2", "0.0", "1"))
}).scoreMode(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY))));
@@ -733,7 +737,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
ActionFuture<SearchResponse> response = client().search(
searchRequest().source(
- searchSource().query(constantScoreQuery(termQuery("test", "value")))));
+ searchSource().query(baseQuery)));
SearchResponse sr = response.actionGet();
assertSearchHits(sr, "1", "2");
SearchHits sh = sr.getHits();
@@ -745,7 +749,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
response = client().search(
searchRequest().source(
searchSource().query(
- functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MIN)))));
+ functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MIN)))));
sr = response.actionGet();
assertSearchHits(sr, "1", "2");
sh = sr.getHits();
@@ -755,7 +759,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
response = client().search(
searchRequest().source(
searchSource().query(
- functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MAX)))));
+ functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MAX)))));
sr = response.actionGet();
assertSearchHits(sr, "1", "2");
sh = sr.getHits();
@@ -784,7 +788,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
response = client().search(
searchRequest().source(
searchSource().query(
- functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.SUM)))));
+ functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.SUM)))));
sr = response.actionGet();
assertSearchHits(sr, "1", "2");
sh = sr.getHits();
@@ -795,7 +799,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
response = client().search(
searchRequest().source(
searchSource().query(
- functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.AVG)))));
+ functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.AVG)))));
sr = response.actionGet();
assertSearchHits(sr, "1", "2");
sh = sr.getHits();
diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
index 8a060af2ab0..8f04bd72756 100644
--- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
+++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
@@ -19,10 +19,10 @@
package org.elasticsearch.search.geo;
-import com.spatial4j.core.context.SpatialContext;
-import com.spatial4j.core.distance.DistanceUtils;
-import com.spatial4j.core.exception.InvalidShapeException;
-import com.spatial4j.core.shape.Shape;
+import org.locationtech.spatial4j.context.SpatialContext;
+import org.locationtech.spatial4j.distance.DistanceUtils;
+import org.locationtech.spatial4j.exception.InvalidShapeException;
+import org.locationtech.spatial4j.shape.Shape;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java
index 7afbeaa9abf..e41e3c178c5 100644
--- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java
@@ -19,7 +19,7 @@
package org.elasticsearch.search.geo;
-import com.spatial4j.core.shape.Rectangle;
+import org.locationtech.spatial4j.shape.Rectangle;
import com.vividsolutions.jts.geom.Coordinate;
import org.elasticsearch.action.get.GetResponse;
diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
index 9f898a47c06..f34d5b33c9d 100644
--- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
@@ -268,78 +268,6 @@ public class HighlighterSearchIT extends ESIntegTestCase {
equalTo("Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com"));
}
- public void testNgramHighlightingPreLucene42() throws IOException {
- assertAcked(prepareCreate("test")
- .addMapping("test",
- "name", "type=text,analyzer=name_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets",
- "name2", "type=text,analyzer=name2_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets")
- .setSettings(settingsBuilder()
- .put(indexSettings())
- .put("analysis.filter.my_ngram.max_gram", 20)
- .put("analysis.filter.my_ngram.version", "4.1")
- .put("analysis.filter.my_ngram.min_gram", 1)
- .put("analysis.filter.my_ngram.type", "ngram")
- .put("analysis.tokenizer.my_ngramt.max_gram", 20)
- .put("analysis.tokenizer.my_ngramt.version", "4.1")
- .put("analysis.tokenizer.my_ngramt.min_gram", 1)
- .put("analysis.tokenizer.my_ngramt.type", "ngram")
- .put("analysis.analyzer.name_index_analyzer.tokenizer", "my_ngramt")
- .put("analysis.analyzer.name2_index_analyzer.tokenizer", "whitespace")
- .putArray("analysis.analyzer.name2_index_analyzer.filter", "lowercase", "my_ngram")
- .put("analysis.analyzer.name_search_analyzer.tokenizer", "whitespace")
- .put("analysis.analyzer.name_search_analyzer.filter", "lowercase")));
- ensureYellow();
- client().prepareIndex("test", "test", "1")
- .setSource("name", "logicacmg ehemals avinci - the know how company",
- "name2", "logicacmg ehemals avinci - the know how company").get();
- client().prepareIndex("test", "test", "2")
- .setSource("name", "avinci, unilog avinci, logicacmg, logica",
- "name2", "avinci, unilog avinci, logicacmg, logica").get();
- refresh();
-
- SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica m")))
- .highlighter(new HighlightBuilder().field("name")).get();
- assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
- assertHighlight(search, 1, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
-
- search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica ma")))
- .highlighter(new HighlightBuilder().field("name")).get();
- assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
- assertHighlight(search, 1, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
-
- search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica")))
- .highlighter(new HighlightBuilder().field("name")).get();
- assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
- assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
-
- search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica m")))
- .highlighter(new HighlightBuilder().field("name2")).get();
- assertHighlight(search, 0, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
- assertHighlight(search, 1, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
-
- search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica ma")))
- .highlighter(new HighlightBuilder().field("name2")).get();
- assertHighlight(search, 0, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
- assertHighlight(search, 1, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
-
- search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica")))
- .highlighter(new HighlightBuilder().field("name2")).get();
- assertHighlight(search, 0, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
- assertHighlight(search, 1, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"),
- equalTo("avinci, unilog avinci, logicacmg, logica")));
- }
-
public void testNgramHighlighting() throws IOException {
assertAcked(prepareCreate("test")
.addMapping("test",
diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java
index fad1cc3a0ef..084e07e0389 100644
--- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java
@@ -20,15 +20,12 @@
package org.elasticsearch.search.innerhits;
import org.apache.lucene.util.ArrayUtil;
-import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.BoolQueryBuilder;
-import org.elasticsearch.index.query.HasChildQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.plugins.Plugin;
@@ -75,7 +72,7 @@ import static org.hamcrest.Matchers.nullValue;
public class InnerHitsIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
- return pluginList(MockScriptEngine.TestPlugin.class, InternalSettingsPlugin.class);
+ return pluginList(MockScriptEngine.TestPlugin.class);
}
public void testSimpleNested() throws Exception {
@@ -753,160 +750,6 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
}
- public void testNestedInnerHitsWithStoredFieldsAndNoSourceBackcompat() throws Exception {
- assertAcked(prepareCreate("articles")
- .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
- .addMapping("article", jsonBuilder().startObject()
- .startObject("_source").field("enabled", false).endObject()
- .startObject("properties")
- .startObject("comments")
- .field("type", "nested")
- .startObject("properties")
- .startObject("message").field("type", "text").field("store", true).endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- )
- );
-
- List<IndexRequestBuilder> requests = new ArrayList<>();
- requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
- .field("title", "quick brown fox")
- .startObject("comments").field("message", "fox eat quick").endObject()
- .endObject()));
- indexRandom(true, requests);
-
- SearchResponse response = client().prepareSearch("articles")
- .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, new InnerHitsBuilder.InnerHit().field("comments.message"))))
- .get();
- assertNoFailures(response);
- assertHitCount(response, 1);
- assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).fields().get("comments.message").getValue(), equalTo("fox eat quick"));
- }
-
- public void testNestedInnerHitsWithHighlightOnStoredFieldBackcompat() throws Exception {
- assertAcked(prepareCreate("articles")
- .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
- .addMapping("article", jsonBuilder().startObject()
- .startObject("_source").field("enabled", false).endObject()
- .startObject("properties")
- .startObject("comments")
- .field("type", "nested")
- .startObject("properties")
- .startObject("message").field("type", "text").field("store", true).endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- )
- );
-
- List<IndexRequestBuilder> requests = new ArrayList<>();
- requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
- .field("title", "quick brown fox")
- .startObject("comments").field("message", "fox eat quick").endObject()
- .endObject()));
- indexRandom(true, requests);
- InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit();
- builder.highlighter(new HighlightBuilder().field("comments.message"));
- SearchResponse response = client().prepareSearch("articles")
- .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder)))
- .get();
- assertNoFailures(response);
- assertHitCount(response, 1);
- assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
- assertThat(String.valueOf(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).highlightFields().get("comments.message").getFragments()[0]), equalTo("fox eat quick"));
- }
-
- public void testNestedInnerHitsWithExcludeSourceBackcompat() throws Exception {
- assertAcked(prepareCreate("articles").setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
- .addMapping("article", jsonBuilder().startObject()
- .startObject("_source").field("excludes", new String[]{"comments"}).endObject()
- .startObject("properties")
- .startObject("comments")
- .field("type", "nested")
- .startObject("properties")
- .startObject("message").field("type", "text").field("store", true).endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- )
- );
-
- List<IndexRequestBuilder> requests = new ArrayList<>();
- requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
- .field("title", "quick brown fox")
- .startObject("comments").field("message", "fox eat quick").endObject()
- .endObject()));
- indexRandom(true, requests);
- InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit();
- builder.field("comments.message");
- builder.setFetchSource(true);
- SearchResponse response = client().prepareSearch("articles")
- .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder)))
- .get();
- assertNoFailures(response);
- assertHitCount(response, 1);
- assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).fields().get("comments.message").getValue(), equalTo("fox eat quick"));
- }
-
- public void testNestedInnerHitsHiglightWithExcludeSourceBackcompat() throws Exception {
- assertAcked(prepareCreate("articles").setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
- .addMapping("article", jsonBuilder().startObject()
- .startObject("_source").field("excludes", new String[]{"comments"}).endObject()
- .startObject("properties")
- .startObject("comments")
- .field("type", "nested")
- .startObject("properties")
- .startObject("message").field("type", "text").field("store", true).endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- )
- );
-
- List<IndexRequestBuilder> requests = new ArrayList<>();
- requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
- .field("title", "quick brown fox")
- .startObject("comments").field("message", "fox eat quick").endObject()
- .endObject()));
- indexRandom(true, requests);
- InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit();
- builder.highlighter(new HighlightBuilder().field("comments.message"));
- SearchResponse response = client().prepareSearch("articles")
- .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder)))
- .get();
- assertNoFailures(response);
- assertHitCount(response, 1);
- assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
- assertThat(String.valueOf(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).highlightFields().get("comments.message").getFragments()[0]), equalTo("fox eat quick"));
- }
-
public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception {
assertAcked(prepareCreate("articles")
.addMapping("article", jsonBuilder().startObject()
diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java
index f65b17288ae..e0bc26c9296 100644
--- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java
@@ -180,7 +180,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
// the doc id is the tie-breaker
}
assertThat(topNIds, empty());
- assertThat(searchResponse.getHits().hits()[0].getScore(), equalTo(searchResponse.getHits().hits()[1].getScore()));
+ assertThat(searchResponse.getHits().hits()[0].getScore(), greaterThan(searchResponse.getHits().hits()[1].getScore()));
searchResponse = client().prepareSearch("test")
.setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category")
diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
index 079363719f1..44b8636d51a 100644
--- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
@@ -375,9 +375,9 @@ public class SearchQueryIT extends ESIntegTestCase {
// try the same with multi match query
searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the quick brown", "field1", "field2").cutoffFrequency(3).operator(Operator.AND)).get();
assertHitCount(searchResponse, 3L);
- assertFirstHit(searchResponse, hasId("3")); // better score due to different query stats
- assertSecondHit(searchResponse, hasId("1"));
- assertThirdHit(searchResponse, hasId("2"));
+ assertFirstHit(searchResponse, hasId("1"));
+ assertSecondHit(searchResponse, hasId("2"));
+ assertThirdHit(searchResponse, hasId("3"));
}
public void testCommonTermsQueryStackedTokens() throws Exception {
@@ -467,9 +467,9 @@ public class SearchQueryIT extends ESIntegTestCase {
// try the same with multi match query
searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the fast brown", "field1", "field2").cutoffFrequency(3).operator(Operator.AND)).get();
assertHitCount(searchResponse, 3L);
- assertFirstHit(searchResponse, hasId("3")); // better score due to different query stats
- assertSecondHit(searchResponse, hasId("1"));
- assertThirdHit(searchResponse, hasId("2"));
+ assertFirstHit(searchResponse, hasId("1"));
+ assertSecondHit(searchResponse, hasId("2"));
+ assertThirdHit(searchResponse, hasId("3"));
}
public void testQueryStringAnalyzedWildcard() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java
index a39c618fe9d..f0bb35cc9d1 100644
--- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java
@@ -39,7 +39,6 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
-import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java
index 812928dee28..2143c7be9e0 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java
@@ -96,7 +96,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
writer.addDocument(doc);
}
- DirectoryReader ir = DirectoryReader.open(writer, false);
+ DirectoryReader ir = DirectoryReader.open(writer);
WordScorer wordScorer = new LaplaceScorer(ir, MultiFields.getTerms(ir, "body_ngram"), "body_ngram", 0.95d, new BytesRef(" "), 0.5f);
NoisyChannelSpellChecker suggester = new NoisyChannelSpellChecker();
@@ -238,7 +238,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
writer.addDocument(doc);
}
- DirectoryReader ir = DirectoryReader.open(writer, false);
+ DirectoryReader ir = DirectoryReader.open(writer);
LaplaceScorer wordScorer = new LaplaceScorer(ir, MultiFields.getTerms(ir, "body_ngram"), "body_ngram", 0.95d, new BytesRef(" "), 0.5f);
NoisyChannelSpellChecker suggester = new NoisyChannelSpellChecker();
DirectSpellChecker spellchecker = new DirectSpellChecker();
@@ -321,7 +321,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
writer.addDocument(doc);
}
- DirectoryReader ir = DirectoryReader.open(writer, false);
+ DirectoryReader ir = DirectoryReader.open(writer);
WordScorer wordScorer = new LinearInterpoatingScorer(ir, MultiFields.getTerms(ir, "body_ngram"), "body_ngram", 0.85d, new BytesRef(" "), 0.5, 0.4, 0.1);
NoisyChannelSpellChecker suggester = new NoisyChannelSpellChecker();
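
These hunks track the removal of the DirectoryReader.open(writer, applyAllDeletes) variant used here; in Lucene 6 the single-argument form is used and deletes are always applied. A standalone sketch, assuming plain Lucene 6 (no Elasticsearch code):

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.RAMDirectory;

    public class NrtReaderExample {
        public static void main(String[] args) throws Exception {
            RAMDirectory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
            Document doc = new Document();
            doc.add(new Field("body", "some text", TextField.TYPE_NOT_STORED));
            writer.addDocument(doc);
            // Lucene 6: no boolean overload; the NRT reader always sees deletes applied
            DirectoryReader ir = DirectoryReader.open(writer);
            System.out.println(ir.numDocs());
            ir.close();
            writer.close();
            dir.close();
        }
    }
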
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java
index e4a8ae72b91..51152733bf8 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java
@@ -132,7 +132,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase {
Document doc = new Document();
doc.add(new Field("field", "someText", TextField.TYPE_NOT_STORED));
writer.addDocument(doc);
- DirectoryReader ir = DirectoryReader.open(writer, false);
+ DirectoryReader ir = DirectoryReader.open(writer);
WordScorer wordScorer = testModel.buildWordScorerFactory().newScorer(ir, MultiFields.getTerms(ir , "field"), "field", 0.9d, BytesRefs.toBytesRef(" "));
assertWordScorer(wordScorer, testModel);
diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java
index 7e9bd14f9f3..dc803a46412 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java
@@ -137,6 +137,32 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase {
return null;
}
+ public static void blockAllDataNodes(String repository) {
+ for(RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) {
+ ((MockRepository)repositoriesService.repository(repository)).blockOnDataFiles(true);
+ }
+ }
+
+ public static void unblockAllDataNodes(String repository) {
+ for(RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) {
+ ((MockRepository)repositoriesService.repository(repository)).unblock();
+ }
+ }
+
+ public void waitForBlockOnAnyDataNode(String repository, TimeValue timeout) throws InterruptedException {
+ if (false == awaitBusy(() -> {
+ for(RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) {
+ MockRepository mockRepository = (MockRepository) repositoriesService.repository(repository);
+ if (mockRepository.blocked()) {
+ return true;
+ }
+ }
+ return false;
+ }, timeout.millis(), TimeUnit.MILLISECONDS)) {
+ fail("Timeout waiting for repository block on any data node!!!");
+ }
+ }
+
public static void unblockNode(String node) {
((MockRepository)internalCluster().getInstance(RepositoriesService.class, node).repository("test-repo")).unblock();
}
diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
index 65337d4b632..9fb2b0f9989 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -1865,6 +1865,66 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
}
}
+ public void testCloseIndexDuringRestore() throws Exception {
+ Client client = client();
+
+ logger.info("--> creating repository");
+ assertAcked(client.admin().cluster().preparePutRepository("test-repo")
+ .setType("mock").setSettings(Settings.settingsBuilder()
+ .put("location", randomRepoPath())
+ .put("compress", randomBoolean())
+ .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES)
+ ));
+
+ createIndex("test-idx-1", "test-idx-2");
+ ensureGreen();
+
+ logger.info("--> indexing some data");
+ for (int i = 0; i < 100; i++) {
+ index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i);
+ index("test-idx-2", "doc", Integer.toString(i), "foo", "baz" + i);
+ }
+ refresh();
+ assertThat(client.prepareSearch("test-idx-1").setSize(0).get().getHits().totalHits(), equalTo(100L));
+ assertThat(client.prepareSearch("test-idx-2").setSize(0).get().getHits().totalHits(), equalTo(100L));
+
+ logger.info("--> snapshot");
+ assertThat(client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap")
+ .setIndices("test-idx-*").setWaitForCompletion(true).get().getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
+
+ logger.info("--> deleting indices before restoring");
+ assertAcked(client.admin().indices().prepareDelete("test-idx-*").get());
+
+ blockAllDataNodes("test-repo");
+ logger.info("--> execution will be blocked on all data nodes");
+
+ logger.info("--> start restore");
+ ListenableActionFuture<RestoreSnapshotResponse> restoreFut = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap")
+ .setWaitForCompletion(true)
+ .execute();
+
+ logger.info("--> waiting for block to kick in");
+ waitForBlockOnAnyDataNode("test-repo", TimeValue.timeValueSeconds(60));
+
+ logger.info("--> close index while restore is running");
+ try {
+ client.admin().indices().prepareClose("test-idx-1").get();
+ fail("Expected closing index to fail during restore");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("Cannot close indices that are being restored: [test-idx-1]"));
+ }
+
+ logger.info("--> unblocking all data nodes");
+ unblockAllDataNodes("test-repo");
+
+ logger.info("--> wait for restore to finish");
+ RestoreSnapshotResponse restoreSnapshotResponse = restoreFut.get();
+ logger.info("--> check that all shards were recovered");
+ assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
+ assertThat(restoreSnapshotResponse.getRestoreInfo().successfulShards(), greaterThan(0));
+ assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0));
+ }
+
public void testDeleteOrphanSnapshot() throws Exception {
Client client = client();
diff --git a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java
index 427dce714e8..3193aaf458e 100644
--- a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java
+++ b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java
@@ -21,13 +21,11 @@ package org.elasticsearch.test;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingService;
-import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.component.LifecycleListener;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.DiscoveryStats;
-import org.elasticsearch.node.service.NodeService;
public class NoopDiscovery implements Discovery {
@@ -42,11 +40,6 @@ public class NoopDiscovery implements Discovery {
return null;
}
- @Override
- public void setNodeService(@Nullable NodeService nodeService) {
-
- }
-
@Override
public void setRoutingService(RoutingService routingService) {
diff --git a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
index 4cc7f8f8487..95984da55f6 100644
--- a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
+++ b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
@@ -20,12 +20,12 @@
package org.elasticsearch.test.geo;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
-import com.spatial4j.core.context.jts.JtsSpatialContext;
-import com.spatial4j.core.distance.DistanceUtils;
-import com.spatial4j.core.exception.InvalidShapeException;
-import com.spatial4j.core.shape.Point;
-import com.spatial4j.core.shape.Rectangle;
-import com.spatial4j.core.shape.impl.Range;
+import org.locationtech.spatial4j.context.jts.JtsSpatialContext;
+import org.locationtech.spatial4j.distance.DistanceUtils;
+import org.locationtech.spatial4j.exception.InvalidShapeException;
+import org.locationtech.spatial4j.shape.Point;
+import org.locationtech.spatial4j.shape.Rectangle;
+import org.locationtech.spatial4j.shape.impl.Range;
import com.vividsolutions.jts.algorithm.ConvexHull;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
@@ -45,7 +45,7 @@ import org.junit.Assert;
import java.util.Random;
-import static com.spatial4j.core.shape.SpatialRelation.CONTAINS;
+import static org.locationtech.spatial4j.shape.SpatialRelation.CONTAINS;
/**
* Random geoshape generation utilities for randomized {@code geo_shape} type testing
diff --git a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java
index 3400f9637ff..5fff4a61f86 100644
--- a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java
+++ b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java
@@ -19,12 +19,12 @@
package org.elasticsearch.test.hamcrest;
-import com.spatial4j.core.shape.Shape;
-import com.spatial4j.core.shape.ShapeCollection;
-import com.spatial4j.core.shape.impl.GeoCircle;
-import com.spatial4j.core.shape.impl.RectangleImpl;
-import com.spatial4j.core.shape.jts.JtsGeometry;
-import com.spatial4j.core.shape.jts.JtsPoint;
+import org.locationtech.spatial4j.shape.Shape;
+import org.locationtech.spatial4j.shape.ShapeCollection;
+import org.locationtech.spatial4j.shape.impl.GeoCircle;
+import org.locationtech.spatial4j.shape.impl.RectangleImpl;
+import org.locationtech.spatial4j.shape.jts.JtsGeometry;
+import org.locationtech.spatial4j.shape.jts.JtsPoint;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.LineString;
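
Spatial4j moved to LocationTech with the 0.6 release, so only the package root changes in these imports; the API is otherwise unchanged. A minimal sketch against the relocated packages (the coordinates are illustrative):

    import org.locationtech.spatial4j.context.SpatialContext;
    import org.locationtech.spatial4j.shape.Point;
    import org.locationtech.spatial4j.shape.Rectangle;
    import org.locationtech.spatial4j.shape.SpatialRelation;

    public class Spatial4jPackageExample {
        public static void main(String[] args) {
            // same API as com.spatial4j.core.*, only the package root changed
            SpatialContext ctx = SpatialContext.GEO;
            Point point = ctx.makePoint(11, 20);              // lon, lat
            Rectangle box = ctx.makeRectangle(0, 15, 10, 30); // minX, maxX, minY, maxY
            System.out.println(box.relate(point) == SpatialRelation.CONTAINS); // true
        }
    }
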
diff --git a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
index 5e9bc80b9a9..5fc24094bd3 100644
--- a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
@@ -243,9 +243,9 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
// fuzzy queries
assertExplanation(QueryBuilders.fuzzyQuery("field", "the").fuzziness(Fuzziness.fromEdits(2)),
- containsString("field:the field:tree^0.3333333"), true);
+ containsString("field:the (field:tree)^0.3333333"), true);
assertExplanation(QueryBuilders.fuzzyQuery("field", "jump"),
- containsString("field:jumps^0.75"), true);
+ containsString("(field:jumps)^0.75"), true);
// more like this queries
assertExplanation(QueryBuilders.moreLikeThisQuery(new String[] { "field" }, null, MoreLikeThisQueryBuilder.ids("1"))
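
The new parentheses in these explanations come from Lucene 6 representing per-term boosts with an immutable BoostQuery wrapper, whose toString() parenthesizes the wrapped query. A small sketch reproducing the rendering:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BoostQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    public class BoostToStringExample {
        public static void main(String[] args) {
            // boosts now live on a BoostQuery wrapper rather than on the query itself,
            // so the explanation reads "(field:jumps)^0.75" instead of "field:jumps^0.75"
            Query boosted = new BoostQuery(new TermQuery(new Term("field", "jumps")), 0.75f);
            System.out.println(boosted.toString()); // (field:jumps)^0.75
        }
    }
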
diff --git a/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json b/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json
index 233d6f3e3d7..0ed95e16332 100644
--- a/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json
+++ b/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json
@@ -9,9 +9,7 @@
},
"my_case_sensitive_keep_filter":{
"type":"keep",
- "keep_words" : ["Hello", "worlD"],
- "enable_position_increments" : false,
- "version" : "4.2"
+ "keep_words" : ["Hello", "worlD"]
}
}
}
diff --git a/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1
deleted file mode 100644
index dcdeb2cb477..00000000000
--- a/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1e0e8243a4410be20c34683034fafa7bb52e55cc
\ No newline at end of file
diff --git a/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..74d21bae946
--- /dev/null
+++ b/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+3510af19947deadd929123aaf14d69b4bdec759a
\ No newline at end of file
diff --git a/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1
deleted file mode 100644
index dd5c846363a..00000000000
--- a/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-68480974b2f54f519763632a7c1c5d51cbff3805
\ No newline at end of file
diff --git a/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..ee6143bec14
--- /dev/null
+++ b/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+247ad7c17cb7c742d7a9abd5d9980e4fab815178
\ No newline at end of file
diff --git a/distribution/licenses/lucene-core-5.5.0.jar.sha1 b/distribution/licenses/lucene-core-5.5.0.jar.sha1
deleted file mode 100644
index 70bd0b63bba..00000000000
--- a/distribution/licenses/lucene-core-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a74fd869bb5ad7fe6b4cd29df9543a34aea81164
\ No newline at end of file
diff --git a/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..2d39f84d21e
--- /dev/null
+++ b/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+c0712dbec58abad545646edab67d58f7373f5329
\ No newline at end of file
diff --git a/distribution/licenses/lucene-grouping-5.5.0.jar.sha1 b/distribution/licenses/lucene-grouping-5.5.0.jar.sha1
deleted file mode 100644
index f905a2081b6..00000000000
--- a/distribution/licenses/lucene-grouping-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-437cacec0cfa349b1dee049a7c0e32df3b8ecc07
\ No newline at end of file
diff --git a/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..a3ce82c8a04
--- /dev/null
+++ b/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+7573e3efb12dd16fdc991edaf408877dab20c030
\ No newline at end of file
diff --git a/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1 b/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1
deleted file mode 100644
index 6ea3c5a0c13..00000000000
--- a/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ecdd913cb7c61a5435591f0a7268b01ab3fc782a
\ No newline at end of file
diff --git a/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..9259a2c66c1
--- /dev/null
+++ b/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+96ef0a9a43a5fc99d27bb7e7d61517ee4c7e54a4
\ No newline at end of file
diff --git a/distribution/licenses/lucene-join-5.5.0.jar.sha1 b/distribution/licenses/lucene-join-5.5.0.jar.sha1
deleted file mode 100644
index 3cc19b170ed..00000000000
--- a/distribution/licenses/lucene-join-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-af4f55e36e3a7d1f4e9ed9efdccf7e22b767d6e8
\ No newline at end of file
diff --git a/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..4959f5f163c
--- /dev/null
+++ b/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+d93de34947d37e31a337cdfed400333588c378d8
\ No newline at end of file
diff --git a/distribution/licenses/lucene-memory-5.5.0.jar.sha1 b/distribution/licenses/lucene-memory-5.5.0.jar.sha1
deleted file mode 100644
index 1f4ebc783ee..00000000000
--- a/distribution/licenses/lucene-memory-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-09a327fe9f20fc7e3912ed213bdd5cb4b6d2a65a
\ No newline at end of file
diff --git a/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..5218d0a019e
--- /dev/null
+++ b/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+9c292930b1828e68f06509944a5346c141d56fd4
\ No newline at end of file
diff --git a/distribution/licenses/lucene-misc-5.5.0.jar.sha1 b/distribution/licenses/lucene-misc-5.5.0.jar.sha1
deleted file mode 100644
index 76131ae81c5..00000000000
--- a/distribution/licenses/lucene-misc-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-504d855a1a38190622fdf990b2298c067e7d60ca
\ No newline at end of file
diff --git a/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..947722edfd3
--- /dev/null
+++ b/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+866ed93f48683e877ffa4d9baa1323dcffbc65d7
\ No newline at end of file
diff --git a/distribution/licenses/lucene-queries-5.5.0.jar.sha1 b/distribution/licenses/lucene-queries-5.5.0.jar.sha1
deleted file mode 100644
index 5790b2e4776..00000000000
--- a/distribution/licenses/lucene-queries-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-60ca161c1dd5f127907423b6f039b846fb713de0
\ No newline at end of file
diff --git a/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..6caf86a6b96
--- /dev/null
+++ b/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+967d9c2647bdd4d88961747f7436a5a92aa0385b
\ No newline at end of file
diff --git a/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1 b/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1
deleted file mode 100644
index 8e4a1e66138..00000000000
--- a/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0fddc49725b562fd48dff0cff004336ad2a090a4
\ No newline at end of file
diff --git a/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..b3e92d3f168
--- /dev/null
+++ b/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+981030d83a7504267f3141d7365fad9b46d51465
\ No newline at end of file
diff --git a/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1 b/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1
deleted file mode 100644
index 20c2a1c9527..00000000000
--- a/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b7da8e187acd6e4d7781ba41fac8b9082dd27409
\ No newline at end of file
diff --git a/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..7b5176c4963
--- /dev/null
+++ b/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+707691b1baf22c29020569f5b875d200a4955411
\ No newline at end of file
diff --git a/distribution/licenses/lucene-spatial-5.5.0.jar.sha1 b/distribution/licenses/lucene-spatial-5.5.0.jar.sha1
deleted file mode 100644
index dd645be87e3..00000000000
--- a/distribution/licenses/lucene-spatial-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c14965bf67179bee93cc8efc58d09a75d230c891
\ No newline at end of file
diff --git a/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..9df2a16b886
--- /dev/null
+++ b/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+be9e78130a069983f611f484d5b7b87bda0d6370
\ No newline at end of file
diff --git a/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..6badc36d361
--- /dev/null
+++ b/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+edeef6ce8a58d5e6a074bebf545918d04e8579e1
\ No newline at end of file
diff --git a/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1
deleted file mode 100644
index c0b9d4ba838..00000000000
--- a/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3e5ab4ea3e2052166100482f7a56b75bfa4ab0ad
\ No newline at end of file
diff --git a/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..480ae590aed
--- /dev/null
+++ b/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+d86a7ba859576bdcee1dacd8f407ccf71f982c60
\ No newline at end of file
diff --git a/distribution/licenses/lucene-suggest-5.5.0.jar.sha1 b/distribution/licenses/lucene-suggest-5.5.0.jar.sha1
deleted file mode 100644
index adce0756ecf..00000000000
--- a/distribution/licenses/lucene-suggest-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-51f9d52332f556976a5099817e35d37c69a24597
\ No newline at end of file
diff --git a/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..7835298c4a2
--- /dev/null
+++ b/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+a3860de6502576f142dc948eb2005fa4dc0c27c5
\ No newline at end of file
diff --git a/distribution/licenses/spatial4j-0.5.jar.sha1 b/distribution/licenses/spatial4j-0.5.jar.sha1
deleted file mode 100644
index 4bcf7a33b15..00000000000
--- a/distribution/licenses/spatial4j-0.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6e16edaf6b1ba76db7f08c2f3723fce3b358ecc3
\ No newline at end of file
diff --git a/distribution/licenses/spatial4j-0.6.jar.sha1 b/distribution/licenses/spatial4j-0.6.jar.sha1
new file mode 100644
index 00000000000..740a25b1c90
--- /dev/null
+++ b/distribution/licenses/spatial4j-0.6.jar.sha1
@@ -0,0 +1 @@
+21b15310bddcfd8c72611c180f20cf23279809a3
\ No newline at end of file
diff --git a/docs/java-api/query-dsl/geo-shape-query.asciidoc b/docs/java-api/query-dsl/geo-shape-query.asciidoc
index c753cd72c1a..e08410acbdb 100644
--- a/docs/java-api/query-dsl/geo-shape-query.asciidoc
+++ b/docs/java-api/query-dsl/geo-shape-query.asciidoc
@@ -10,9 +10,9 @@ to your classpath in order to use this type:
[source,xml]
-----------------------------------------------
- <groupId>com.spatial4j</groupId>
+ <groupId>org.locationtech.spatial4j</groupId>
<artifactId>spatial4j</artifactId>
- <version>0.4.1</version> <1>
+ <version>0.6</version> <1>
@@ -27,7 +27,7 @@ to your classpath in order to use this type:
-----------------------------------------------
-<1> check for updates in http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.spatial4j%22%20AND%20a%3A%22spatial4j%22[Maven Central]
+<1> check for updates in http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.locationtech.spatial4j%22%20AND%20a%3A%22spatial4j%22[Maven Central]
<2> check for updates in http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.vividsolutions%22%20AND%20a%3A%22jts%22[Maven Central]
[source,java]
diff --git a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc
index 8b1f58f7ff0..e649928810b 100644
--- a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc
@@ -111,3 +111,35 @@ Zone:: 'Z' outputs offset without a colon, 'ZZ' outputs the offset with a colon,
Zone names:: Time zone names ('z') cannot be parsed.
Any characters in the pattern that are not in the ranges of ['a'..'z'] and ['A'..'Z'] will be treated as quoted text. For instance, characters like ':', '.', ' ', '#' and '?' will appear in the resulting time text even if they are not enclosed within single quotes.
+
+[[time-zones]]
+==== Time zone in date range aggregations
+
+Dates can be converted from another time zone to UTC by specifying the `time_zone` parameter.
+
+Time zones may either be specified as an ISO 8601 UTC offset (e.g. +01:00 or -08:00) or as one of
+the http://joda-time.sourceforge.net/timezones.html[time zone ids] from the TZ database.
+
+The `time_zone` parameter is also applied to rounding in date math expressions. As an example,
+to round to the beginning of the day in the CET time zone, you can do the following:
+
+[source,js]
+--------------------------------------------------
+{
+ "aggs": {
+ "range": {
+ "date_range": {
+ "field": "date",
+ "time_zone": "CET",
+ "ranges": [
+ { "to": "2016-02-15/d" }, <1>
+ { "from": "2016-02-15/d", "to" : "now/d" <2>},
+ { "from": "now/d" },
+ ]
+ }
+ }
+ }
+ }
+--------------------------------------------------
+<1> This date will be converted to `2016-02-15T00:00:00.000+01:00`.
+<2> `now/d` will be rounded to the beginning of the day in the CET time zone.
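
A fixed ISO 8601 offset can stand in for the zone id; a minimal sketch of the same request using an offset (the `date` field is the same illustrative one as above):

[source,js]
--------------------------------------------------
{
  "aggs": {
    "range": {
      "date_range": {
        "field": "date",
        "time_zone": "+01:00",
        "ranges": [
          { "to": "now/d" }
        ]
      }
    }
  }
}
--------------------------------------------------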
diff --git a/docs/reference/cluster/nodes-info.asciidoc b/docs/reference/cluster/nodes-info.asciidoc
index a3072768ca6..5ed979abd0d 100644
--- a/docs/reference/cluster/nodes-info.asciidoc
+++ b/docs/reference/cluster/nodes-info.asciidoc
@@ -17,7 +17,7 @@ The second command selectively retrieves nodes information of only
By default, it just returns all attributes and core settings for a node.
It also allows you to retrieve only information on `settings`, `os`, `process`, `jvm`,
-`thread_pool`, `transport`, `http` and `plugins`:
+`thread_pool`, `transport`, `http`, `plugins` and `ingest`:
[source,js]
--------------------------------------------------
@@ -122,3 +122,71 @@ The result will look similar to:
}
}
--------------------------------------------------
+
+[float]
+[[ingest-info]]
+==== Ingest information
+
+`ingest` - if set, the result will contain details about the available
+processors per node:
+
+* `type`: the processor type
+
+The result will look similar to:
+
+[source,js]
+--------------------------------------------------
+{
+ "cluster_name": "elasticsearch",
+ "nodes": {
+ "O70_wBv6S9aPPcAKdSUBtw": {
+ "ingest": {
+ "processors": [
+ {
+ "type": "date"
+ },
+ {
+ "type": "uppercase"
+ },
+ {
+ "type": "set"
+ },
+ {
+ "type": "lowercase"
+ },
+ {
+ "type": "gsub"
+ },
+ {
+ "type": "convert"
+ },
+ {
+ "type": "remove"
+ },
+ {
+ "type": "fail"
+ },
+ {
+ "type": "foreach"
+ },
+ {
+ "type": "split"
+ },
+ {
+ "type": "trim"
+ },
+ {
+ "type": "rename"
+ },
+ {
+ "type": "join"
+ },
+ {
+ "type": "append"
+ }
+ ]
+ }
+ }
+ }
+}
+--------------------------------------------------
\ No newline at end of file
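
The listing above is what a nodes-info request scoped to the new metric would return; a sketch of such a call (host and port are illustrative):

[source,js]
--------------------------------------------------
curl localhost:9200/_nodes/ingest?pretty
--------------------------------------------------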
diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc
index 7d6614342b5..f7e1f68dec5 100644
--- a/docs/reference/index-modules.asciidoc
+++ b/docs/reference/index-modules.asciidoc
@@ -148,6 +148,10 @@ Other index settings are available in index modules:
Enable or disable dynamic mapping for an index.
+<<index-modules-merge>>::
+
+ Control over how segments are merged by the background merge process.
+
<>::
Configure custom similarity settings to customize how search results are
@@ -173,6 +177,8 @@ include::index-modules/allocation.asciidoc[]
include::index-modules/mapper.asciidoc[]
+include::index-modules/merge.asciidoc[]
+
include::index-modules/similarity.asciidoc[]
include::index-modules/slowlog.asciidoc[]
diff --git a/docs/reference/index-modules/merge.asciidoc b/docs/reference/index-modules/merge.asciidoc
new file mode 100644
index 00000000000..7e5260f95d4
--- /dev/null
+++ b/docs/reference/index-modules/merge.asciidoc
@@ -0,0 +1,30 @@
+[[index-modules-merge]]
+== Merge
+
+A shard in Elasticsearch is a Lucene index, and a Lucene index is broken down
+into segments. Segments are internal storage elements in the index where the
+index data is stored, and are immutable. Smaller segments are periodically
+merged into larger segments to keep the index size at bay and to expunge
+deletes.
+
+The merge process uses auto-throttling to balance the use of hardware
+resources between merging and other activities like search.
+
+[float]
+[[merge-scheduling]]
+=== Merge scheduling
+
+The merge scheduler (ConcurrentMergeScheduler) controls the execution of merge
+operations when they are needed. Merges run in separate threads, and when the
+maximum number of threads is reached, further merges will wait until a merge
+thread becomes available.
+
+The merge scheduler supports the following _dynamic_ setting:
+
+`index.merge.scheduler.max_thread_count`::
+
+ The maximum number of threads that may be merging at once. Defaults to
+ `Math.max(1, Math.min(4, Runtime.getRuntime().availableProcessors() / 2))`,
+ which works well for a good solid-state disk (SSD). If your index is on
+ spinning platter drives instead, decrease this to 1.
+
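
Because the setting is dynamic, it should be adjustable on a live index through the update-settings API; a sketch, assuming an index named `my_index`:

[source,js]
--------------------------------------------------
curl -XPUT 'localhost:9200/my_index/_settings' -d '{
  "index.merge.scheduler.max_thread_count": 1
}'
--------------------------------------------------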
diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc
index 95d7005ee34..0827baa6ea1 100644
--- a/docs/reference/ingest/ingest-node.asciidoc
+++ b/docs/reference/ingest/ingest-node.asciidoc
@@ -620,6 +620,20 @@ but is very useful for bookkeeping and tracing errors to specific processors.
See <> to learn more about the `on_failure` field and error handling in pipelines.
+The <> can be used to figure out which processors are available in a cluster.
+The <> provides a per-node list of the available processors.
+
+Custom processors must be installed on all nodes. The put pipeline API will fail if a processor specified in a pipeline
+doesn't exist on all nodes. If you rely on custom processor plugins, make sure to mark these plugins as mandatory by adding
+the `plugin.mandatory` setting to the `config/elasticsearch.yml` file, for example:
+
+[source,yaml]
+--------------------------------------------------
+plugin.mandatory: ingest-attachment,ingest-geoip
+--------------------------------------------------
+
+A node will not start if either of these plugins is not available.
+
[[append-procesesor]]
=== Append Processor
Appends one or more values to an existing array if the field already exists and it is an array.
diff --git a/docs/reference/query-dsl/has-child-query.asciidoc b/docs/reference/query-dsl/has-child-query.asciidoc
index 24951bbe930..01c3c35db54 100644
--- a/docs/reference/query-dsl/has-child-query.asciidoc
+++ b/docs/reference/query-dsl/has-child-query.asciidoc
@@ -23,7 +23,7 @@ an example:
==== Scoring capabilities
The `has_child` also has scoring support. The
-supported score modes are `min`, `max`, `total`, `avg` or `none`. The default is
+supported score modes are `min`, `max`, `sum`, `avg` or `none`. The default is
`none` and yields the same behaviour as in previous versions. If the
score mode is set to another value than `none`, the scores of all the
matching child documents are aggregated into the associated parent
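
As an illustration of the corrected mode name, a sketch of a `has_child` query that sums child scores into the parent (the `blog_tag` type and `tag` field are placeholders):

[source,js]
--------------------------------------------------
{
  "has_child" : {
    "type" : "blog_tag",
    "score_mode" : "sum",
    "query" : {
      "term" : {
        "tag" : "something"
      }
    }
  }
}
--------------------------------------------------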
diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc
index 03037207fb0..bef563cd965 100644
--- a/docs/reference/setup/configuration.asciidoc
+++ b/docs/reference/setup/configuration.asciidoc
@@ -43,6 +43,13 @@ using the <> API, with:
curl localhost:9200/_nodes/stats/process?pretty
--------------------------------------------------
+[float]
+[[max-number-of-threads]]
+==== Number of threads
+
+Make sure that the number of threads that the Elasticsearch user can
+create is at least 2048.
+
[float]
[[vm-max-map-count]]
==== Virtual memory
diff --git a/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1
deleted file mode 100644
index 15c992bf460..00000000000
--- a/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4766406a2933ac9df62c49d6619caabb9943aba2
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..d9a29f17c50
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+8d11bf581b0afc25f87a57c06834cd85930d2ffa
\ No newline at end of file
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java
index 7d018adc07f..e717ea6d6fb 100644
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java
+++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java
@@ -26,6 +26,8 @@ import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
+import org.elasticsearch.search.aggregations.bucket.missing.Missing;
+import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
@@ -38,6 +40,8 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.missing;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@@ -498,6 +502,42 @@ public class ExtendedStatsTests extends AbstractNumericTestCase {
checkUpperLowerBounds(stats, sigma);
}
+ public void testEmptySubAggregation() {
+ SearchResponse searchResponse = client().prepareSearch("idx")
+ .setQuery(matchAllQuery())
+ .addAggregation(terms("value").field("value")
+ .subAggregation(missing("values").field("values")
+ .subAggregation(extendedStats("stats").field("value"))))
+ .execute().actionGet();
+
+ assertHitCount(searchResponse, 10);
+
+ Terms terms = searchResponse.getAggregations().get("value");
+ assertThat(terms, notNullValue());
+ assertThat(terms.getBuckets().size(), equalTo(10));
+
+ for (Terms.Bucket bucket : terms.getBuckets()) {
+ assertThat(bucket.getDocCount(), equalTo(1L));
+
+ Missing missing = bucket.getAggregations().get("values");
+ assertThat(missing, notNullValue());
+ assertThat(missing.getDocCount(), equalTo(0L));
+
+ ExtendedStats stats = missing.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getSumOfSquares(), equalTo(0.0));
+ assertThat(stats.getCount(), equalTo(0L));
+ assertThat(stats.getSum(), equalTo(0.0));
+ assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY));
+ assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY));
+ assertThat(Double.isNaN(stats.getStdDeviation()), is(true));
+ assertThat(Double.isNaN(stats.getAvg()), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER)), is(true));
+ }
+ }
+
private void assertShardExecutionState(SearchResponse response, int expectedFailures) throws Exception {
ShardSearchFailure[] failures = response.getShardFailures();
@@ -515,4 +555,4 @@ public class ExtendedStatsTests extends AbstractNumericTestCase {
assertThat(stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER), equalTo(stats.getAvg() - (stats.getStdDeviation() * sigma)));
}
-}
\ No newline at end of file
+}
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java
index fffeabcb807..4689d5fba03 100644
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java
+++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java
@@ -169,14 +169,21 @@ public class FunctionScoreTests extends ESIntegTestCase {
}
}
+ /** make sure min_score works if functions is empty, see https://github.com/elastic/elasticsearch/issues/10253 */
public void testWithEmptyFunctions() throws IOException, ExecutionException, InterruptedException {
assertAcked(prepareCreate("test"));
ensureYellow();
index("test", "testtype", "1", jsonBuilder().startObject().field("text", "test text").endObject());
refresh();
- // make sure that min_score works if functions is empty, see https://github.com/elastic/elasticsearch/issues/10253
- float termQueryScore = 0.19178301f;
+ SearchResponse termQuery = client().search(
+ searchRequest().source(
+ searchSource().explain(true).query(
+ termQuery("text", "text")))).get();
+ assertSearchResponse(termQuery);
+ assertThat(termQuery.getHits().totalHits(), equalTo(1L));
+ float termQueryScore = termQuery.getHits().getAt(0).getScore();
+
for (CombineFunction combineFunction : CombineFunction.values()) {
testMinScoreApplied(combineFunction, termQueryScore);
}
diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml
index c23e5da95a1..7f84c1aac8b 100644
--- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml
+++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml
@@ -58,9 +58,6 @@
---
"wait_for_completion=false":
- - skip:
- version: "0.0.0 - "
- reason: breaks other tests by leaving a running reindex behind
- do:
index:
index: source
@@ -79,6 +76,7 @@
dest:
index: dest
- match: {task: '/.+:\d+/'}
+ - set: {task: task}
- is_false: updated
- is_false: version_conflicts
- is_false: batches
@@ -87,6 +85,11 @@
- is_false: took
- is_false: created
+ - do:
+ tasks.list:
+ wait_for_completion: true
+ task_id: $task
+
---
"Response format for version conflict":
- do:
diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml
index 383e945bbf2..94ffa2349a9 100644
--- a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml
+++ b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml
@@ -37,6 +37,7 @@
wait_for_completion: false
index: test
- match: {task: '/.+:\d+/'}
+ - set: {task: task}
- is_false: updated
- is_false: version_conflicts
- is_false: batches
@@ -45,6 +46,11 @@
- is_false: took
- is_false: created
+ - do:
+ tasks.list:
+ wait_for_completion: true
+ task_id: $task
+
---
"Response for version conflict":
- do:
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1
deleted file mode 100644
index 18440dcdc04..00000000000
--- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-69a6e72d322b6643f1b419e6c9cc46623a2404e9
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..538d2ad8216
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+38fda9b86e4f68eb6c9d31fb636a2540da219927
\ No newline at end of file
diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java
index 24890fed5a9..5f3e1644481 100644
--- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java
+++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java
@@ -23,7 +23,6 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.TimeUnits;
import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
import org.junit.BeforeClass;
@@ -110,14 +109,14 @@ public class IndexableBinaryStringToolsTests extends LuceneTestCase {
int encodedLen1 = IndexableBinaryStringTools.getEncodedLength(
originalArray1, 0, numBytes1);
if (encodedLen1 > encoded1.length)
- encoded1 = new char[ArrayUtil.oversize(encodedLen1, RamUsageEstimator.NUM_BYTES_CHAR)];
+ encoded1 = new char[ArrayUtil.oversize(encodedLen1, Character.BYTES)];
IndexableBinaryStringTools.encode(originalArray1, 0, numBytes1, encoded1,
0, encodedLen1);
int encodedLen2 = IndexableBinaryStringTools.getEncodedLength(original2,
0, numBytes2);
if (encodedLen2 > encoded2.length)
- encoded2 = new char[ArrayUtil.oversize(encodedLen2, RamUsageEstimator.NUM_BYTES_CHAR)];
+ encoded2 = new char[ArrayUtil.oversize(encodedLen2, Character.BYTES)];
IndexableBinaryStringTools.encode(original2, 0, numBytes2, encoded2, 0,
encodedLen2);
@@ -196,7 +195,7 @@ public class IndexableBinaryStringToolsTests extends LuceneTestCase {
int encodedLen = IndexableBinaryStringTools.getEncodedLength(binary, 0,
numBytes);
if (encoded.length < encodedLen)
- encoded = new char[ArrayUtil.oversize(encodedLen, RamUsageEstimator.NUM_BYTES_CHAR)];
+ encoded = new char[ArrayUtil.oversize(encodedLen, Character.BYTES)];
IndexableBinaryStringTools.encode(binary, 0, numBytes, encoded, 0,
encodedLen);
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1
deleted file mode 100644
index 832db46564e..00000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e9d68dd5d9fae3349b81de5952d0ee8115c696a4
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..b90115da4ab
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+352fea7a169ada6a7ae18e4ec34559496e09b465
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1
deleted file mode 100644
index 3436526863d..00000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c4735c43440ebcb20f2b6f49f508fedc12f5366c
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..7cbe648e0bd
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+445f5ea7822d0dd6b91364ec119cd6cb4635d285
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1
deleted file mode 100644
index 95b85f7edbd..00000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a31a4d1476d45738a460374d9801dc5ed9b49c1a
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..03c96786de2
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+0b216b7b9ff583bc1382edc8adfee4d4acd02859
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1
deleted file mode 100644
index d5a28231e65..00000000000
--- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1a7505d011aca54c004d0fc86a490d5f054bb903
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1
new file mode 100644
index 00000000000..f27a98f63ba
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1
@@ -0,0 +1 @@
+8d161a8c7e5b5b82f64dc5df2ca46197a3716672
\ No newline at end of file
diff --git a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml
index ed752971fcb..67bb7340ce3 100644
--- a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml
+++ b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml
@@ -1,5 +1,11 @@
"Ingest attachment plugin installed":
- do:
- cluster.stats: {}
+ cluster.state: {}
- - match: { nodes.plugins.0.name: ingest-attachment }
+ - set: {master_node: master}
+
+ - do:
+ nodes.info: {}
+
+ - match: { nodes.$master.plugins.0.name: ingest-attachment }
+ - match: { nodes.$master.ingest.processors.11.type: attachment }
diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml
index b522cb77780..b924484aa7d 100644
--- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml
+++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml
@@ -1,5 +1,11 @@
"Ingest plugin installed":
- do:
- cluster.stats: {}
+ cluster.state: {}
- - match: { nodes.plugins.0.name: ingest-geoip }
+ - set: {master_node: master}
+
+ - do:
+ nodes.info: {}
+
+ - match: { nodes.$master.plugins.0.name: ingest-geoip }
+ - match: { nodes.$master.ingest.processors.3.type: geoip }
diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java
index ce78c75d783..802ca1d7653 100644
--- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java
+++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java
@@ -26,7 +26,6 @@ import java.util.Map;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.hash.MurmurHash3;
import org.elasticsearch.common.settings.Settings;
@@ -72,12 +71,10 @@ public class Murmur3FieldMapper extends LongFieldMapper {
@Override
protected void setupFieldType(BuilderContext context) {
super.setupFieldType(context);
- if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) {
- fieldType.setIndexOptions(IndexOptions.NONE);
- defaultFieldType.setIndexOptions(IndexOptions.NONE);
- fieldType.setHasDocValues(true);
- defaultFieldType.setHasDocValues(true);
- }
+ fieldType.setIndexOptions(IndexOptions.NONE);
+ defaultFieldType.setIndexOptions(IndexOptions.NONE);
+ fieldType.setHasDocValues(true);
+ defaultFieldType.setHasDocValues(true);
}
@Override
@@ -97,17 +94,11 @@ public class Murmur3FieldMapper extends LongFieldMapper {
Builder builder = new Builder(name);
// tweaking these settings is no longer allowed, the entire purpose of murmur3 fields is to store a hash
- if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
- if (node.get("doc_values") != null) {
- throw new MapperParsingException("Setting [doc_values] cannot be modified for field [" + name + "]");
- }
- if (node.get("index") != null) {
- throw new MapperParsingException("Setting [index] cannot be modified for field [" + name + "]");
- }
+ if (node.get("doc_values") != null) {
+ throw new MapperParsingException("Setting [doc_values] cannot be modified for field [" + name + "]");
}
-
- if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
- builder.indexOptions(IndexOptions.DOCS);
+ if (node.get("index") != null) {
+ throw new MapperParsingException("Setting [index] cannot be modified for field [" + name + "]");
}
parseNumberField(builder, name, node, parserContext);
diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java
index 072c0db3e59..16865eb98b6 100644
--- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java
+++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java
@@ -22,10 +22,7 @@ package org.elasticsearch.index.mapper.murmur3;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
@@ -33,22 +30,14 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.indices.mapper.MapperRegistry;
-import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
import java.util.Arrays;
-import java.util.Collection;
import java.util.Collections;
public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
- @Override
- protected Collection<Class<? extends Plugin>> getPlugins() {
- return pluginList(InternalSettingsPlugin.class);
- }
-
MapperRegistry mapperRegistry;
IndexService indexService;
DocumentMapperParser parser;
@@ -131,38 +120,4 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
assertTrue(e.getMessage().contains("Setting [index] cannot be modified"));
}
}
-
- public void testDocValuesSettingBackcompat() throws Exception {
- Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
- indexService = createIndex("test_bwc", settings);
- parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
- indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "murmur3")
- .field("doc_values", false)
- .endObject().endObject().endObject().endObject().string();
-
- DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
- Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field");
- assertFalse(mapper.fieldType().hasDocValues());
- }
-
- public void testIndexSettingBackcompat() throws Exception {
- Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
- indexService = createIndex("test_bwc", settings);
- parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
- indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "murmur3")
- .field("index", "not_analyzed")
- .endObject().endObject().endObject().endObject().string();
-
- DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
- Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field");
- assertEquals(IndexOptions.DOCS, mapper.fieldType().indexOptions());
- }
-
- // TODO: add more tests
}
diff --git a/plugins/mapper-size/build.gradle b/plugins/mapper-size/build.gradle
index 7af65d19ef3..7d5aa1ee276 100644
--- a/plugins/mapper-size/build.gradle
+++ b/plugins/mapper-size/build.gradle
@@ -22,3 +22,6 @@ esplugin {
classname 'org.elasticsearch.plugin.mapper.MapperSizePlugin'
}
+// TODO: migrate to points
+compileJava.options.compilerArgs << "-Xlint:-deprecation"
+compileTestJava.options.compilerArgs << "-Xlint:-deprecation"
diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java
index 984e83a438e..cfc7e29486c 100644
--- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java
+++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java
@@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper.size;
import org.apache.lucene.document.Field;
-import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -39,7 +38,6 @@ import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parseStore;
public class SizeFieldMapper extends MetadataFieldMapper {
@@ -94,9 +92,6 @@ public class SizeFieldMapper extends MetadataFieldMapper {
if (fieldName.equals("enabled")) {
builder.enabled(lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
iterator.remove();
- } else if (fieldName.equals("store") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
- builder.store(parseStore(fieldName, fieldNode.toString(), parserContext));
- iterator.remove();
}
}
return builder;
diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java
index d6b64df9e5d..174520cfada 100644
--- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java
+++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java
@@ -19,30 +19,20 @@
package org.elasticsearch.index.mapper.size;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.indices.IndicesModule;
-import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@@ -55,15 +45,9 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
MapperService mapperService;
DocumentMapperParser parser;
- @Override
- protected Collection<Class<? extends Plugin>> getPlugins() {
- return pluginList(InternalSettingsPlugin.class); // uses index.version.created
- }
-
@Before
public void before() {
indexService = createIndex("test");
- Map<String, MetadataFieldMapper.TypeParser> metadataMappers = new HashMap<>();
IndicesModule indices = new IndicesModule();
indices.registerMetadataMapper(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser());
mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), indices.getMapperRegistry(), indexService::newQueryShardContext);
@@ -87,31 +71,6 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue());
}
- public void testSizeEnabledAndStoredBackcompat() throws Exception {
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("_size").field("enabled", true).field("store", "yes").endObject()
- .endObject().endObject().string();
- Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-
- indexService = createIndex("test2", indexSettings);
- MapperRegistry mapperRegistry = new MapperRegistry(
- Collections.emptyMap(),
- Collections.singletonMap(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser()));
- parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService,
- indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
- DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
-
- BytesReference source = XContentFactory.jsonBuilder()
- .startObject()
- .field("field", "value")
- .endObject()
- .bytes();
- ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1"));
-
- assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(true));
- assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue());
- }
-
public void testSizeDisabled() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_size").field("enabled", false).endObject()
diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java
index 88b9d187dcf..fe2c32723e2 100644
--- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java
+++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java
@@ -60,7 +60,7 @@ public final class SmbDirectoryWrapper extends FilterDirectory {
static final int CHUNK_SIZE = 8192;
public SmbFSIndexOutput(String name) throws IOException {
- super("SmbFSIndexOutput(path=\"" + fsDirectory.getDirectory().resolve(name) + "\")", new FilterOutputStream(Channels.newOutputStream(Files.newByteChannel(fsDirectory.getDirectory().resolve(name), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.READ, StandardOpenOption.WRITE))) {
+ super("SmbFSIndexOutput(path=\"" + fsDirectory.getDirectory().resolve(name) + "\")", name, new FilterOutputStream(Channels.newOutputStream(Files.newByteChannel(fsDirectory.getDirectory().resolve(name), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.READ, StandardOpenOption.WRITE))) {
// This implementation ensures, that we never write more than CHUNK_SIZE bytes:
@Override
public void write(byte[] b, int offset, int length) throws IOException {
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json
index 9fe9bfe3cad..c3dc0a18b45 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json
@@ -44,13 +44,13 @@
"type" : "string",
"description" : "The name of the tokenizer to use for the analysis"
},
- "detail": {
+ "explain": {
"type" : "boolean",
"description" : "With `true`, outputs more advanced details. (default: false)"
},
"attributes": {
"type" : "list",
- "description" : "A comma-separated list of token attributes to output, this parameter works only with `detail=true`"
+ "description" : "A comma-separated list of token attributes to output, this parameter works only with `explain=true`"
},
"format": {
"type": "enum",
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json
index 43be35a5a86..12f0d11c5fc 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json
@@ -12,7 +12,7 @@
},
"metric": {
"type": "list",
- "options": ["settings", "os", "process", "jvm", "thread_pool", "transport", "http", "plugins"],
+ "options": ["settings", "os", "process", "jvm", "thread_pool", "transport", "http", "plugins", "ingest"],
"description": "A comma-separated list of metrics you wish returned. Leave empty to return all."
}
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json
index 7e8683b3475..5cdeed1b142 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json
@@ -31,6 +31,10 @@
"parent_task": {
"type" : "number",
"description" : "Return tasks with specified parent task id. Set to -1 to return all."
+ },
+ "wait_for_completion": {
+ "type": "boolean",
+ "description": "Wait for the matching tasks to complete (default: false)"
}
}
},
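
A sketch of the new flag in use, assuming the task management endpoint is mounted at `/_tasks`:

[source,js]
--------------------------------------------------
curl 'localhost:9200/_tasks?wait_for_completion=true'
--------------------------------------------------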
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml
index 88160ef4f1e..93ffe0d5db1 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml
@@ -75,7 +75,7 @@ setup:
"Detail response with Analyzer":
- do:
indices.analyze:
- body: {"text": "This is troubled", "analyzer": standard, "explain": true}
+ body: {"text": "This is troubled", "analyzer": standard, "explain": "true"}
- length: { detail.analyzer.tokens: 3 }
- match: { detail.analyzer.name: standard }
- match: { detail.analyzer.tokens.0.token: this }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml
index b494161aff1..ced2e9e4850 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml
@@ -1,3 +1,28 @@
+---
+"Check availability of default processors":
+ - do:
+ cluster.state: {}
+
+ - set: {master_node: master}
+
+ - do:
+ nodes.info: {}
+
+ - match: { nodes.$master.ingest.processors.0.type: date }
+ - match: { nodes.$master.ingest.processors.1.type: uppercase }
+ - match: { nodes.$master.ingest.processors.2.type: set }
+ - match: { nodes.$master.ingest.processors.3.type: lowercase }
+ - match: { nodes.$master.ingest.processors.4.type: gsub }
+ - match: { nodes.$master.ingest.processors.5.type: convert }
+ - match: { nodes.$master.ingest.processors.6.type: remove }
+ - match: { nodes.$master.ingest.processors.7.type: fail }
+ - match: { nodes.$master.ingest.processors.8.type: foreach }
+ - match: { nodes.$master.ingest.processors.9.type: split }
+ - match: { nodes.$master.ingest.processors.10.type: trim }
+ - match: { nodes.$master.ingest.processors.11.type: rename }
+ - match: { nodes.$master.ingest.processors.12.type: join }
+ - match: { nodes.$master.ingest.processors.13.type: append }
+
---
"Test basic pipeline crud":
- do:
diff --git a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java
index 330758223a5..576ecf2d1ee 100644
--- a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java
@@ -29,6 +29,10 @@ import org.elasticsearch.test.StreamsUtils;
import org.junit.After;
import org.junit.Before;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.isEmptyString;
+import static org.hamcrest.Matchers.not;
+
public abstract class CliToolTestCase extends ESTestCase {
@Before
@@ -52,8 +56,10 @@ public abstract class CliToolTestCase extends ESTestCase {
public static void assertTerminalOutputContainsHelpFile(MockTerminal terminal, String classPath) throws IOException {
String output = terminal.getOutput();
- assertFalse(output, output.isEmpty());
+ assertThat(output, not(isEmptyString()));
String expectedDocs = StreamsUtils.copyToStringFromClasspath(classPath);
- assertTrue(output, output.contains(expectedDocs));
+ // convert to *nix newlines as MockTerminal used for tests also uses *nix newlines
+ expectedDocs = expectedDocs.replace("\r\n", "\n");
+ assertThat(output, containsString(expectedDocs));
}
}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index 4a20d3c3fd6..84d88733802 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -631,27 +631,6 @@ public abstract class ESTestCase extends LuceneTestCase {
assertEquals(expected.isNativeMethod(), actual.isNativeMethod());
}
- /** A runnable that can throw any checked exception. */
- @FunctionalInterface
- public interface ThrowingRunnable {
- void run() throws Throwable;
- }
-
- /** Checks a specific exception class is thrown by the given runnable, and returns it. */
- public static <T extends Throwable> T expectThrows(Class<T> expectedType, ThrowingRunnable runnable) {
- try {
- runnable.run();
- } catch (Throwable e) {
- if (expectedType.isInstance(e)) {
- return expectedType.cast(e);
- }
- AssertionFailedError assertion = new AssertionFailedError("Unexpected exception type, expected " + expectedType.getSimpleName());
- assertion.initCause(e);
- throw assertion;
- }
- throw new AssertionFailedError("Expected exception " + expectedType.getSimpleName());
- }
-
protected static long spinForAtLeastOneMillisecond() {
long nanosecondsInMillisecond = TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS);
// force at least one millisecond to elapse, but ensure the
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
index 5684717342d..fbc518b136d 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
@@ -19,14 +19,34 @@
package org.elasticsearch.test.rest;
-import com.carrotsearch.randomizedtesting.RandomizedTest;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.file.FileSystem;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
import org.apache.lucene.util.IOUtils;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.client.RestException;
+import org.elasticsearch.test.rest.client.RestResponse;
import org.elasticsearch.test.rest.parser.RestTestParseException;
import org.elasticsearch.test.rest.parser.RestTestSuiteParser;
import org.elasticsearch.test.rest.section.DoSection;
@@ -42,24 +62,11 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.nio.file.FileSystem;
-import java.nio.file.FileSystems;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardCopyOption;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import com.carrotsearch.randomizedtesting.RandomizedTest;
+
+import static java.util.Collections.emptyList;
+import static java.util.Collections.emptyMap;
+import static java.util.Collections.sort;
/**
* Runs the clients test suite against an elasticsearch cluster.
@@ -261,7 +268,6 @@ public abstract class ESRestTestCase extends ESTestCase {
@After
public void wipeCluster() throws Exception {
-
// wipe indices
Map<String, String> deleteIndicesArgs = new HashMap<>();
deleteIndicesArgs.put("index", "*");
@@ -285,6 +291,30 @@ public abstract class ESRestTestCase extends ESTestCase {
adminExecutionContext.callApi("snapshot.delete_repository", deleteSnapshotsArgs, Collections.emptyList(), Collections.emptyMap());
}
+ /**
+ * Logs a message if there are still running tasks. The reasoning is that any tasks still running are state that is trying to bleed into
+ * other tests.
+ */
+ @After
+ public void logIfThereAreRunningTasks() throws InterruptedException, IOException, RestException {
+ RestResponse tasks = adminExecutionContext.callApi("tasks.list", emptyMap(), emptyList(), emptyMap());
+ Set<String> runningTasks = runningTasks(tasks);
+ // Ignore the task list API - it doesn't count against us
+ runningTasks.remove(ListTasksAction.NAME);
+ runningTasks.remove(ListTasksAction.NAME + "[n]");
+ if (runningTasks.isEmpty()) {
+ return;
+ }
+ List<String> stillRunning = new ArrayList<>(runningTasks);
+ sort(stillRunning);
+ logger.info("There are still tasks running after this test that might break subsequent tests {}.", stillRunning);
+ /*
+ * This isn't a higher level log or outright failure because some of these tasks are run by the cluster in the background. If we
+ * could determine that some tasks are run by the user we'd fail the tests if those tasks were running and ignore any background
+ * tasks.
+ */
+ }
+
@AfterClass
public static void close() {
if (restTestExecutionContext != null) {
@@ -365,4 +395,19 @@ public abstract class ESRestTestCase extends ESTestCase {
executableSection.execute(restTestExecutionContext);
}
}
+
+ @SuppressWarnings("unchecked")
+ public Set<String> runningTasks(RestResponse response) throws IOException {
+ Set<String> runningTasks = new HashSet<>();
+ Map<String, Object> nodes = (Map<String, Object>) response.evaluate("nodes");
+ for (Map.Entry<String, Object> node : nodes.entrySet()) {
+ Map