diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
index bdb563e001b..b04f959e068 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
@@ -68,7 +68,7 @@ public class PluginBuildPlugin extends BuildPlugin {
         testCompile "org.elasticsearch.test:framework:${project.versions.elasticsearch}"
         // we "upgrade" these optional deps to provided for plugins, since they will run
         // with a full elasticsearch server that includes optional deps
-        provided "com.spatial4j:spatial4j:${project.versions.spatial4j}"
+        provided "org.locationtech.spatial4j:spatial4j:${project.versions.spatial4j}"
         provided "com.vividsolutions:jts:${project.versions.jts}"
         provided "log4j:log4j:${project.versions.log4j}"
         provided "log4j:apache-log4j-extras:${project.versions.log4j}"
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy
index 7b525d39f53..b5128817fb0 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy
@@ -68,11 +68,17 @@ class PluginPropertiesTask extends Copy {
     }
 
     Map generateSubstitutions() {
+        def stringSnap = { version ->
+            if (version.endsWith("-SNAPSHOT")) {
+                return version.substring(0, version.length() - 9)
+            }
+            return version
+        }
         return [
             'name': extension.name,
             'description': extension.description,
-            'version': extension.version,
-            'elasticsearchVersion': VersionProperties.elasticsearch,
+            'version': stringSnap(extension.version),
+            'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch),
             'javaVersion': project.targetCompatibility as String,
             'isolated': extension.isolated as String,
             'classname': extension.classname
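An aside on the `stringSnap` closure above: the magic number 9 is just the length of the literal `-SNAPSHOT` suffix being stripped so the generated plugin properties carry the plain release version. A minimal Java sketch of the same logic (names are illustrative, not from the patch):

```java
// Sketch of the stringSnap substitution: drop a trailing "-SNAPSHOT".
final class VersionStrings {
    private static final String SNAPSHOT_SUFFIX = "-SNAPSHOT";

    static String stripSnapshot(String version) {
        if (version.endsWith(SNAPSHOT_SUFFIX)) {
            // SNAPSHOT_SUFFIX.length() == 9, the constant used in the Groovy closure
            return version.substring(0, version.length() - SNAPSHOT_SUFFIX.length());
        }
        return version;
    }

    public static void main(String[] args) {
        System.out.println(stripSnapshot("5.0.0-SNAPSHOT")); // 5.0.0
        System.out.println(stripSnapshot("5.0.0"));          // 5.0.0 (unchanged)
    }
}
```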
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 60a11d951f9..cbe612e5358 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -1486,7 +1486,6 @@
-
diff --git a/buildSrc/src/main/resources/forbidden/all-signatures.txt b/buildSrc/src/main/resources/forbidden/all-signatures.txt
index 3c56a03b293..9bc37005514 100644
--- a/buildSrc/src/main/resources/forbidden/all-signatures.txt
+++ b/buildSrc/src/main/resources/forbidden/all-signatures.txt
@@ -33,20 +33,6 @@ java.util.Formatter#<init>(java.lang.String,java.lang.String,java.util.Locale)
 java.io.RandomAccessFile
 java.nio.file.Path#toFile()
 
-@defaultMessage Don't use deprecated lucene apis
-org.apache.lucene.index.DocsEnum
-org.apache.lucene.index.DocsAndPositionsEnum
-org.apache.lucene.queries.TermFilter
-org.apache.lucene.queries.TermsFilter
-org.apache.lucene.search.Filter
-org.apache.lucene.search.FilteredQuery
-org.apache.lucene.search.TermRangeFilter
-org.apache.lucene.search.NumericRangeFilter
-org.apache.lucene.search.PrefixFilter
-org.apache.lucene.search.QueryWrapperFilter
-org.apache.lucene.search.join.BitDocIdSetCachingWrapperFilter
-org.apache.lucene.index.IndexWriter#isLocked(org.apache.lucene.store.Directory)
-
 java.nio.file.Paths @ Use org.elasticsearch.common.io.PathUtils.get() instead.
 java.nio.file.FileSystems#getDefault() @ use org.elasticsearch.common.io.PathUtils.getDefaultFileSystem() instead.
diff --git a/buildSrc/src/main/resources/forbidden/core-signatures.txt b/buildSrc/src/main/resources/forbidden/core-signatures.txt
index c6ab430595c..059be403a67 100644
--- a/buildSrc/src/main/resources/forbidden/core-signatures.txt
+++ b/buildSrc/src/main/resources/forbidden/core-signatures.txt
@@ -41,14 +41,10 @@ org.apache.lucene.index.IndexReader#addReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener)
 org.apache.lucene.index.IndexReader#removeReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener)
 
 @defaultMessage Pass the precision step from the mappings explicitly instead
-org.apache.lucene.search.NumericRangeQuery#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
-org.apache.lucene.search.NumericRangeQuery#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
-org.apache.lucene.search.NumericRangeQuery#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
-org.apache.lucene.search.NumericRangeQuery#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
-org.apache.lucene.search.NumericRangeFilter#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
-org.apache.lucene.search.NumericRangeFilter#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
-org.apache.lucene.search.NumericRangeFilter#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
-org.apache.lucene.search.NumericRangeFilter#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
+org.apache.lucene.search.LegacyNumericRangeQuery#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
+org.apache.lucene.search.LegacyNumericRangeQuery#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
+org.apache.lucene.search.LegacyNumericRangeQuery#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
+org.apache.lucene.search.LegacyNumericRangeQuery#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
 
 @defaultMessage Only use wait / notify when really needed try to use concurrency primitives, latches or callbacks instead.
 java.lang.Object#wait()
@@ -88,9 +84,6 @@ java.util.concurrent.Future#cancel(boolean)
 org.elasticsearch.common.io.PathUtils#get(java.lang.String, java.lang.String[])
 org.elasticsearch.common.io.PathUtils#get(java.net.URI)
 
-@defaultMessage Don't use deprecated Query#setBoost, wrap the query into a BoostQuery instead
-org.apache.lucene.search.Query#setBoost(float)
-
 @defaultMessage Constructing a DateTime without a time zone is dangerous
 org.joda.time.DateTime#<init>()
 org.joda.time.DateTime#<init>(long)
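The forbidden-signatures change tracks Lucene 6's rename of `NumericRangeQuery` to `LegacyNumericRangeQuery`: trie-encoded numerics are now legacy, and new code indexes numerics as points. A hedged sketch of the point-based replacement (standard Lucene 6 API, not part of this patch; the field name is invented):

```java
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.search.Query;

// Lucene 6 replaces NumericRangeQuery with point fields: index a LongPoint
// and query it with LongPoint.newRangeQuery. The precisionStep parameter
// that the forbidden signatures above complain about no longer exists.
class PointRangeExample {
    static Document indexDoc(long timestamp) {
        Document doc = new Document();
        doc.add(new LongPoint("timestamp", timestamp)); // BKD-tree indexed
        return doc;
    }

    static Query lastDay(long now) {
        // bounds are inclusive
        return LongPoint.newRangeQuery("timestamp", now - 86_400_000L, now);
    }
}
```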
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 54b16db2cb1..f75d5a936bb 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,8 +1,8 @@
 elasticsearch = 5.0.0
-lucene = 5.5.0
+lucene = 6.0.0-snapshot-bea235f
 
 # optional dependencies
-spatial4j = 0.5
+spatial4j = 0.6
 jts = 1.13
 jackson = 2.7.1
 log4j = 1.2.17
diff --git a/core/build.gradle b/core/build.gradle
index 6e2a1029f61..226158ca094 100644
--- a/core/build.gradle
+++ b/core/build.gradle
@@ -42,6 +42,7 @@ dependencies {
   compile "org.apache.lucene:lucene-queryparser:${versions.lucene}"
   compile "org.apache.lucene:lucene-sandbox:${versions.lucene}"
   compile "org.apache.lucene:lucene-spatial:${versions.lucene}"
+  compile "org.apache.lucene:lucene-spatial-extras:${versions.lucene}"
   compile "org.apache.lucene:lucene-spatial3d:${versions.lucene}"
   compile "org.apache.lucene:lucene-suggest:${versions.lucene}"
 
@@ -72,7 +73,7 @@ dependencies {
   compile 'org.hdrhistogram:HdrHistogram:2.1.6'
 
   // lucene spatial
-  compile "com.spatial4j:spatial4j:${versions.spatial4j}", optional
+  compile "org.locationtech.spatial4j:spatial4j:${versions.spatial4j}", optional
   compile "com.vividsolutions:jts:${versions.jts}", optional
 
   // logging
@@ -169,11 +170,6 @@ thirdPartyAudit.excludes = [
   'org.apache.commons.logging.Log',
   'org.apache.commons.logging.LogFactory',
 
-  // from org.apache.lucene.sandbox.queries.regex.JakartaRegexpCapabilities$JakartaRegexMatcher (lucene-sandbox)
-  'org.apache.regexp.CharacterIterator',
-  'org.apache.regexp.RE',
-  'org.apache.regexp.REProgram',
-
   // from org.jboss.netty.handler.ssl.OpenSslEngine (netty)
   'org.apache.tomcat.jni.Buffer',
   'org.apache.tomcat.jni.Library',
@@ -211,7 +207,7 @@ thirdPartyAudit.excludes = [
   'org.jboss.marshalling.MarshallingConfiguration',
   'org.jboss.marshalling.Unmarshaller',
 
-  // from com.spatial4j.core.io.GeoJSONReader (spatial4j)
+  // from org.locationtech.spatial4j.io.GeoJSONReader (spatial4j)
   'org.noggit.JSONParser',
 
   // from org.jboss.netty.container.osgi.NettyBundleActivator (netty)
diff --git a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
index 798fac01a7a..4e24944ffac 100644
--- a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
+++ b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
@@ -33,7 +33,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.InPlaceMergeSorter;
-import org.apache.lucene.util.ToStringUtils;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -247,14 +246,15 @@ public abstract class BlendedTermQuery extends Query {
             if (boosts != null) {
                 boost = boosts[i];
             }
-            builder.append(ToStringUtils.boost(boost));
+            if (boost != 1f) {
+                builder.append('^').append(boost);
+            }
             builder.append(", ");
         }
         if (terms.length > 0) {
             builder.setLength(builder.length() - 2);
         }
         builder.append("])");
-        builder.append(ToStringUtils.boost(getBoost()));
         return builder.toString();
     }
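Both changes above follow from Lucene 6 removing `Query#getBoost`/`setBoost` (and `ToStringUtils.boost`): a boost is now carried by an immutable `BoostQuery` wrapper, and `toString()` has to print the `^boost` suffix by hand, only when it differs from 1. A minimal sketch of the pattern (standard Lucene 6 API; names are illustrative):

```java
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

class BoostExample {
    // Instead of the removed query.setBoost(2f), wrap the immutable query.
    static Query boosted(String field, String text, float boost) {
        Query q = new TermQuery(new Term(field, text));
        return boost == 1f ? q : new BoostQuery(q, boost);
    }

    // Replacement for ToStringUtils.boost(): print "^boost" only when != 1.
    static void appendBoost(StringBuilder sb, float boost) {
        if (boost != 1f) {
            sb.append('^').append(boost);
        }
    }
}
```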
diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
index f153cd53c55..a7c53a56bc4 100644
--- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
+++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
@@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.DisjunctionMaxQuery;
 import org.apache.lucene.search.FuzzyQuery;
@@ -165,7 +166,7 @@ public class MapperQueryParser extends QueryParser {
                 }
                 if (clauses.size() == 0)  // happens for stopwords
                     return null;
-                return getBooleanQuery(clauses, true);
+                return getBooleanQueryCoordDisabled(clauses);
             }
         } else {
             return getFieldQuerySingle(field, queryText, quoted);
@@ -267,7 +268,7 @@ public class MapperQueryParser extends QueryParser {
                 }
                 if (clauses.size() == 0)  // happens for stopwords
                     return null;
-                return getBooleanQuery(clauses, true);
+                return getBooleanQueryCoordDisabled(clauses);
             }
         } else {
             return super.getFieldQuery(field, queryText, slop);
@@ -318,7 +319,7 @@ public class MapperQueryParser extends QueryParser {
             }
             if (clauses.size() == 0)  // happens for stopwords
                 return null;
-            return getBooleanQuery(clauses, true);
+            return getBooleanQueryCoordDisabled(clauses);
         }
     }
@@ -380,7 +381,7 @@ public class MapperQueryParser extends QueryParser {
                     clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
                 }
             }
-            return getBooleanQuery(clauses, true);
+            return getBooleanQueryCoordDisabled(clauses);
         } else {
             return getFuzzyQuerySingle(field, termStr, minSimilarity);
@@ -445,7 +446,7 @@ public class MapperQueryParser extends QueryParser {
             }
             if (clauses.size() == 0)  // happens for stopwords
                 return null;
-            return getBooleanQuery(clauses, true);
+            return getBooleanQueryCoordDisabled(clauses);
         } else {
             return getPrefixQuerySingle(field, termStr);
@@ -520,7 +521,7 @@ public class MapperQueryParser extends QueryParser {
             for (String token : tlist) {
                 clauses.add(new BooleanClause(super.getPrefixQuery(field, token), BooleanClause.Occur.SHOULD));
             }
-            return getBooleanQuery(clauses, true);
+            return getBooleanQueryCoordDisabled(clauses);
         }
     }
@@ -575,7 +576,7 @@ public class MapperQueryParser extends QueryParser {
             }
             if (clauses.size() == 0)  // happens for stopwords
                 return null;
-            return getBooleanQuery(clauses, true);
+            return getBooleanQueryCoordDisabled(clauses);
         } else {
             return getWildcardQuerySingle(field, termStr);
@@ -704,7 +705,7 @@ public class MapperQueryParser extends QueryParser {
             }
             if (clauses.size() == 0)  // happens for stopwords
                 return null;
-            return getBooleanQuery(clauses, true);
+            return getBooleanQueryCoordDisabled(clauses);
         } else {
             return getRegexpQuerySingle(field, termStr);
@@ -739,10 +740,24 @@ public class MapperQueryParser extends QueryParser {
             setAnalyzer(oldAnalyzer);
         }
     }
+
+    /**
+     * @deprecated review all use of this, don't rely on coord
+     */
+    @Deprecated
+    protected Query getBooleanQueryCoordDisabled(List<BooleanClause> clauses) throws ParseException {
+        BooleanQuery.Builder builder = new BooleanQuery.Builder();
+        builder.setDisableCoord(true);
+        for (BooleanClause clause : clauses) {
+            builder.add(clause);
+        }
+        return fixNegativeQueryIfNeeded(builder.build());
+    }
+
     @Override
-    protected Query getBooleanQuery(List<BooleanClause> clauses, boolean disableCoord) throws ParseException {
-        Query q = super.getBooleanQuery(clauses, disableCoord);
+    protected Query getBooleanQuery(List<BooleanClause> clauses) throws ParseException {
+        Query q = super.getBooleanQuery(clauses);
         if (q == null) {
             return null;
         }
@@ -769,7 +784,6 @@ public class MapperQueryParser extends QueryParser {
             }
             pq = builder.build();
             //make sure that the boost hasn't been set beforehand, otherwise we'd lose it
-            assert q.getBoost() == 1f;
             assert q instanceof BoostQuery == false;
             return pq;
         } else if (q instanceof MultiPhraseQuery) {
diff --git a/core/src/main/java/org/apache/lucene/search/XFilteredDocIdSetIterator.java b/core/src/main/java/org/apache/lucene/search/XFilteredDocIdSetIterator.java
index 92f2f443f0a..8d1617d3ab4 100644
--- a/core/src/main/java/org/apache/lucene/search/XFilteredDocIdSetIterator.java
+++ b/core/src/main/java/org/apache/lucene/search/XFilteredDocIdSetIterator.java
@@ -26,8 +26,7 @@ import java.io.IOException;
 /**
  * Abstract decorator class of a DocIdSetIterator
  * implementation that provides on-demand filter/validation
- * mechanism on an underlying DocIdSetIterator.  See {@link
- * FilteredDocIdSet}.
+ * mechanism on an underlying DocIdSetIterator.
  */
 public abstract class XFilteredDocIdSetIterator extends DocIdSetIterator {
     protected DocIdSetIterator _innerIter;
diff --git a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java
index 11b56bdcfe1..089b649cefe 100644
--- a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java
+++ b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java
@@ -87,7 +87,7 @@ public class CustomFieldQuery extends FieldQuery {
         if (numTerms > 16) {
             for (Term[] currentPosTerm : terms) {
                 for (Term term : currentPosTerm) {
-                    super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost());
+                    super.flatten(new TermQuery(term), reader, flatQueries, 1F);
                 }
             }
             return;
@@ -104,7 +104,7 @@ public class CustomFieldQuery extends FieldQuery {
                 queryBuilder.add(terms.get(i)[termsIdx[i]], pos[i]);
             }
             Query query = queryBuilder.build();
-            this.flatten(query, reader, flatQueries, orig.getBoost());
+            this.flatten(query, reader, flatQueries, 1F);
         } else {
             Term[] t = terms.get(currentPos);
             for (int i = 0; i < t.length; i++) {
diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java
index 1c9a5464bb2..eeb4825cb90 100644
--- a/core/src/main/java/org/elasticsearch/Version.java
+++ b/core/src/main/java/org/elasticsearch/Version.java
@@ -35,212 +35,10 @@ import java.io.IOException;
 @SuppressWarnings("deprecation")
 public class Version {
 
-    // The logic for ID is: XXYYZZAA, where XX is major version, YY is minor version, ZZ is revision, and AA is Beta/RC indicator
-    // AA values below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release
+    // The logic for ID is: XXYYZZAA, where XX is major version, YY is minor version, ZZ is revision, and AA is alpha/beta/rc indicator
+    // AA values below 25 are alpha builds (since 5.0), values from 25 to below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release
     // the (internal) format of the id is there so we can easily do after/before checks on the id
-    // NOTE: indexes created with 3.6 use this constant for e.g. analysis chain emulation (imperfect)
-    public static final org.apache.lucene.util.Version LUCENE_3_EMULATION_VERSION = org.apache.lucene.util.Version.LUCENE_4_0_0;
-
-    public static final int V_0_18_0_ID = /*00*/180099;
-    public static final Version V_0_18_0 = new Version(V_0_18_0_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_18_1_ID = /*00*/180199;
-    public static final Version V_0_18_1 = new Version(V_0_18_1_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_18_2_ID = /*00*/180299;
-    public static final Version V_0_18_2 = new Version(V_0_18_2_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_18_3_ID = /*00*/180399;
-    public static final Version V_0_18_3 = new Version(V_0_18_3_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_18_4_ID = /*00*/180499;
-    public static final Version V_0_18_4 = new Version(V_0_18_4_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_18_5_ID = /*00*/180599;
-    public static final Version V_0_18_5 = new Version(V_0_18_5_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_18_6_ID = /*00*/180699;
-    public static final Version V_0_18_6 = new Version(V_0_18_6_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_18_7_ID = /*00*/180799;
-    public static final Version V_0_18_7 = new Version(V_0_18_7_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_18_8_ID = /*00*/180899;
-    public static final Version V_0_18_8 = new Version(V_0_18_8_ID, LUCENE_3_EMULATION_VERSION);
-
-    public static final int V_0_19_0_RC1_ID = /*00*/190051;
-    public static final Version V_0_19_0_RC1 = new Version(V_0_19_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
-
-    public static final int V_0_19_0_RC2_ID = /*00*/190052;
-    public static final Version V_0_19_0_RC2 = new Version(V_0_19_0_RC2_ID, LUCENE_3_EMULATION_VERSION);
-
-    public static final int V_0_19_0_RC3_ID = /*00*/190053;
-    public static final Version V_0_19_0_RC3 = new Version(V_0_19_0_RC3_ID, LUCENE_3_EMULATION_VERSION);
-
-    public static final int V_0_19_0_ID = /*00*/190099;
-    public static final Version V_0_19_0 = new Version(V_0_19_0_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_1_ID = /*00*/190199;
-    public static final Version V_0_19_1 = new Version(V_0_19_1_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_2_ID = /*00*/190299;
-    public static final Version V_0_19_2 = new Version(V_0_19_2_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_3_ID = /*00*/190399;
-    public static final Version V_0_19_3 = new Version(V_0_19_3_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_4_ID = /*00*/190499;
-    public static final Version V_0_19_4 = new Version(V_0_19_4_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_5_ID = /*00*/190599;
-    public static final Version V_0_19_5 = new Version(V_0_19_5_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_6_ID = /*00*/190699;
-    public static final Version V_0_19_6 = new Version(V_0_19_6_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_7_ID = /*00*/190799;
-    public static final Version V_0_19_7 = new Version(V_0_19_7_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_8_ID = /*00*/190899;
-    public static final Version V_0_19_8 = new Version(V_0_19_8_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_9_ID = /*00*/190999;
-    public static final Version V_0_19_9 = new Version(V_0_19_9_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_10_ID = /*00*/191099;
-    public static final Version V_0_19_10 = new Version(V_0_19_10_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_11_ID = /*00*/191199;
-    public static final Version V_0_19_11 = new Version(V_0_19_11_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_12_ID = /*00*/191299;
-    public static final Version V_0_19_12 = new Version(V_0_19_12_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_19_13_ID = /*00*/191399;
-    public static final Version V_0_19_13 = new Version(V_0_19_13_ID, LUCENE_3_EMULATION_VERSION);
-
-    public static final int V_0_20_0_RC1_ID = /*00*/200051;
-    public static final Version V_0_20_0_RC1 = new Version(V_0_20_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_20_0_ID = /*00*/200099;
-    public static final Version V_0_20_0 = new Version(V_0_20_0_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_20_1_ID = /*00*/200199;
-    public static final Version V_0_20_1 = new Version(V_0_20_1_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_20_2_ID = /*00*/200299;
-    public static final Version V_0_20_2 = new Version(V_0_20_2_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_20_3_ID = /*00*/200399;
-    public static final Version V_0_20_3 = new Version(V_0_20_3_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_20_4_ID = /*00*/200499;
-    public static final Version V_0_20_4 = new Version(V_0_20_4_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_20_5_ID = /*00*/200599;
-    public static final Version V_0_20_5 = new Version(V_0_20_5_ID, LUCENE_3_EMULATION_VERSION);
-    public static final int V_0_20_6_ID = /*00*/200699;
-    public static final Version V_0_20_6 = new Version(V_0_20_6_ID, LUCENE_3_EMULATION_VERSION);
-
-    public static final int V_0_90_0_Beta1_ID = /*00*/900001;
-    public static final Version V_0_90_0_Beta1 = new Version(V_0_90_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_1);
-    public static final int V_0_90_0_RC1_ID = /*00*/900051;
-    public static final Version V_0_90_0_RC1 = new Version(V_0_90_0_RC1_ID, org.apache.lucene.util.Version.LUCENE_4_1);
-    public static final int V_0_90_0_RC2_ID = /*00*/900052;
-    public static final Version V_0_90_0_RC2 = new Version(V_0_90_0_RC2_ID, org.apache.lucene.util.Version.LUCENE_4_2);
-    public static final int V_0_90_0_ID = /*00*/900099;
-    public static final Version V_0_90_0 = new Version(V_0_90_0_ID, org.apache.lucene.util.Version.LUCENE_4_2);
-    public static final int V_0_90_1_ID = /*00*/900199;
-    public static final Version V_0_90_1 = new Version(V_0_90_1_ID, org.apache.lucene.util.Version.LUCENE_4_3);
-    public static final int V_0_90_2_ID = /*00*/900299;
-    public static final Version V_0_90_2 = new Version(V_0_90_2_ID, org.apache.lucene.util.Version.LUCENE_4_3);
-    public static final int V_0_90_3_ID = /*00*/900399;
-    public static final Version V_0_90_3 = new Version(V_0_90_3_ID, org.apache.lucene.util.Version.LUCENE_4_4);
-    public static final int V_0_90_4_ID = /*00*/900499;
-    public static final Version V_0_90_4 = new Version(V_0_90_4_ID, org.apache.lucene.util.Version.LUCENE_4_4);
-    public static final int V_0_90_5_ID = /*00*/900599;
-    public static final Version V_0_90_5 = new Version(V_0_90_5_ID, org.apache.lucene.util.Version.LUCENE_4_4);
-    public static final int V_0_90_6_ID = /*00*/900699;
-    public static final Version V_0_90_6 = new Version(V_0_90_6_ID, org.apache.lucene.util.Version.LUCENE_4_5);
-    public static final int V_0_90_7_ID = /*00*/900799;
-    public static final Version V_0_90_7 = new Version(V_0_90_7_ID, org.apache.lucene.util.Version.LUCENE_4_5);
-    public static final int V_0_90_8_ID = /*00*/900899;
-    public static final Version V_0_90_8 = new Version(V_0_90_8_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_0_90_9_ID = /*00*/900999;
-    public static final Version V_0_90_9 = new Version(V_0_90_9_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_0_90_10_ID = /*00*/901099;
-    public static final Version V_0_90_10 = new Version(V_0_90_10_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_0_90_11_ID = /*00*/901199;
-    public static final Version V_0_90_11 = new Version(V_0_90_11_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_0_90_12_ID = /*00*/901299;
-    public static final Version V_0_90_12 = new Version(V_0_90_12_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_0_90_13_ID = /*00*/901399;
-    public static final Version V_0_90_13 = new Version(V_0_90_13_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-
-    public static final int V_1_0_0_Beta1_ID = 1000001;
-    public static final Version V_1_0_0_Beta1 = new Version(V_1_0_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_5);
-    public static final int V_1_0_0_Beta2_ID = 1000002;
-    public static final Version V_1_0_0_Beta2 = new Version(V_1_0_0_Beta2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_1_0_0_RC1_ID = 1000051;
-    public static final Version V_1_0_0_RC1 = new Version(V_1_0_0_RC1_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_1_0_0_RC2_ID = 1000052;
-    public static final Version V_1_0_0_RC2 = new Version(V_1_0_0_RC2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_1_0_0_ID = 1000099;
-    public static final Version V_1_0_0 = new Version(V_1_0_0_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_1_0_1_ID = 1000199;
-    public static final Version V_1_0_1 = new Version(V_1_0_1_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_1_0_2_ID = 1000299;
-    public static final Version V_1_0_2 = new Version(V_1_0_2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_1_0_3_ID = 1000399;
-    public static final Version V_1_0_3 = new Version(V_1_0_3_ID, org.apache.lucene.util.Version.LUCENE_4_6);
-    public static final int V_1_1_0_ID = 1010099;
-    public static final Version V_1_1_0 = new Version(V_1_1_0_ID, org.apache.lucene.util.Version.LUCENE_4_7);
-    public static final int V_1_1_1_ID = 1010199;
-    public static final Version V_1_1_1 = new Version(V_1_1_1_ID, org.apache.lucene.util.Version.LUCENE_4_7);
-    public static final int V_1_1_2_ID = 1010299;
-    public static final Version V_1_1_2 = new Version(V_1_1_2_ID, org.apache.lucene.util.Version.LUCENE_4_7);
-    public static final int V_1_2_0_ID = 1020099;
-    public static final Version V_1_2_0 = new Version(V_1_2_0_ID, org.apache.lucene.util.Version.LUCENE_4_8);
-    public static final int V_1_2_1_ID = 1020199;
-    public static final Version V_1_2_1 = new Version(V_1_2_1_ID, org.apache.lucene.util.Version.LUCENE_4_8);
-    public static final int V_1_2_2_ID = 1020299;
-    public static final Version V_1_2_2 = new Version(V_1_2_2_ID, org.apache.lucene.util.Version.LUCENE_4_8);
-    public static final int V_1_2_3_ID = 1020399;
-    public static final Version V_1_2_3 = new Version(V_1_2_3_ID, org.apache.lucene.util.Version.LUCENE_4_8);
-    public static final int V_1_2_4_ID = 1020499;
-    public static final Version V_1_2_4 = new Version(V_1_2_4_ID, org.apache.lucene.util.Version.LUCENE_4_8);
-    public static final int V_1_3_0_ID = 1030099;
-    public static final Version V_1_3_0 = new Version(V_1_3_0_ID, org.apache.lucene.util.Version.LUCENE_4_9);
-    public static final int V_1_3_1_ID = 1030199;
-    public static final Version V_1_3_1 = new Version(V_1_3_1_ID, org.apache.lucene.util.Version.LUCENE_4_9);
-    public static final int V_1_3_2_ID = 1030299;
-    public static final Version V_1_3_2 = new Version(V_1_3_2_ID, org.apache.lucene.util.Version.LUCENE_4_9);
-    public static final int V_1_3_3_ID = 1030399;
-    public static final Version V_1_3_3 = new Version(V_1_3_3_ID, org.apache.lucene.util.Version.LUCENE_4_9);
-    public static final int V_1_3_4_ID = 1030499;
-    public static final Version V_1_3_4 = new Version(V_1_3_4_ID, org.apache.lucene.util.Version.LUCENE_4_9);
-    public static final int V_1_3_5_ID = 1030599;
-    public static final Version V_1_3_5 = new Version(V_1_3_5_ID, org.apache.lucene.util.Version.LUCENE_4_9);
-    public static final int V_1_3_6_ID = 1030699;
-    public static final Version V_1_3_6 = new Version(V_1_3_6_ID, org.apache.lucene.util.Version.LUCENE_4_9);
-    public static final int V_1_3_7_ID = 1030799;
-    public static final Version V_1_3_7 = new Version(V_1_3_7_ID, org.apache.lucene.util.Version.LUCENE_4_9);
-    public static final int V_1_3_8_ID = 1030899;
-    public static final Version V_1_3_8 = new Version(V_1_3_8_ID, org.apache.lucene.util.Version.LUCENE_4_9);
-    public static final int V_1_3_9_ID = 1030999;
-    public static final Version V_1_3_9 = new Version(V_1_3_9_ID, org.apache.lucene.util.Version.LUCENE_4_9);
-    public static final int V_1_4_0_Beta1_ID = 1040001;
-    public static final Version V_1_4_0_Beta1 = new Version(V_1_4_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_10_1);
-    public static final int V_1_4_0_ID = 1040099;
-    public static final Version V_1_4_0 = new Version(V_1_4_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
-    public static final int V_1_4_1_ID = 1040199;
-    public static final Version V_1_4_1 = new Version(V_1_4_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
-    public static final int V_1_4_2_ID = 1040299;
-    public static final Version V_1_4_2 = new Version(V_1_4_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
-    public static final int V_1_4_3_ID = 1040399;
-    public static final Version V_1_4_3 = new Version(V_1_4_3_ID, org.apache.lucene.util.Version.LUCENE_4_10_3);
-    public static final int V_1_4_4_ID = 1040499;
-    public static final Version V_1_4_4 = new Version(V_1_4_4_ID, org.apache.lucene.util.Version.LUCENE_4_10_3);
-    public static final int V_1_4_5_ID = 1040599;
-    public static final Version V_1_4_5 = new Version(V_1_4_5_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_5_0_ID = 1050099;
-    public static final Version V_1_5_0 = new Version(V_1_5_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_5_1_ID = 1050199;
-    public static final Version V_1_5_1 = new Version(V_1_5_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_5_2_ID = 1050299;
-    public static final Version V_1_5_2 = new Version(V_1_5_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_6_0_ID = 1060099;
-    public static final Version V_1_6_0 = new Version(V_1_6_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_6_1_ID = 1060199;
-    public static final Version V_1_6_1 = new Version(V_1_6_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_6_2_ID = 1060299;
-    public static final Version V_1_6_2 = new Version(V_1_6_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_7_0_ID = 1070099;
-    public static final Version V_1_7_0 = new Version(V_1_7_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_7_1_ID = 1070199;
-    public static final Version V_1_7_1 = new Version(V_1_7_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_7_2_ID = 1070299;
-    public static final Version V_1_7_2 = new Version(V_1_7_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_7_3_ID = 1070399;
-    public static final Version V_1_7_3 = new Version(V_1_7_3_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_7_4_ID = 1070499;
-    public static final Version V_1_7_4 = new Version(V_1_7_4_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
-    public static final int V_1_7_5_ID = 1070599;
-    public static final Version V_1_7_5 = new Version(V_1_7_5_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
 
     public static final int V_2_0_0_beta1_ID = 2000001;
     public static final Version V_2_0_0_beta1 = new Version(V_2_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
@@ -265,7 +63,7 @@ public class Version {
     public static final int V_2_3_0_ID = 2030099;
     public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
     public static final int V_5_0_0_ID = 5000099;
-    public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
+    public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
     public static final Version CURRENT = V_5_0_0;
 
     static {
@@ -303,198 +101,6 @@ public class Version {
                 return V_2_0_0_beta2;
             case V_2_0_0_beta1_ID:
                 return V_2_0_0_beta1;
-            case V_1_7_5_ID:
-                return V_1_7_5;
-            case V_1_7_4_ID:
-                return V_1_7_4;
-            case V_1_7_3_ID:
-                return V_1_7_3;
-            case V_1_7_2_ID:
-                return V_1_7_2;
-            case V_1_7_1_ID:
-                return V_1_7_1;
-            case V_1_7_0_ID:
-                return V_1_7_0;
-            case V_1_6_2_ID:
-                return V_1_6_2;
-            case V_1_6_1_ID:
-                return V_1_6_1;
-            case V_1_6_0_ID:
-                return V_1_6_0;
-            case V_1_5_2_ID:
-                return V_1_5_2;
-            case V_1_5_1_ID:
-                return V_1_5_1;
-            case V_1_5_0_ID:
-                return V_1_5_0;
-            case V_1_4_5_ID:
-                return V_1_4_5;
-            case V_1_4_4_ID:
-                return V_1_4_4;
-            case V_1_4_3_ID:
-                return V_1_4_3;
-            case V_1_4_2_ID:
-                return V_1_4_2;
-            case V_1_4_1_ID:
-                return V_1_4_1;
-            case V_1_4_0_ID:
-                return V_1_4_0;
-            case V_1_4_0_Beta1_ID:
-                return V_1_4_0_Beta1;
-            case V_1_3_9_ID:
-                return V_1_3_9;
-            case V_1_3_8_ID:
-                return V_1_3_8;
-            case V_1_3_7_ID:
-                return V_1_3_7;
-            case V_1_3_6_ID:
-                return V_1_3_6;
-            case V_1_3_5_ID:
-                return V_1_3_5;
-            case V_1_3_4_ID:
-                return V_1_3_4;
-            case V_1_3_3_ID:
-                return V_1_3_3;
-            case V_1_3_2_ID:
-                return V_1_3_2;
-            case V_1_3_1_ID:
-                return V_1_3_1;
-            case V_1_3_0_ID:
-                return V_1_3_0;
-            case V_1_2_4_ID:
-                return V_1_2_4;
-            case V_1_2_3_ID:
-                return V_1_2_3;
-            case V_1_2_2_ID:
-                return V_1_2_2;
-            case V_1_2_1_ID:
-                return V_1_2_1;
-            case V_1_2_0_ID:
-                return V_1_2_0;
-            case V_1_1_2_ID:
-                return V_1_1_2;
-            case V_1_1_1_ID:
-                return V_1_1_1;
-            case V_1_1_0_ID:
-                return V_1_1_0;
-            case V_1_0_3_ID:
-                return V_1_0_3;
-            case V_1_0_2_ID:
-                return V_1_0_2;
-            case V_1_0_1_ID:
-                return V_1_0_1;
-            case V_1_0_0_ID:
-                return V_1_0_0;
-            case V_1_0_0_RC2_ID:
-                return V_1_0_0_RC2;
-            case V_1_0_0_RC1_ID:
-                return V_1_0_0_RC1;
-            case V_1_0_0_Beta2_ID:
-                return V_1_0_0_Beta2;
-            case V_1_0_0_Beta1_ID:
-                return V_1_0_0_Beta1;
-            case V_0_90_13_ID:
-                return V_0_90_13;
-            case V_0_90_12_ID:
-                return V_0_90_12;
-            case V_0_90_11_ID:
-                return V_0_90_11;
-            case V_0_90_10_ID:
-                return V_0_90_10;
-            case V_0_90_9_ID:
-                return V_0_90_9;
-            case V_0_90_8_ID:
-                return V_0_90_8;
-            case V_0_90_7_ID:
-                return V_0_90_7;
-            case V_0_90_6_ID:
-                return V_0_90_6;
-            case V_0_90_5_ID:
-                return V_0_90_5;
-            case V_0_90_4_ID:
-                return V_0_90_4;
-            case V_0_90_3_ID:
-                return V_0_90_3;
-            case V_0_90_2_ID:
-                return V_0_90_2;
-            case V_0_90_1_ID:
-                return V_0_90_1;
-            case V_0_90_0_ID:
-                return V_0_90_0;
-            case V_0_90_0_RC2_ID:
-                return V_0_90_0_RC2;
-            case V_0_90_0_RC1_ID:
-                return V_0_90_0_RC1;
-            case V_0_90_0_Beta1_ID:
-                return V_0_90_0_Beta1;
-            case V_0_20_6_ID:
-                return V_0_20_6;
-            case V_0_20_5_ID:
-                return V_0_20_5;
-            case V_0_20_4_ID:
-                return V_0_20_4;
-            case V_0_20_3_ID:
-                return V_0_20_3;
-            case V_0_20_2_ID:
-                return V_0_20_2;
-            case V_0_20_1_ID:
-                return V_0_20_1;
-            case V_0_20_0_ID:
-                return V_0_20_0;
-            case V_0_20_0_RC1_ID:
-                return V_0_20_0_RC1;
-            case V_0_19_0_RC1_ID:
-                return V_0_19_0_RC1;
-            case V_0_19_0_RC2_ID:
-                return V_0_19_0_RC2;
-            case V_0_19_0_RC3_ID:
-                return V_0_19_0_RC3;
-            case V_0_19_0_ID:
-                return V_0_19_0;
-            case V_0_19_1_ID:
-                return V_0_19_1;
-            case V_0_19_2_ID:
-                return V_0_19_2;
-            case V_0_19_3_ID:
-                return V_0_19_3;
-            case V_0_19_4_ID:
-                return V_0_19_4;
-            case V_0_19_5_ID:
-                return V_0_19_5;
-            case V_0_19_6_ID:
-                return V_0_19_6;
-            case V_0_19_7_ID:
-                return V_0_19_7;
-            case V_0_19_8_ID:
-                return V_0_19_8;
-            case V_0_19_9_ID:
-                return V_0_19_9;
-            case V_0_19_10_ID:
-                return V_0_19_10;
-            case V_0_19_11_ID:
-                return V_0_19_11;
-            case V_0_19_12_ID:
-                return V_0_19_12;
-            case V_0_19_13_ID:
-                return V_0_19_13;
-            case V_0_18_0_ID:
-                return V_0_18_0;
-            case V_0_18_1_ID:
-                return V_0_18_1;
-            case V_0_18_2_ID:
-                return V_0_18_2;
-            case V_0_18_3_ID:
-                return V_0_18_3;
-            case V_0_18_4_ID:
-                return V_0_18_4;
-            case V_0_18_5_ID:
-                return V_0_18_5;
-            case V_0_18_6_ID:
-                return V_0_18_6;
-            case V_0_18_7_ID:
-                return V_0_18_7;
-            case V_0_18_8_ID:
-                return V_0_18_8;
             default:
                 return new Version(id, org.apache.lucene.util.Version.LATEST);
         }
@@ -531,15 +137,23 @@ public class Version {
         if (!Strings.hasLength(version)) {
             return Version.CURRENT;
         }
+        final boolean snapshot; // this is some BWC for 2.x and before indices
+        if (snapshot = version.endsWith("-SNAPSHOT")) {
+            version = version.substring(0, version.length() - 9);
+        }
         String[] parts = version.split("\\.|\\-");
         if (parts.length < 3 || parts.length > 4) {
             throw new IllegalArgumentException("the version needs to contain major, minor, and revision, and optionally the build: " + version);
         }
 
         try {
-
+            final int rawMajor = Integer.parseInt(parts[0]);
+            if (rawMajor >= 5 && snapshot) { // we don't support snapshot as part of the version here anymore
+                throw new IllegalArgumentException("illegal version format - snapshots are only supported until version 2.x");
+            }
+            final int betaOffset = rawMajor < 5 ? 0 : 25;
+            //we reverse the version id calculation based on some assumption as we can't reliably reverse the modulo
-            final int major = Integer.parseInt(parts[0]) * 1000000;
+            final int major = rawMajor * 1000000;
             final int minor = Integer.parseInt(parts[1]) * 10000;
             final int revision = Integer.parseInt(parts[2]) * 100;
 
@@ -547,11 +161,17 @@ public class Version {
             int build = 99;
             if (parts.length == 4) {
                 String buildStr = parts[3];
-                if (buildStr.startsWith("Beta") || buildStr.startsWith("beta")) {
-                    build = Integer.parseInt(buildStr.substring(4));
-                }
-                if (buildStr.startsWith("RC") || buildStr.startsWith("rc")) {
+                if (buildStr.startsWith("alpha")) {
+                    assert rawMajor >= 5 : "major must be >= 5 but was " + major;
+                    build = Integer.parseInt(buildStr.substring(5));
+                    assert build < 25 : "expected an alpha build but " + build + " >= 25";
+                } else if (buildStr.startsWith("Beta") || buildStr.startsWith("beta")) {
+                    build = betaOffset + Integer.parseInt(buildStr.substring(4));
+                    assert build < 50 : "expected a beta build but " + build + " >= 50";
+                } else if (buildStr.startsWith("RC") || buildStr.startsWith("rc")) {
                     build = Integer.parseInt(buildStr.substring(2)) + 50;
+                } else {
+                    throw new IllegalArgumentException("unable to parse version " + version);
                 }
             }
 
@@ -614,13 +234,16 @@ public class Version {
     public String toString() {
         StringBuilder sb = new StringBuilder();
         sb.append(major).append('.').append(minor).append('.').append(revision);
-        if (isBeta()) {
+        if (isAlpha()) {
+            sb.append("-alpha");
+            sb.append(build);
+        } else if (isBeta()) {
             if (major >= 2) {
                 sb.append("-beta");
             } else {
                 sb.append(".Beta");
             }
-            sb.append(build);
+            sb.append(major < 5 ? build : build - 25);
         } else if (build < 99) {
             if (major >= 2) {
                 sb.append("-rc");
@@ -656,7 +279,16 @@ public class Version {
     }
 
     public boolean isBeta() {
-        return build < 50;
+        return major < 5 ? build < 50 : build >= 25 && build < 50;
+    }
+
+    /**
+     * Returns true iff this version is an alpha version
+     * Note: This has been introduced in elasticsearch version 5. Previous versions will never
+     * have an alpha version.
+     */
+    public boolean isAlpha() {
+        return major < 5 ? false : build < 25;
     }
 
     public boolean isRC() {
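To make the XXYYZZAA arithmetic above concrete: the ID packs major/minor/revision/build into a decimal integer, and since 5.0 the AA slot is partitioned into alpha (below 25), beta (25 to 49, hence the betaOffset of 25), RC (50 to 98) and GA (99). A small sketch using only values stated in the diff:

```java
// Sketch of the XXYYZZAA version-id scheme described in the Version comment.
class VersionId {
    static int id(int major, int minor, int revision, int build) {
        return major * 1_000_000 + minor * 10_000 + revision * 100 + build;
    }

    public static void main(String[] args) {
        System.out.println(id(5, 0, 0, 99));     // 5000099 -> 5.0.0 (GA), matches V_5_0_0_ID
        System.out.println(id(5, 0, 0, 1));      // 5000001 -> 5.0.0-alpha1 (AA < 25)
        System.out.println(id(5, 0, 0, 25 + 1)); // 5000026 -> 5.0.0-beta1 (betaOffset 25)
        System.out.println(id(5, 0, 0, 50 + 1)); // 5000051 -> 5.0.0-rc1 (RC offset 50)
        System.out.println(id(2, 0, 0, 1));      // 2000001 -> 2.0.0-beta1, matches V_2_0_0_beta1_ID
    }
}
```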
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java
index ccae17b1eeb..946897a2c97 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java
@@ -197,9 +197,7 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXContent {
         numberOfPendingTasks = in.readInt();
         timedOut = in.readBoolean();
         numberOfInFlightFetch = in.readInt();
-        if (in.getVersion().onOrAfter(Version.V_1_7_0)) {
-            delayedUnassignedShards = in.readInt();
-        }
+        delayedUnassignedShards = in.readInt();
         taskMaxWaitingTime = TimeValue.readTimeValue(in);
     }
 
@@ -212,9 +210,7 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXContent {
         out.writeInt(numberOfPendingTasks);
         out.writeBoolean(timedOut);
         out.writeInt(numberOfInFlightFetch);
-        if (out.getVersion().onOrAfter(Version.V_1_7_0)) {
-            out.writeInt(delayedUnassignedShards);
-        }
+        out.writeInt(delayedUnassignedShards);
         taskMaxWaitingTime.writeTo(out);
     }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
index 1fa64d5e7b7..11c542863b5 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
@@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.http.HttpInfo;
+import org.elasticsearch.ingest.core.IngestInfo;
 import org.elasticsearch.monitor.jvm.JvmInfo;
 import org.elasticsearch.monitor.os.OsInfo;
 import org.elasticsearch.monitor.process.ProcessInfo;
@@ -74,12 +75,15 @@ public class NodeInfo extends BaseNodeResponse {
     @Nullable
     private PluginsAndModules plugins;
 
-    NodeInfo() {
+    @Nullable
+    private IngestInfo ingest;
+
+    public NodeInfo() {
     }
 
     public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Map<String, String> serviceAttributes, @Nullable Settings settings,
                     @Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool,
-                    @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins) {
+                    @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins, @Nullable IngestInfo ingest) {
         super(node);
         this.version = version;
         this.build = build;
@@ -92,6 +96,7 @@ public class NodeInfo extends BaseNodeResponse {
         this.transport = transport;
         this.http = http;
         this.plugins = plugins;
+        this.ingest = ingest;
     }
 
     /**
@@ -176,6 +181,11 @@ public class NodeInfo extends BaseNodeResponse {
         return this.plugins;
     }
 
+    @Nullable
+    public IngestInfo getIngest() {
+        return ingest;
+    }
+
     public static NodeInfo readNodeInfo(StreamInput in) throws IOException {
         NodeInfo nodeInfo = new NodeInfo();
         nodeInfo.readFrom(in);
@@ -220,6 +230,10 @@ public class NodeInfo extends BaseNodeResponse {
             plugins = new PluginsAndModules();
             plugins.readFrom(in);
         }
+        if (in.readBoolean()) {
+            ingest = new IngestInfo();
+            ingest.readFrom(in);
+        }
     }
 
     @Override
@@ -285,5 +299,11 @@ public class NodeInfo extends BaseNodeResponse {
             out.writeBoolean(true);
             plugins.writeTo(out);
         }
+        if (ingest == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            ingest.writeTo(out);
+        }
     }
 }
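The new nullable `ingest` field uses the usual optional-field framing on the wire: a boolean presence marker, then the payload only if present. The real code goes through Elasticsearch's `StreamInput`/`StreamOutput`; a minimal sketch of the same pattern with plain Java streams:

```java
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Presence-marker framing for a nullable field, as used for NodeInfo.ingest:
// write a boolean first, and only write the payload when it is non-null.
class OptionalField {
    static void write(DataOutputStream out, String value) throws IOException {
        if (value == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            out.writeUTF(value);
        }
    }

    static String read(DataInputStream in) throws IOException {
        return in.readBoolean() ? in.readUTF() : null;
    }
}
```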
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java
index 46a36f1d8a3..66c5cfd65d4 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java
@@ -38,6 +38,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
     private boolean transport = true;
     private boolean http = true;
     private boolean plugins = true;
+    private boolean ingest = true;
 
     public NodesInfoRequest() {
     }
@@ -62,6 +63,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
         transport = false;
         http = false;
         plugins = false;
+        ingest = false;
         return this;
     }
 
@@ -77,6 +79,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
         transport = true;
         http = true;
         plugins = true;
+        ingest = true;
         return this;
     }
 
@@ -202,6 +205,22 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
         return plugins;
     }
 
+    /**
+     * Should information about ingest be returned
+     * @param ingest true if you want info
+     */
+    public NodesInfoRequest ingest(boolean ingest) {
+        this.ingest = ingest;
+        return this;
+    }
+
+    /**
+     * @return true if information about ingest is requested
+     */
+    public boolean ingest() {
+        return ingest;
+    }
+
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
@@ -213,6 +232,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
         transport = in.readBoolean();
         http = in.readBoolean();
         plugins = in.readBoolean();
+        ingest = in.readBoolean();
     }
 
     @Override
@@ -226,5 +246,6 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
         out.writeBoolean(transport);
         out.writeBoolean(http);
         out.writeBoolean(plugins);
+        out.writeBoolean(ingest);
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java
index d73b3d47dfb..fc484012379 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java
@@ -110,4 +110,12 @@ public class NodesInfoRequestBuilder extends NodesOperationRequestBuilder<NodesInfoRequest, NodesInfoResponse, NodesInfoRequestBuilder> {
         return this;
     }
 
+    /**
+     * Should the node ingest info be returned.
+     */
+    public NodesInfoRequestBuilder setIngest(boolean ingest) {
+        request.ingest(ingest);
+        return this;
+    }
+
 }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java
@@ ... @@ public class NodesInfoResponse extends BaseNodesResponse<NodeInfo> implements ToXContent {
         if (nodeInfo.getPlugins() != null) {
             nodeInfo.getPlugins().toXContent(builder, params);
         }
+        if (nodeInfo.getIngest() != null) {
+            nodeInfo.getIngest().toXContent(builder, params);
+        }
         builder.endObject();
     }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java
index 2a763910ddd..b14450f9eb1 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java
@@ -80,7 +80,7 @@ public class TransportNodesInfoAction extends TransportNodesAction<NodesInfoRequest, NodesInfoResponse, TransportNodesInfoAction.NodeInfoRequest, NodeInfo> {
 
     @Override
     protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest) {
         NodesInfoRequest request = nodeRequest.request;
         return nodeService.info(request.os(), request.process(), request.jvm(), request.threadPool(),
-                request.transport(), request.http(), request.plugins());
+                request.transport(), request.http(), request.plugins(), request.ingest());
     }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java
@@ ... @@ public class CancelTasksRequest extends BaseTasksRequest<CancelTasksRequest> {
         return super.match(task) && task instanceof CancellableTask;
     }
 
-    public CancelTasksRequest reason(String reason) {
+    /**
+     * Set the reason for canceling the task.
+     */
+    public CancelTasksRequest setReason(String reason) {
         this.reason = reason;
         return this;
     }
 
-    public String reason() {
+    /**
+     * The reason for canceling the task.
+     */
+    public String getReason() {
         return reason;
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java
index b07e540d792..874f230587d 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java
@@ -84,21 +84,21 @@ public class TransportCancelTasksAction extends TransportTasksAction<CancellableTask, CancelTasksRequest, CancelTasksResponse, TaskInfo> {
     protected void processTasks(CancelTasksRequest request, Consumer<CancellableTask> operation) {
-        if (request.taskId().isSet() == false) {
+        if (request.getTaskId().isSet() == false) {
             // we are only checking one task, we can optimize it
-            CancellableTask task = taskManager.getCancellableTask(request.taskId().getId());
+            CancellableTask task = taskManager.getCancellableTask(request.getTaskId().getId());
             if (task != null) {
                 if (request.match(task)) {
                     operation.accept(task);
                 } else {
-                    throw new IllegalArgumentException("task [" + request.taskId() + "] doesn't support this operation");
+                    throw new IllegalArgumentException("task [" + request.getTaskId() + "] doesn't support this operation");
                 }
             } else {
-                if (taskManager.getTask(request.taskId().getId()) != null) {
+                if (taskManager.getTask(request.getTaskId().getId()) != null) {
                     // The task exists, but doesn't support cancellation
-                    throw new IllegalArgumentException("task [" + request.taskId() + "] doesn't support cancellation");
+                    throw new IllegalArgumentException("task [" + request.getTaskId() + "] doesn't support cancellation");
                 } else {
-                    throw new ResourceNotFoundException("task [{}] doesn't support cancellation", request.taskId());
+                    throw new ResourceNotFoundException("task [{}] doesn't support cancellation", request.getTaskId());
                 }
             }
         } else {
@@ -113,14 +113,14 @@ public class TransportCancelTasksAction extends TransportTasksAction<CancellableTask, CancelTasksRequest, CancelTasksResponse, TaskInfo> {
         }, () -> removeBanOnNodes(cancellableTask, nodes));
-        Set<String> childNodes = taskManager.cancel(cancellableTask, request.reason(), banLock::onTaskFinished);
+        Set<String> childNodes = taskManager.cancel(cancellableTask, request.getReason(), banLock::onTaskFinished);
         if (childNodes != null) {
             if (childNodes.isEmpty()) {
                 logger.trace("cancelling task {} with no children", cancellableTask.getId());
                 return cancellableTask.taskInfo(clusterService.localNode(), false);
             } else {
                 logger.trace("cancelling task {} with children on nodes [{}]", cancellableTask.getId(), childNodes);
-                setBanOnNodes(request.reason(), cancellableTask, childNodes, banLock);
+                setBanOnNodes(request.getReason(), cancellableTask, childNodes, banLock);
                 return cancellableTask.taskInfo(clusterService.localNode(), false);
             }
         } else {
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java
index 6bf8ac3e1ef..3fe743fc36a 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequest.java
@@ -31,31 +31,49 @@ import java.io.IOException;
 public class ListTasksRequest extends BaseTasksRequest<ListTasksRequest> {
 
     private boolean detailed = false;
+    private boolean waitForCompletion = false;
 
     /**
      * Should the detailed task information be returned.
     */
-    public boolean detailed() {
+    public boolean getDetailed() {
         return this.detailed;
     }
 
     /**
      * Should the detailed task information be returned.
      */
-    public ListTasksRequest detailed(boolean detailed) {
+    public ListTasksRequest setDetailed(boolean detailed) {
         this.detailed = detailed;
         return this;
     }
 
+    /**
+     * Should this request wait for all found tasks to complete?
+     */
+    public boolean getWaitForCompletion() {
+        return waitForCompletion;
+    }
+
+    /**
+     * Should this request wait for all found tasks to complete?
+     */
+    public ListTasksRequest setWaitForCompletion(boolean waitForCompletion) {
+        this.waitForCompletion = waitForCompletion;
+        return this;
+    }
+
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
         detailed = in.readBoolean();
+        waitForCompletion = in.readBoolean();
     }
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
         out.writeBoolean(detailed);
+        out.writeBoolean(waitForCompletion);
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java
index 2b462014f43..1385781125a 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java
@@ -35,7 +35,15 @@ public class ListTasksRequestBuilder extends TasksRequestBuilder<ListTasksRequest, ListTasksResponse, ListTasksRequestBuilder> {
     public ListTasksRequestBuilder setDetailed(boolean detailed) {
-        request.detailed(detailed);
+        request.setDetailed(detailed);
         return this;
     }
+
+    /**
+     * Should this request wait for all found tasks to complete?
+     */
+    public final ListTasksRequestBuilder setWaitForCompletion(boolean waitForCompletion) {
+        request.setWaitForCompletion(waitForCompletion);
+        return this;
+    }
 }
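With the accessors renamed to bean-style get/set, a caller-side view of the list-tasks request looks roughly like this (a hypothetical usage sketch; the action mask and timeout values are invented for illustration):

```java
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.common.unit.TimeValue;

class ListTasksUsage {
    // Every setter returns the request, so the renamed API still chains.
    static ListTasksRequest monitoringRequest() {
        return new ListTasksRequest()
                .setActions("indices:*")                     // filter by action mask
                .setDetailed(true)                           // include task descriptions
                .setWaitForCompletion(true)                  // block until matched tasks finish
                .setTimeout(TimeValue.timeValueSeconds(10)); // cap the wait
    }
}
```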
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java
@@ ... @@ public class TransportListTasksAction extends TransportTasksAction<Task, ListTasksRequest, ListTasksResponse, TaskInfo> {
+    private static final TimeValue WAIT_FOR_COMPLETION_POLL = timeValueMillis(100);
+    private static final TimeValue DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT = timeValueSeconds(30);
 
     @Inject
     public TransportListTasksAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService,
             TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
@@ -59,7 +67,34 @@ public class TransportListTasksAction extends TransportTasksAction<Task, ListTasksRequest, ListTasksResponse, TaskInfo> {
     }
 
+    @Override
+    protected void processTasks(ListTasksRequest request, Consumer<Task> operation) {
+        if (false == request.getWaitForCompletion()) {
+            super.processTasks(request, operation);
+            return;
+        }
+        // If we should wait for completion then we have to intercept every found task and wait for it to leave the manager.
+        TimeValue timeout = request.getTimeout();
+        if (timeout == null) {
+            timeout = DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT;
+        }
+        long timeoutTime = System.nanoTime() + timeout.nanos();
+        super.processTasks(request, operation.andThen((Task t) -> {
+            while (System.nanoTime() - timeoutTime < 0) {
+                if (taskManager.getTask(t.getId()) == null) {
+                    return;
+                }
+                try {
+                    Thread.sleep(WAIT_FOR_COMPLETION_POLL.millis());
+                } catch (InterruptedException e) {
+                    throw new ElasticsearchException("Interrupted waiting for completion of [{}]", e, t);
+                }
+            }
+            throw new ElasticsearchTimeoutException("Timed out waiting for completion of [{}]", t);
+        }));
+    }
 
     @Override
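One detail worth calling out in the polling loop above: the deadline test is written as `System.nanoTime() - timeoutTime < 0` rather than `System.nanoTime() < timeoutTime`, because `nanoTime` values may wrap and only differences between them are meaningful. A small sketch of the same idiom in isolation (names are illustrative):

```java
import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

class DeadlinePoll {
    // Poll until the condition holds or the deadline passes. The subtraction
    // form of the comparison stays correct even if nanoTime overflows.
    static boolean pollUntil(BooleanSupplier done, long timeoutMillis) throws InterruptedException {
        long deadline = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeoutMillis);
        while (System.nanoTime() - deadline < 0) {
            if (done.getAsBoolean()) {
                return true;
            }
            Thread.sleep(100); // matches the 100ms WAIT_FOR_COMPLETION_POLL above
        }
        return done.getAsBoolean();
    }
}
```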
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
index 0c883ccb377..4087fe6cfce 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
@@ -98,7 +98,7 @@ public class TransportClusterStatsAction extends TransportNodesAction<ClusterStatsRequest, ClusterStatsResponse, TransportClusterStatsAction.ClusterStatsNodeRequest, ClusterStatsNodeResponse> {
-        NodeInfo nodeInfo = nodeService.info(false, true, false, true, false, true, false, false, true);
+        NodeInfo nodeInfo = nodeService.info(false, true, false, true, false, true, false, false, true, false);
         List<ShardStats> shardsStats = new ArrayList<>();
         for (IndexService indexService : indicesService) {
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java
index 31a911207ab..aafd9ee75a4 100644
--- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java
+++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java
@@ -20,6 +20,10 @@
 package org.elasticsearch.action.ingest;
 
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
+import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
+import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
+import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
 import org.elasticsearch.cluster.ClusterService;
@@ -27,24 +31,32 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.ingest.PipelineStore;
+import org.elasticsearch.ingest.core.IngestInfo;
 import org.elasticsearch.node.service.NodeService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.HashMap;
+import java.util.Map;
+
 public class PutPipelineTransportAction extends TransportMasterNodeAction<PutPipelineRequest, WritePipelineResponse> {
 
     private final PipelineStore pipelineStore;
     private final ClusterService clusterService;
+    private final TransportNodesInfoAction nodesInfoAction;
 
     @Inject
     public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                       TransportService transportService, ActionFilters actionFilters,
-                                      IndexNameExpressionResolver indexNameExpressionResolver, NodeService nodeService) {
+                                      IndexNameExpressionResolver indexNameExpressionResolver, NodeService nodeService,
+                                      TransportNodesInfoAction nodesInfoAction) {
         super(settings, PutPipelineAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, PutPipelineRequest::new);
         this.clusterService = clusterService;
+        this.nodesInfoAction = nodesInfoAction;
         this.pipelineStore = nodeService.getIngestService().getPipelineStore();
     }
 
@@ -60,7 +72,28 @@ public class PutPipelineTransportAction extends TransportMasterNodeAction<PutPipelineRequest, WritePipelineResponse> {
     @Override
     protected void masterOperation(PutPipelineRequest request, ClusterState state, ActionListener<WritePipelineResponse> listener) throws Exception {
-        pipelineStore.put(clusterService, request, listener);
+        NodesInfoRequest nodesInfoRequest = new NodesInfoRequest();
+        nodesInfoRequest.clear();
+        nodesInfoRequest.ingest(true);
+        nodesInfoAction.execute(nodesInfoRequest, new ActionListener<NodesInfoResponse>() {
+            @Override
+            public void onResponse(NodesInfoResponse nodeInfos) {
+                try {
+                    Map<DiscoveryNode, IngestInfo> ingestInfos = new HashMap<>();
+                    for (NodeInfo nodeInfo : nodeInfos) {
+                        ingestInfos.put(nodeInfo.getNode(), nodeInfo.getIngest());
+                    }
+                    pipelineStore.put(clusterService, ingestInfos, request, listener);
+                } catch (Exception e) {
+                    onFailure(e);
+                }
+            }
+
+            @Override
+            public void onFailure(Throwable e) {
+                listener.onFailure(e);
+            }
+        });
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java
index f7da48a667b..f1045387259 100644
--- a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java
@@ -71,7 +71,7 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends ActionRequest<Request> {
      * Sets the list of action masks for the actions that should be returned
      */
     @SuppressWarnings("unchecked")
-    public final Request actions(String... actions) {
+    public final Request setActions(String... actions) {
         this.actions = actions;
         return (Request) this;
     }
@@ -79,16 +79,16 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends ActionRequest<Request> {
     /**
      * Return the list of action masks for the actions that should be returned
      */
-    public String[] actions() {
+    public String[] getActions() {
         return actions;
     }
 
-    public final String[] nodesIds() {
+    public final String[] getNodesIds() {
         return nodesIds;
     }
 
     @SuppressWarnings("unchecked")
-    public final Request nodesIds(String... nodesIds) {
+    public final Request setNodesIds(String... nodesIds) {
         this.nodesIds = nodesIds;
         return (Request) this;
     }
@@ -98,12 +98,12 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends ActionRequest<Request> {
      *
     * By default tasks with any ids are returned.
      */
-    public TaskId taskId() {
+    public TaskId getTaskId() {
         return taskId;
     }
 
     @SuppressWarnings("unchecked")
-    public final Request taskId(TaskId taskId) {
+    public final Request setTaskId(TaskId taskId) {
         this.taskId = taskId;
         return (Request) this;
     }
@@ -112,29 +112,29 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends ActionRequest<Request> {
     /**
      * Returns the parent task id that tasks should be filtered by
     */
-    public TaskId parentTaskId() {
+    public TaskId getParentTaskId() {
         return parentTaskId;
     }
 
     @SuppressWarnings("unchecked")
-    public Request parentTaskId(TaskId parentTaskId) {
+    public Request setParentTaskId(TaskId parentTaskId) {
         this.parentTaskId = parentTaskId;
         return (Request) this;
     }
 
-    public TimeValue timeout() {
+    public TimeValue getTimeout() {
         return this.timeout;
     }
 
     @SuppressWarnings("unchecked")
-    public final Request timeout(TimeValue timeout) {
+    public final Request setTimeout(TimeValue timeout) {
         this.timeout = timeout;
         return (Request) this;
     }
 
     @SuppressWarnings("unchecked")
-    public final Request timeout(String timeout) {
+    public final Request setTimeout(String timeout) {
         this.timeout = TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout");
         return (Request) this;
     }
@@ -162,11 +162,11 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends ActionRequest<Request> {
     }
 
     public boolean match(Task task) {
-        if (actions() != null && actions().length > 0 && Regex.simpleMatch(actions(), task.getAction()) == false) {
+        if (getActions() != null && getActions().length > 0 && Regex.simpleMatch(getActions(), task.getAction()) == false) {
             return false;
         }
-        if (taskId().isSet() == false) {
-            if(taskId().getId() != task.getId()) {
+        if (getTaskId().isSet() == false) {
+            if (getTaskId().getId() != task.getId()) {
                 return false;
             }
         }
diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java
index a7265ce9998..a510a847c62 100644
--- a/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java
@@ -35,19 +35,19 @@ public class TasksRequestBuilder<Request extends BaseTasksRequest<Request>, Response extends BaseTasksResponse, RequestBuilder extends TasksRequestBuilder<Request, Response, RequestBuilder>> {
 
     @SuppressWarnings("unchecked")
     public final RequestBuilder setNodesIds(String... nodesIds) {
-        request.nodesIds(nodesIds);
+        request.setNodesIds(nodesIds);
         return (RequestBuilder) this;
     }
 
     @SuppressWarnings("unchecked")
     public final RequestBuilder setActions(String... actions) {
-        request.actions(actions);
+        request.setActions(actions);
         return (RequestBuilder) this;
     }
 
     @SuppressWarnings("unchecked")
     public final RequestBuilder setTimeout(TimeValue timeout) {
-        request.timeout(timeout);
+        request.setTimeout(timeout);
         return (RequestBuilder) this;
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
index f10b9f23327..53c0d851997 100644
--- a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
@@ -124,25 +124,25 @@ public abstract class TransportTasksAction<
     }
 
     protected String[] resolveNodes(TasksRequest request, ClusterState clusterState) {
-        if (request.taskId().isSet()) {
-            return clusterState.nodes().resolveNodesIds(request.nodesIds());
+        if (request.getTaskId().isSet()) {
+            return clusterState.nodes().resolveNodesIds(request.getNodesIds());
         } else {
-            return new String[]{request.taskId().getNodeId()};
+            return new String[]{request.getTaskId().getNodeId()};
         }
     }
 
     protected void processTasks(TasksRequest request, Consumer<OperationTask> operation) {
-        if (request.taskId().isSet() == false) {
+        if (request.getTaskId().isSet() == false) {
             // we are only checking one task, we can optimize it
-            Task task = taskManager.getTask(request.taskId().getId());
+            Task task = taskManager.getTask(request.getTaskId().getId());
             if (task != null) {
                 if (request.match(task)) {
                     operation.accept((OperationTask) task);
                 } else {
-                    throw new ResourceNotFoundException("task [{}] doesn't support this operation", request.taskId());
+                    throw new ResourceNotFoundException("task [{}] doesn't support this operation", request.getTaskId());
                 }
             } else {
-                throw new ResourceNotFoundException("task [{}] is missing", request.taskId());
+                throw new ResourceNotFoundException("task [{}] is missing", request.getTaskId());
             }
         } else {
             for (Task task : taskManager.getTasks().values()) {
@@ -224,8 +224,8 @@ public abstract class TransportTasksAction<
             }
         } else {
             TransportRequestOptions.Builder builder = TransportRequestOptions.builder();
-            if (request.timeout() != null) {
-                builder.withTimeout(request.timeout());
+            if (request.getTimeout() != null) {
+                builder.withTimeout(request.getTimeout());
             }
             builder.withCompress(transportCompress());
             for (int i = 0; i < nodesIds.length; i++) {
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java
index 6ac3c477fd7..433dd4498a4 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java
@@ -225,7 +225,7 @@ final class BootstrapCheck {
 
     static class MaxNumberOfThreadsCheck implements Check {
 
-        private final long maxNumberOfThreadsThreshold = 1 << 15;
+        private final long maxNumberOfThreadsThreshold = 1 << 11;
 
         @Override
         public boolean check() {
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java
index df26df29800..6639f9bdbd6 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java
@@ -19,12 +19,14 @@
 
 package org.elasticsearch.cluster.metadata;
 
+import 
com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.close.CloseIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -37,11 +39,14 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.List; +import java.util.Set; /** * Service responsible for submitting open/close index requests @@ -78,7 +83,7 @@ public class MetaDataIndexStateService extends AbstractComponent { @Override public ClusterState execute(ClusterState currentState) { - List indicesToClose = new ArrayList<>(); + Set indicesToClose = new HashSet<>(); for (String index : request.indices()) { IndexMetaData indexMetaData = currentState.metaData().index(index); if (indexMetaData == null) { @@ -94,6 +99,28 @@ public class MetaDataIndexStateService extends AbstractComponent { return currentState; } + // Check if any of the indices to be closed are currently being restored from a snapshot and fail closing if such an index + // is found as closing an index that is being restored makes the index unusable (it cannot be recovered). + RestoreInProgress restore = currentState.custom(RestoreInProgress.TYPE); + if (restore != null) { + Set indicesToFail = null; + for (RestoreInProgress.Entry entry : restore.entries()) { + for (ObjectObjectCursor shard : entry.shards()) { + if (!shard.value.state().completed()) { + if (indicesToClose.contains(shard.key.getIndexName())) { + if (indicesToFail == null) { + indicesToFail = new HashSet<>(); + } + indicesToFail.add(shard.key.getIndexName()); + } + } + } + } + if (indicesToFail != null) { + throw new IllegalArgumentException("Cannot close indices that are being restored: " + indicesToFail); + } + } + logger.info("closing indices [{}]", indicesAsString); MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); diff --git a/core/src/main/java/org/elasticsearch/common/ParseField.java b/core/src/main/java/org/elasticsearch/common/ParseField.java index 0aad723e6fb..a0978723d0e 100644 --- a/core/src/main/java/org/elasticsearch/common/ParseField.java +++ b/core/src/main/java/org/elasticsearch/common/ParseField.java @@ -18,26 +18,23 @@ */ package org.elasticsearch.common; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; -import java.util.EnumSet; import java.util.HashSet; /** * Holds a field that can be found in a request while parsing and its different variants, which may be deprecated. 
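For context: the ParseField change that follows collapses the EnumSet-based flags into a plain boolean, so a deprecated field name is logged through the DeprecationLogger in lenient mode and rejected in strict mode. A stripped-down sketch of that decision, with System.err standing in for the deprecation logger and all class and field names illustrative:

    import java.util.Arrays;

    final class FieldMatcher {
        private final String canonical;
        private final String[] deprecatedNames;

        FieldMatcher(String canonical, String... deprecatedNames) {
            this.canonical = canonical;
            this.deprecatedNames = deprecatedNames;
        }

        /** Strict mode rejects deprecated spellings; lenient mode only warns. */
        boolean match(String field, boolean strict) {
            if (field.equals(canonical)) {
                return true;
            }
            if (Arrays.asList(deprecatedNames).contains(field)) {
                String msg = "Deprecated field [" + field + "] used, expected [" + canonical + "] instead";
                if (strict) {
                    throw new IllegalArgumentException(msg);
                }
                System.err.println(msg); // stand-in for DeprecationLogger#deprecated
                return true;
            }
            return false;
        }
    }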
*/ public class ParseField { + + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ParseField.class)); + private final String camelCaseName; private final String underscoreName; private final String[] deprecatedNames; private String allReplacedWith = null; - static final EnumSet EMPTY_FLAGS = EnumSet.noneOf(Flag.class); - static final EnumSet STRICT_FLAGS = EnumSet.of(Flag.STRICT); - - enum Flag { - STRICT - } - public ParseField(String value, String... deprecatedNames) { camelCaseName = Strings.toCamelCase(value); underscoreName = Strings.toUnderscoreCase(value); @@ -80,19 +77,21 @@ public class ParseField { return parseField; } - boolean match(String currentFieldName, EnumSet flags) { + boolean match(String currentFieldName, boolean strict) { if (allReplacedWith == null && (currentFieldName.equals(camelCaseName) || currentFieldName.equals(underscoreName))) { return true; } String msg; for (String depName : deprecatedNames) { if (currentFieldName.equals(depName)) { - if (flags.contains(Flag.STRICT)) { - msg = "Deprecated field [" + currentFieldName + "] used, expected [" + underscoreName + "] instead"; - if (allReplacedWith != null) { - msg = "Deprecated field [" + currentFieldName + "] used, replaced by [" + allReplacedWith + "]"; - } + msg = "Deprecated field [" + currentFieldName + "] used, expected [" + underscoreName + "] instead"; + if (allReplacedWith != null) { + msg = "Deprecated field [" + currentFieldName + "] used, replaced by [" + allReplacedWith + "]"; + } + if (strict) { throw new IllegalArgumentException(msg); + } else { + DEPRECATION_LOGGER.deprecated(msg); } return true; } diff --git a/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java b/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java index 137e5b4a966..9866694a230 100644 --- a/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java +++ b/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java @@ -21,29 +21,28 @@ package org.elasticsearch.common; import org.elasticsearch.common.settings.Settings; -import java.util.EnumSet; - /** * Matcher to use in combination with {@link ParseField} while parsing requests. Matches a {@link ParseField} * against a field name and throw deprecation exception depending on the current value of the {@link #PARSE_STRICT} setting. */ public class ParseFieldMatcher { public static final String PARSE_STRICT = "index.query.parse.strict"; - public static final ParseFieldMatcher EMPTY = new ParseFieldMatcher(ParseField.EMPTY_FLAGS); - public static final ParseFieldMatcher STRICT = new ParseFieldMatcher(ParseField.STRICT_FLAGS); + public static final ParseFieldMatcher EMPTY = new ParseFieldMatcher(false); + public static final ParseFieldMatcher STRICT = new ParseFieldMatcher(true); - private final EnumSet parseFlags; + private final boolean strict; public ParseFieldMatcher(Settings settings) { - if (settings.getAsBoolean(PARSE_STRICT, false)) { - this.parseFlags = EnumSet.of(ParseField.Flag.STRICT); - } else { - this.parseFlags = ParseField.EMPTY_FLAGS; - } + this(settings.getAsBoolean(PARSE_STRICT, false)); } - public ParseFieldMatcher(EnumSet parseFlags) { - this.parseFlags = parseFlags; + public ParseFieldMatcher(boolean strict) { + this.strict = strict; + } + + /** Should deprecated settings be rejected? 
*/ + public boolean isStrict() { + return strict; } /** @@ -55,6 +54,6 @@ public class ParseFieldMatcher { * @return true whenever the parse field that we are looking for was found, false otherwise */ public boolean match(String fieldName, ParseField parseField) { - return parseField.match(fieldName, parseFlags); + return parseField.match(fieldName, strict); } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java b/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java index 8af203f2ce8..dff4277e96f 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java +++ b/core/src/main/java/org/elasticsearch/common/geo/ShapesAvailability.java @@ -29,7 +29,7 @@ public class ShapesAvailability { static { boolean xSPATIAL4J_AVAILABLE; try { - Class.forName("com.spatial4j.core.shape.impl.PointImpl"); + Class.forName("org.locationtech.spatial4j.shape.impl.PointImpl"); xSPATIAL4J_AVAILABLE = true; } catch (Throwable t) { xSPATIAL4J_AVAILABLE = false; diff --git a/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java b/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java index 42650275b4b..7ee2bfbd42f 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java +++ b/core/src/main/java/org/elasticsearch/common/geo/XShapeCollection.java @@ -19,9 +19,9 @@ package org.elasticsearch.common.geo; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.ShapeCollection; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeCollection; import java.util.List; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index bda0106f2b6..97ef6561c9b 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.geo.builders; -import com.spatial4j.core.shape.Circle; +import org.locationtech.spatial4j.shape.Circle; import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index 426cbbf7800..ab997387ea1 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.geo.builders; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index 420f61a6799..d21f47cf053 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.geo.builders; -import com.spatial4j.core.shape.Shape; +import 
org.locationtech.spatial4j.shape.Shape; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.geo.XShapeCollection; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java index 8c2870e1e09..cbc9002c785 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.geo.builders; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index e69c0abe4f8..51f4fd232c5 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.geo.builders; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.LineString; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index 12b16254957..b8f2c8137ef 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -19,8 +19,8 @@ package org.elasticsearch.common.geo.builders; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.common.geo.XShapeCollection; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index 394892d909d..6ee679b7308 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.geo.builders; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.common.geo.XShapeCollection; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java index 1cee6525e7a..30b7e370f22 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.geo.builders; -import com.spatial4j.core.shape.Point; +import org.locationtech.spatial4j.shape.Point; import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.common.io.stream.StreamInput; diff --git 
a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index ab480cfbc24..4a9c8441072 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -19,8 +19,8 @@ package org.elasticsearch.common.geo.builders; -import com.spatial4j.core.exception.InvalidShapeException; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index 8c3ea3f3261..d0c73964575 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -19,10 +19,10 @@ package org.elasticsearch.common.geo.builders; -import com.spatial4j.core.context.jts.JtsSpatialContext; -import com.spatial4j.core.exception.InvalidShapeException; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.jts.JtsGeometry; +import org.locationtech.spatial4j.context.jts.JtsSpatialContext; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; @@ -81,9 +81,9 @@ public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWri * this normally isn't allowed. */ protected final boolean multiPolygonMayOverlap = false; - /** @see com.spatial4j.core.shape.jts.JtsGeometry#validate() */ + /** @see org.locationtech.spatial4j.shape.jts.JtsGeometry#validate() */ protected final boolean autoValidateJtsGeometry = true; - /** @see com.spatial4j.core.shape.jts.JtsGeometry#index() */ + /** @see org.locationtech.spatial4j.shape.jts.JtsGeometry#index() */ protected final boolean autoIndexJtsGeometry = true;//may want to turn off once SpatialStrategy impls do it. protected ShapeBuilder() { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/IndexCacheableQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/IndexCacheableQuery.java deleted file mode 100644 index d31cd3835ec..00000000000 --- a/core/src/main/java/org/elasticsearch/common/lucene/IndexCacheableQuery.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.lucene; - -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.Weight; - -import java.io.IOException; -import java.util.Objects; - -/** - * Base implementation for a query which is cacheable at the index level but - * not the segment level as usually expected. - */ -public abstract class IndexCacheableQuery extends Query { - - private Object readerCacheKey; - - @Override - public Query rewrite(IndexReader reader) throws IOException { - if (reader.getCoreCacheKey() != this.readerCacheKey) { - IndexCacheableQuery rewritten = (IndexCacheableQuery) clone(); - rewritten.readerCacheKey = reader.getCoreCacheKey(); - return rewritten; - } - return super.rewrite(reader); - } - - @Override - public boolean equals(Object obj) { - return super.equals(obj) - && readerCacheKey == ((IndexCacheableQuery) obj).readerCacheKey; - } - - @Override - public int hashCode() { - return 31 * super.hashCode() + Objects.hashCode(readerCacheKey); - } - - @Override - public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - if (readerCacheKey == null) { - throw new IllegalStateException("Rewrite first"); - } - if (readerCacheKey != searcher.getIndexReader().getCoreCacheKey()) { - throw new IllegalStateException("Must create weight on the same reader which has been used for rewriting"); - } - return doCreateWeight(searcher, needsScores); - } - - /** Create a {@link Weight} for this query. - * @see Query#createWeight(IndexSearcher, boolean) - */ - public abstract Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws IOException; -} diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 43051f95b9a..39f34ad867e 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -88,7 +88,7 @@ import java.util.Objects; public class Lucene { public static final String LATEST_DOC_VALUES_FORMAT = "Lucene54"; public static final String LATEST_POSTINGS_FORMAT = "Lucene50"; - public static final String LATEST_CODEC = "Lucene54"; + public static final String LATEST_CODEC = "Lucene60"; static { Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class); @@ -236,13 +236,8 @@ public class Lucene { protected Object doBody(String segmentFileName) throws IOException { try (IndexInput input = directory.openInput(segmentFileName, IOContext.READ)) { final int format = input.readInt(); - final int actualFormat; if (format == CodecUtil.CODEC_MAGIC) { - // 4.0+ - actualFormat = CodecUtil.checkHeaderNoMagic(input, "segments", SegmentInfos.VERSION_40, Integer.MAX_VALUE); - if (actualFormat >= SegmentInfos.VERSION_48) { - CodecUtil.checksumEntireFile(input); - } + CodecUtil.checksumEntireFile(input); } // legacy.... 
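For context: the Lucene.java hunk just above can drop the per-version branches because every file written by Lucene 5.0 or later carries a footer checksum, so the whole file can always be verified. A small sketch of such a verification, assuming nothing beyond a Directory and a file name:

    import java.io.IOException;
    import org.apache.lucene.codecs.CodecUtil;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.IOContext;
    import org.apache.lucene.store.IndexInput;

    final class FooterChecksum {
        /** Verifies a whole index file against its footer checksum. */
        static void verify(Directory dir, String fileName) throws IOException {
            try (IndexInput input = dir.openInput(fileName, IOContext.READ)) {
                if (input.readInt() == CodecUtil.CODEC_MAGIC) {
                    CodecUtil.checksumEntireFile(input); // throws CorruptIndexException on mismatch
                }
            }
        }
    }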
} @@ -382,7 +377,7 @@ public class Lucene { writeMissingValue(out, comparatorSource.missingValue(sortField.getReverse())); } else { writeSortType(out, sortField.getType()); - writeMissingValue(out, sortField.missingValue); + writeMissingValue(out, sortField.getMissingValue()); } out.writeBoolean(sortField.getReverse()); } @@ -684,7 +679,7 @@ public class Lucene { segmentsFileName = infos.getSegmentsFileName(); this.dir = dir; userData = infos.getUserData(); - files = Collections.unmodifiableCollection(infos.files(dir, true)); + files = Collections.unmodifiableCollection(infos.files(true)); generation = infos.getGeneration(); segmentCount = infos.size(); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java index e0e03b18e12..9851ac12a1a 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java @@ -226,7 +226,7 @@ public final class AllTermQuery extends Query { @Override public String toString(String field) { - return new TermQuery(term).toString(field) + ToStringUtils.boost(getBoost()); + return new TermQuery(term).toString(field); } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java index 83de725a83a..971cbdafffe 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java @@ -30,7 +30,6 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; -import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; @@ -118,9 +117,7 @@ public class FilterableTermsEnum extends TermsEnum { }; } - BitDocIdSet.Builder builder = new BitDocIdSet.Builder(context.reader().maxDoc()); - builder.or(docs); - bits = builder.build().bits(); + bits = BitSet.of(docs, context.reader().maxDoc()); // Count how many docs are in our filtered set // TODO make this lazy-loaded only for those that need it? diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java index d1efdc3ede2..754d76fed27 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -30,7 +30,6 @@ import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.StringHelper; -import org.apache.lucene.util.ToStringUtils; import java.io.IOException; import java.util.ArrayList; @@ -51,7 +50,7 @@ public class MultiPhrasePrefixQuery extends Query { /** * Sets the phrase slop for this query. * - * @see org.apache.lucene.search.PhraseQuery#setSlop(int) + * @see org.apache.lucene.search.PhraseQuery.Builder#setSlop(int) */ public void setSlop(int s) { slop = s; @@ -64,7 +63,7 @@ public class MultiPhrasePrefixQuery extends Query { /** * Sets the phrase slop for this query. 
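For context: these javadoc references (here and just below) move from PhraseQuery to PhraseQuery.Builder because phrase queries are now assembled through the builder and are immutable once built. A short usage sketch, with an illustrative field and terms:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.PhraseQuery;

    final class PhraseQueryExample {
        static PhraseQuery quickFox() {
            PhraseQuery.Builder builder = new PhraseQuery.Builder();
            builder.setSlop(2);                     // allow up to two position edits
            builder.add(new Term("body", "quick"));
            builder.add(new Term("body", "fox"));
            return builder.build();                 // immutable once built
        }
    }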
* - * @see org.apache.lucene.search.PhraseQuery#getSlop() + * @see org.apache.lucene.search.PhraseQuery.Builder#getSlop() */ public int getSlop() { return slop; @@ -73,7 +72,7 @@ public class MultiPhrasePrefixQuery extends Query { /** * Add a single term at the next position in the phrase. * - * @see org.apache.lucene.search.PhraseQuery#add(Term) + * @see org.apache.lucene.search.PhraseQuery.Builder#add(Term) */ public void add(Term term) { add(new Term[]{term}); @@ -83,7 +82,7 @@ public class MultiPhrasePrefixQuery extends Query { * Add multiple terms at the next position in the phrase. Any of the terms * may match. * - * @see org.apache.lucene.search.PhraseQuery#add(Term) + * @see org.apache.lucene.search.PhraseQuery.Builder#add(Term) */ public void add(Term[] terms) { int position = 0; @@ -98,7 +97,7 @@ public class MultiPhrasePrefixQuery extends Query { * * @param terms the terms * @param position the position of the terms provided as argument - * @see org.apache.lucene.search.PhraseQuery#add(Term, int) + * @see org.apache.lucene.search.PhraseQuery.Builder#add(Term, int) */ public void add(Term[] terms, int position) { if (termArrays.size() == 0) @@ -231,8 +230,6 @@ public class MultiPhrasePrefixQuery extends Query { buffer.append(slop); } - buffer.append(ToStringUtils.boost(getBoost())); - return buffer.toString(); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index 73c3fc9400d..53ee2295ae4 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -23,7 +23,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.PrefixQuery; @@ -132,11 +131,7 @@ public class Queries { builder.add(clause); } builder.setMinimumNumberShouldMatch(msm); - BooleanQuery bq = builder.build(); - if (query.getBoost() != 1f) { - return new BoostQuery(bq, query.getBoost()); - } - return bq; + return builder.build(); } else { return query; } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index a7b7300c9b6..54e8c0e3488 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -29,7 +29,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; -import org.apache.lucene.util.ToStringUtils; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -102,7 +101,7 @@ public class FiltersFunctionScoreQuery extends Query { } } - Query subQuery; + final Query subQuery; final FilterFunction[] filterFunctions; final ScoreMode scoreMode; final float maxBoost; @@ -136,9 +135,7 @@ public class FiltersFunctionScoreQuery extends Query { Query newQ = subQuery.rewrite(reader); if (newQ 
== subQuery) return this; - FiltersFunctionScoreQuery bq = (FiltersFunctionScoreQuery) this.clone(); - bq.subQuery = newQ; - return bq; + return new FiltersFunctionScoreQuery(newQ, scoreMode, filterFunctions, maxBoost, minScore, combineFunction); } @Override @@ -355,7 +352,6 @@ public class FiltersFunctionScoreQuery extends Query { sb.append("{filter(").append(filterFunction.filter).append("), function [").append(filterFunction.function).append("]}"); } sb.append("])"); - sb.append(ToStringUtils.boost(getBoost())); return sb.toString(); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index 3cf4f3e48f7..646076a3a17 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -28,7 +28,6 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; -import org.apache.lucene.util.ToStringUtils; import java.io.IOException; import java.util.Objects; @@ -41,7 +40,7 @@ public class FunctionScoreQuery extends Query { public static final float DEFAULT_MAX_BOOST = Float.MAX_VALUE; - Query subQuery; + final Query subQuery; final ScoreFunction function; final float maxBoost; final CombineFunction combineFunction; @@ -84,9 +83,7 @@ public class FunctionScoreQuery extends Query { if (newQ == subQuery) { return this; } - FunctionScoreQuery bq = (FunctionScoreQuery) this.clone(); - bq.subQuery = newQ; - return bq; + return new FunctionScoreQuery(newQ, function, minScore, combineFunction, maxBoost); } @Override @@ -205,7 +202,6 @@ public class FunctionScoreQuery extends Query { public String toString(String field) { StringBuilder sb = new StringBuilder(); sb.append("function score (").append(subQuery.toString(field)).append(",function=").append(function).append(')'); - sb.append(ToStringUtils.boost(getBoost())); return sb.toString(); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/store/FilterIndexOutput.java b/core/src/main/java/org/elasticsearch/common/lucene/store/FilterIndexOutput.java index 616e43ac422..5e5fc826264 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/store/FilterIndexOutput.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/store/FilterIndexOutput.java @@ -30,7 +30,7 @@ public class FilterIndexOutput extends IndexOutput { protected final IndexOutput out; public FilterIndexOutput(String resourceDescription, IndexOutput out) { - super(resourceDescription); + super(resourceDescription, out.getName()); this.out = out; } diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 69ef795812d..b8b75147740 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; +import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.FsDirectoryService; 
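For context: in the two function-score queries above, subQuery becomes final and rewrite() returns a freshly constructed query instead of mutating a clone, in line with Lucene queries becoming effectively immutable. A condensed sketch of the pattern; WrapperQuery is a made-up minimal example, not the actual FunctionScoreQuery:

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.Query;

    final class WrapperQuery extends Query {
        private final Query subQuery; // final: never mutated after construction

        WrapperQuery(Query subQuery) {
            this.subQuery = subQuery;
        }

        @Override
        public Query rewrite(IndexReader reader) throws IOException {
            Query rewritten = subQuery.rewrite(reader);
            if (rewritten == subQuery) {
                return this; // nothing changed, keep this instance
            }
            return new WrapperQuery(rewritten); // new instance instead of clone-and-mutate
        }

        @Override
        public String toString(String field) {
            return "wrapper(" + subQuery.toString(field) + ")";
        }

        @Override
        public boolean equals(Object obj) {
            return obj != null && getClass() == obj.getClass()
                    && subQuery.equals(((WrapperQuery) obj).subQuery);
        }

        @Override
        public int hashCode() {
            return 31 * getClass().hashCode() + subQuery.hashCode();
        }
    }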
import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; @@ -44,6 +45,7 @@ import org.elasticsearch.indices.IndicesRequestCache; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.Map; import java.util.Set; import java.util.function.Predicate; @@ -133,8 +135,15 @@ public final class IndexScopedSettings extends AbstractScopedSettings { FsDirectoryService.INDEX_LOCK_FACTOR_SETTING, EngineConfig.INDEX_CODEC_SETTING, IndexWarmer.INDEX_NORMS_LOADING_SETTING, - // this sucks but we can't really validate all the analyzers/similarity in here - Setting.groupSetting("index.similarity.", false, Setting.Scope.INDEX), // this allows similarity settings to be passed + // validate that built-in similarities don't get redefined + Setting.groupSetting("index.similarity.", false, Setting.Scope.INDEX, (s) -> { + Map groups = s.getAsGroups(); + for (String key : SimilarityService.BUILT_IN.keySet()) { + if (groups.containsKey(key)) { + throw new IllegalArgumentException("illegal value for [index.similarity."+ key + "] cannot redefine built-in similarity"); + } + } + }), // this allows similarity settings to be passed Setting.groupSetting("index.analysis.", false, Setting.Scope.INDEX) // this allows analysis settings to be passed ))); diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 0b4e43744a5..c31b905abbf 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.settings; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; @@ -30,16 +31,19 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.MemorySizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; +import java.util.function.Predicate; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -177,7 +181,7 @@ public class Setting extends ToXContentToBytes { /** * Returns true iff this setting is present in the given settings object. 
Otherwise false */ - public final boolean exists(Settings settings) { + public boolean exists(Settings settings) { return settings.get(getKey()) != null; } @@ -505,17 +509,45 @@ public class Setting extends ToXContentToBytes { throw new ElasticsearchException(ex); } } - public static Setting groupSetting(String key, boolean dynamic, Scope scope) { + return groupSetting(key, dynamic, scope, (s) -> {}); + } + public static Setting groupSetting(String key, boolean dynamic, Scope scope, Consumer validator) { return new Setting(new GroupKey(key), (s) -> "", (s) -> null, dynamic, scope) { @Override public boolean isGroupSetting() { return true; } + @Override + public String getRaw(Settings settings) { + Settings subSettings = get(settings); + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + subSettings.toXContent(builder, EMPTY_PARAMS); + builder.endObject(); + return builder.string(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + @Override public Settings get(Settings settings) { - return settings.getByPrefix(getKey()); + Settings byPrefix = settings.getByPrefix(getKey()); + validator.accept(byPrefix); + return byPrefix; + } + + @Override + public boolean exists(Settings settings) { + for (Map.Entry entry : settings.getAsMap().entrySet()) { + if (entry.getKey().startsWith(key)) { + return true; + } + } + return false; } @Override diff --git a/core/src/main/java/org/elasticsearch/common/util/BigArrays.java b/core/src/main/java/org/elasticsearch/common/util/BigArrays.java index faa377baccd..dfef49ca9d4 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigArrays.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigArrays.java @@ -41,9 +41,9 @@ public class BigArrays { /** Page size in bytes: 16KB */ public static final int PAGE_SIZE_IN_BYTES = 1 << 14; - public static final int BYTE_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_BYTE; - public static final int INT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_INT; - public static final int LONG_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_LONG; + public static final int BYTE_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES; + public static final int INT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / Integer.BYTES; + public static final int LONG_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / Long.BYTES; public static final int OBJECT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_OBJECT_REF; /** Returns the next size to grow when working with parallel arrays that may have different page sizes or number of bytes per element. 
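For context: the new groupSetting overload above takes a validator that runs every time the settings group is materialized, which is how redefining a built-in similarity under index.similarity.* gets rejected. A standalone sketch of the same validate-on-read idea; the map type and the built-in names here are illustrative:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;
    import java.util.function.Consumer;

    final class GroupValidatorSketch {
        // Illustrative stand-in for the SimilarityService.BUILT_IN key set.
        private static final Set<String> BUILT_IN = new HashSet<>(Arrays.asList("classic", "BM25"));

        static final Consumer<Map<String, String>> VALIDATOR = groups -> {
            for (String key : BUILT_IN) {
                if (groups.containsKey(key)) {
                    throw new IllegalArgumentException(
                            "illegal value for [index.similarity." + key + "], cannot redefine built-in similarity");
                }
            }
        };

        public static void main(String[] args) {
            VALIDATOR.accept(Collections.singletonMap("my_sim", "...")); // passes
            try {
                VALIDATOR.accept(Collections.singletonMap("BM25", "..."));
            } catch (IllegalArgumentException expected) {
                System.err.println(expected.getMessage()); // redefinition rejected
            }
        }
    }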
*/ @@ -490,7 +490,7 @@ public class BigArrays { if (minSize <= array.size()) { return array; } - final long newSize = overSize(minSize, BYTE_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_BYTE); + final long newSize = overSize(minSize, BYTE_PAGE_SIZE, 1); return resize(array, newSize); } @@ -573,7 +573,7 @@ public class BigArrays { if (minSize <= array.size()) { return array; } - final long newSize = overSize(minSize, INT_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_INT); + final long newSize = overSize(minSize, INT_PAGE_SIZE, Integer.BYTES); return resize(array, newSize); } @@ -623,7 +623,7 @@ public class BigArrays { if (minSize <= array.size()) { return array; } - final long newSize = overSize(minSize, LONG_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_LONG); + final long newSize = overSize(minSize, LONG_PAGE_SIZE, Long.BYTES); return resize(array, newSize); } @@ -670,7 +670,7 @@ public class BigArrays { if (minSize <= array.size()) { return array; } - final long newSize = overSize(minSize, LONG_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_LONG); + final long newSize = overSize(minSize, LONG_PAGE_SIZE, Long.BYTES); return resize(array, newSize); } @@ -717,7 +717,7 @@ public class BigArrays { if (minSize <= array.size()) { return array; } - final long newSize = overSize(minSize, INT_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_FLOAT); + final long newSize = overSize(minSize, INT_PAGE_SIZE, Float.BYTES); return resize(array, newSize); } diff --git a/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java b/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java index da4bc28408d..cac3132385f 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java @@ -127,7 +127,7 @@ final class BigByteArray extends AbstractBigArray implements ByteArray { @Override protected int numBytesPerElement() { - return RamUsageEstimator.NUM_BYTES_BYTE; + return 1; } /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. */ diff --git a/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java b/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java index 1f739188377..4aab593affe 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java @@ -71,7 +71,7 @@ final class BigDoubleArray extends AbstractBigArray implements DoubleArray { @Override protected int numBytesPerElement() { - return RamUsageEstimator.NUM_BYTES_INT; + return Integer.BYTES; } /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. */ diff --git a/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java b/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java index f6fc2d8fce0..1fa79a9f3db 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java @@ -71,7 +71,7 @@ final class BigFloatArray extends AbstractBigArray implements FloatArray { @Override protected int numBytesPerElement() { - return RamUsageEstimator.NUM_BYTES_FLOAT; + return Float.BYTES; } /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. 
*/ diff --git a/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java b/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java index 1c0e9fe017c..4ce5fc7acee 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java @@ -88,7 +88,7 @@ final class BigIntArray extends AbstractBigArray implements IntArray { @Override protected int numBytesPerElement() { - return RamUsageEstimator.NUM_BYTES_INT; + return Integer.BYTES; } /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. */ diff --git a/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java b/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java index fe0323ba67c..2e3248143b4 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java @@ -70,7 +70,7 @@ final class BigLongArray extends AbstractBigArray implements LongArray { @Override protected int numBytesPerElement() { - return RamUsageEstimator.NUM_BYTES_LONG; + return Long.BYTES; } /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. */ diff --git a/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java b/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java index ab923a195ca..19a41d3096d 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java @@ -65,7 +65,7 @@ final class BigObjectArray extends AbstractBigArray implements ObjectArray @Override protected int numBytesPerElement() { - return RamUsageEstimator.NUM_BYTES_INT; + return Integer.BYTES; } /** Change the size of this array. Content between indexes 0 and min(size(), newSize) will be preserved. 
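For context: replacing RamUsageEstimator.NUM_BYTES_* with the JDK's *.BYTES constants throughout the Big*Array classes is behavior-neutral; with the fixed 16KB page used by BigArrays the per-type element counts work out as below. A worked check:

    final class PageSizes {
        static final int PAGE_SIZE_IN_BYTES = 1 << 14; // 16384, as in BigArrays

        public static void main(String[] args) {
            System.out.println(PAGE_SIZE_IN_BYTES);                 // 16384 bytes per page
            System.out.println(PAGE_SIZE_IN_BYTES / Integer.BYTES); // 4096 ints   (Integer.BYTES == 4)
            System.out.println(PAGE_SIZE_IN_BYTES / Long.BYTES);    // 2048 longs  (Long.BYTES == 8)
            System.out.println(PAGE_SIZE_IN_BYTES / Float.BYTES);   // 4096 floats (Float.BYTES == 4)
        }
    }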
*/ diff --git a/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java b/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java index fdc94d53849..b9dd6859ce0 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java +++ b/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java @@ -388,7 +388,7 @@ public class BloomFilter { } public long ramBytesUsed() { - return RamUsageEstimator.NUM_BYTES_LONG * data.length + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 16; + return Long.BYTES * data.length + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 16; } } diff --git a/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java b/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java index 739677342f7..36e0b19c782 100644 --- a/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java +++ b/core/src/main/java/org/elasticsearch/common/util/CollectionUtils.java @@ -333,7 +333,7 @@ public class CollectionUtils { assert indices.length >= numValues; if (numValues > 1) { new InPlaceMergeSorter() { - final Comparator comparator = BytesRef.getUTF8SortedAsUnicodeComparator(); + final Comparator comparator = Comparator.naturalOrder(); @Override protected int compare(int i, int j) { return comparator.compare(bytes.get(scratch, indices[i]), bytes.get(scratch1, indices[j])); diff --git a/core/src/main/java/org/elasticsearch/discovery/Discovery.java b/core/src/main/java/org/elasticsearch/discovery/Discovery.java index b96417381ff..778e2d15053 100644 --- a/core/src/main/java/org/elasticsearch/discovery/Discovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/Discovery.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.node.service.NodeService; import java.io.IOException; @@ -41,11 +40,6 @@ public interface Discovery extends LifecycleComponent { String nodeDescription(); - /** - * Here as a hack to solve dep injection problem... - */ - void setNodeService(@Nullable NodeService nodeService); - /** * Another hack to solve dep injection problem..., note, this will be called before * any start is called. 
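For context: the CollectionUtils change above works because BytesRef implements Comparable with unsigned byte-wise order, and on UTF-8 encoded terms that order coincides with Unicode code point order, making Comparator.naturalOrder() a drop-in for the removed getUTF8SortedAsUnicodeComparator. A tiny demonstration:

    import java.util.Comparator;
    import org.apache.lucene.util.BytesRef;

    final class BytesRefOrder {
        public static void main(String[] args) {
            Comparator<BytesRef> cmp = Comparator.naturalOrder();
            // "abc" < "abd" byte-wise, matching code point order for UTF-8 input
            System.out.println(cmp.compare(new BytesRef("abc"), new BytesRef("abd")) < 0); // true
        }
    }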
diff --git a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java index 661de5260c1..0462d6a8d8d 100644 --- a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java @@ -33,7 +33,6 @@ import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.ClusterSettings; @@ -45,7 +44,6 @@ import org.elasticsearch.discovery.BlockingClusterStatePublishResponseHandler; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.DiscoveryStats; -import org.elasticsearch.node.service.NodeService; import java.util.HashSet; import java.util.Queue; @@ -84,11 +82,6 @@ public class LocalDiscovery extends AbstractLifecycleComponent implem this.discoverySettings = new DiscoverySettings(settings, clusterSettings); } - @Override - public void setNodeService(@Nullable NodeService nodeService) { - // nothing to do here - } - @Override public void setRoutingService(RoutingService routingService) { this.routingService = routingService; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/DiscoveryNodesProvider.java b/core/src/main/java/org/elasticsearch/discovery/zen/DiscoveryNodesProvider.java index f845cbe1fed..b9ce7901369 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/DiscoveryNodesProvider.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/DiscoveryNodesProvider.java @@ -20,8 +20,6 @@ package org.elasticsearch.discovery.zen; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.node.service.NodeService; /** * @@ -30,6 +28,4 @@ public interface DiscoveryNodesProvider { DiscoveryNodes nodes(); - @Nullable - NodeService nodeService(); } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index fb0f7a61966..c0dd78b4e5f 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -60,7 +60,6 @@ import org.elasticsearch.discovery.zen.ping.ZenPing; import org.elasticsearch.discovery.zen.ping.ZenPingService; import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; -import org.elasticsearch.node.service.NodeService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -137,10 +136,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen /** counts the time this node has joined the cluster or have elected it self as master */ private final AtomicLong clusterJoinsCounter = new AtomicLong(); - @Nullable - private NodeService nodeService; - - // must initialized in doStart(), when we have the routingService set private volatile NodeJoinController 
nodeJoinController; @@ -192,11 +187,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen transportService.registerRequestHandler(DISCOVERY_REJOIN_ACTION_NAME, RejoinClusterRequest::new, ThreadPool.Names.SAME, new RejoinClusterRequestHandler()); } - @Override - public void setNodeService(@Nullable NodeService nodeService) { - this.nodeService = nodeService; - } - @Override public void setRoutingService(RoutingService routingService) { this.routingService = routingService; @@ -292,11 +282,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen return clusterService.state().nodes(); } - @Override - public NodeService nodeService() { - return this.nodeService; - } - @Override public boolean nodeHasJoinedClusterOnce() { return clusterJoinsCounter.get() > 0; diff --git a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java index c3c1cd3b734..8795a7e7d15 100644 --- a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java +++ b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java @@ -116,7 +116,7 @@ public abstract class MetaDataStateFormat { final Path finalStatePath = stateLocation.resolve(fileName); try { final String resourceDesc = "MetaDataStateFormat.write(path=\"" + tmpStatePath + "\")"; - try (OutputStreamIndexOutput out = new OutputStreamIndexOutput(resourceDesc, Files.newOutputStream(tmpStatePath), BUFFER_SIZE)) { + try (OutputStreamIndexOutput out = new OutputStreamIndexOutput(resourceDesc, fileName, Files.newOutputStream(tmpStatePath), BUFFER_SIZE)) { CodecUtil.writeHeader(out, STATE_FILE_CODEC, STATE_FILE_VERSION); out.writeInt(format.index()); out.writeLong(version); diff --git a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index e2b6f0d27ed..c94e1370c01 100644 --- a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -104,6 +104,8 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { && matchingNodes.isNodeMatchBySyncID(nodeWithHighestMatch) == true) { // we found a better match that has a full sync id match, the existing allocation is not fully synced // so we found a better one, cancel this one + logger.debug("cancelling allocation of replica on [{}], sync id match found on node [{}]", + currentNode, nodeWithHighestMatch); it.moveToUnassigned(new UnassignedInfo(UnassignedInfo.Reason.REALLOCATED_REPLICA, "existing allocation of replica to [" + currentNode + "] cancelled, sync id match found on node [" + nodeWithHighestMatch + "]", null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); diff --git a/core/src/main/java/org/elasticsearch/index/Index.java b/core/src/main/java/org/elasticsearch/index/Index.java index 80bf3c31b44..983b977d611 100644 --- a/core/src/main/java/org/elasticsearch/index/Index.java +++ b/core/src/main/java/org/elasticsearch/index/Index.java @@ -19,6 +19,7 @@ package org.elasticsearch.index; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -50,7 +51,14 @@ public class Index implements Writeable { @Override public String toString() { - return "[" + name + "]"; + /* + * If we have a uuid we put it in the 
toString so it'll show up in logs which is useful as more and more things use the uuid rather + * than the name as the lookup key for the index. + */ + if (ClusterState.UNKNOWN_UUID.equals(uuid)) { + return "[" + name + "]"; + } + return "[" + name + "/" + uuid + "]"; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java index a27b49b9618..b7481e78496 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -20,8 +20,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.NumericTokenStream; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.apache.lucene.analysis.ar.ArabicAnalyzer; import org.apache.lucene.analysis.bg.BulgarianAnalyzer; import org.apache.lucene.analysis.br.BrazilianAnalyzer; @@ -300,7 +300,7 @@ public class Analysis { *

Although most analyzers generate character terms (CharTermAttribute), * some token streams only contain binary terms (BinaryTermAttribute, * CharTermAttribute being a special type of BinaryTermAttribute), such as - * {@link NumericTokenStream} and unsuitable for highlighting and + * {@link LegacyNumericTokenStream}, and are unsuitable for highlighting and * more-like-this queries which expect character terms.

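(Illustrative note: {@link LegacyNumericTokenStream} uses an attribute factory that rejects CharTermAttribute, so a call such as {@code isCharacterTokenStream(new LegacyNumericTokenStream(4))} returns {@code false}, while an ordinary character-producing analyzer chain returns {@code true}.)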
*/ public static boolean isCharacterTokenStream(TokenStream tokenStream) { diff --git a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenFilterFactory.java index f28f374220a..82ed526323d 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenFilterFactory.java @@ -21,10 +21,8 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; -import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenFilter; import org.apache.lucene.analysis.ngram.NGramTokenFilter; import org.apache.lucene.analysis.reverse.ReverseStringFilter; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -43,14 +41,11 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory { public static final int SIDE_BACK = 2; private final int side; - private org.elasticsearch.Version esVersion; - public EdgeNGramTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); this.minGram = settings.getAsInt("min_gram", NGramTokenFilter.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE); this.side = parseSide(settings.get("side", "front")); - this.esVersion = org.elasticsearch.Version.indexCreated(indexSettings.getSettings()); } static int parseSide(String side) { @@ -70,15 +65,7 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory { result = new ReverseStringFilter(result); } - if (version.onOrAfter(Version.LUCENE_4_3) && esVersion.onOrAfter(org.elasticsearch.Version.V_0_90_2)) { - /* - * We added this in 0.90.2 but 0.90.1 used LUCENE_43 already so we can not rely on the lucene version. - * Yet if somebody uses 0.90.2 or higher with a prev. lucene version we should also use the deprecated version. 
- */ - result = new EdgeNGramTokenFilter(result, minGram, maxGram); - } else { - result = new Lucene43EdgeNGramTokenFilter(result, minGram, maxGram); - } + result = new EdgeNGramTokenFilter(result, minGram, maxGram); // side=BACK is not supported anymore but applying ReverseStringFilter up-front and after the token filter has the same effect if (side == SIDE_BACK) { diff --git a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java index 2c50d8d4d66..77d122393ce 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java @@ -21,9 +21,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer; -import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenizer; import org.apache.lucene.analysis.ngram.NGramTokenizer; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -33,55 +31,33 @@ import static org.elasticsearch.index.analysis.NGramTokenizerFactory.parseTokenC /** * */ -@SuppressWarnings("deprecation") public class EdgeNGramTokenizerFactory extends AbstractTokenizerFactory { private final int minGram; private final int maxGram; - private final Lucene43EdgeNGramTokenizer.Side side; - private final CharMatcher matcher; - - protected org.elasticsearch.Version esVersion; public EdgeNGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); - this.side = Lucene43EdgeNGramTokenizer.Side.getSide(settings.get("side", Lucene43EdgeNGramTokenizer.DEFAULT_SIDE.getLabel())); this.matcher = parseTokenChars(settings.getAsArray("token_chars")); - this.esVersion = indexSettings.getIndexVersionCreated(); } @Override public Tokenizer create() { - if (version.onOrAfter(Version.LUCENE_4_3) && esVersion.onOrAfter(org.elasticsearch.Version.V_0_90_2)) { - /* - * We added this in 0.90.2 but 0.90.1 used LUCENE_43 already so we can not rely on the lucene version. - * Yet if somebody uses 0.90.2 or higher with a prev. lucene version we should also use the deprecated version. - */ - if (side == Lucene43EdgeNGramTokenizer.Side.BACK) { - throw new IllegalArgumentException("side=back is not supported anymore. Please fix your analysis chain or use" - + " an older compatibility version (<=4.2) but beware that it might cause highlighting bugs." - + " To obtain the same behavior as the previous version please use \"edgeNGram\" filter which still supports side=back" - + " in combination with a \"keyword\" tokenizer"); - } - final Version version = this.version == Version.LUCENE_4_3 ? 
Version.LUCENE_4_4 : this.version; // always use 4.4 or higher - if (matcher == null) { - return new EdgeNGramTokenizer(minGram, maxGram); - } else { - return new EdgeNGramTokenizer(minGram, maxGram) { - @Override - protected boolean isTokenChar(int chr) { - return matcher.isTokenChar(chr); - } - }; - } + if (matcher == null) { + return new EdgeNGramTokenizer(minGram, maxGram); } else { - return new Lucene43EdgeNGramTokenizer(side, minGram, maxGram); + return new EdgeNGramTokenizer(minGram, maxGram) { + @Override + protected boolean isTokenChar(int chr) { + return matcher.isTokenChar(chr); + } + }; } } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java index 82b8df70741..ab00657313d 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/KeepWordFilterFactory.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.KeepWordFilter; -import org.apache.lucene.analysis.miscellaneous.Lucene43KeepWordFilter; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; @@ -40,9 +39,6 @@ import org.elasticsearch.index.IndexSettings; *
  • {@value #KEEP_WORDS_PATH_KEY} a reference to a file containing the words * / tokens to keep. Note: this is an alternative to {@value #KEEP_WORDS_KEY}; if * both are set, an exception will be thrown.
  • - *
  • {@value #ENABLE_POS_INC_KEY} true iff the filter should - * maintain position increments for dropped tokens. The default is - * true.
  • *
  • {@value #KEEP_WORDS_CASE_KEY} to use case sensitive keep words. The * default is false, which corresponds to case-sensitive matching.
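  • (Illustrative configuration: a token filter defined in the index analysis settings as {"type": "keep", "keep_words": ["one", "two", "three"]} keeps only those three tokens and drops every other token from the stream.)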
  • * @@ -51,10 +47,11 @@ import org.elasticsearch.index.IndexSettings; */ public class KeepWordFilterFactory extends AbstractTokenFilterFactory { private final CharArraySet keepWords; - private final boolean enablePositionIncrements; private static final String KEEP_WORDS_KEY = "keep_words"; private static final String KEEP_WORDS_PATH_KEY = KEEP_WORDS_KEY + "_path"; private static final String KEEP_WORDS_CASE_KEY = KEEP_WORDS_KEY + "_case"; // for javadoc + + // unsupported ancient option private static final String ENABLE_POS_INC_KEY = "enable_position_increments"; public KeepWordFilterFactory(IndexSettings indexSettings, @@ -68,26 +65,14 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory { throw new IllegalArgumentException("keep requires either `" + KEEP_WORDS_KEY + "` or `" + KEEP_WORDS_PATH_KEY + "` to be configured"); } - if (version.onOrAfter(Version.LUCENE_4_4) && settings.get(ENABLE_POS_INC_KEY) != null) { - throw new IllegalArgumentException(ENABLE_POS_INC_KEY + " is not supported anymore. Please fix your analysis chain or use" - + " an older compatibility version (<=4.3) but beware that it might cause highlighting bugs."); + if (settings.get(ENABLE_POS_INC_KEY) != null) { + throw new IllegalArgumentException(ENABLE_POS_INC_KEY + " is not supported anymore. Please fix your analysis chain"); } - enablePositionIncrements = version.onOrAfter(Version.LUCENE_4_4) ? true : settings.getAsBoolean(ENABLE_POS_INC_KEY, true); - this.keepWords = Analysis.getWordSet(env, settings, KEEP_WORDS_KEY); - } @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_4)) { - return new KeepWordFilter(tokenStream, keepWords); - } else { - @SuppressWarnings("deprecation") - final TokenStream filter = new Lucene43KeepWordFilter(enablePositionIncrements, tokenStream, keepWords); - return filter; - } + return new KeepWordFilter(tokenStream, keepWords); } - - } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/LengthTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/LengthTokenFilterFactory.java index 3af93bc79de..e55e24ccae0 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/LengthTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/LengthTokenFilterFactory.java @@ -21,8 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.LengthFilter; -import org.apache.lucene.analysis.miscellaneous.Lucene43LengthFilter; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -34,28 +32,21 @@ public class LengthTokenFilterFactory extends AbstractTokenFilterFactory { private final int min; private final int max; - private final boolean enablePositionIncrements; + + // ancient unsupported option private static final String ENABLE_POS_INC_KEY = "enable_position_increments"; public LengthTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); min = settings.getAsInt("min", 0); max = settings.getAsInt("max", Integer.MAX_VALUE); - if (version.onOrAfter(Version.LUCENE_4_4) && settings.get(ENABLE_POS_INC_KEY) != null) { - throw new IllegalArgumentException(ENABLE_POS_INC_KEY + " is not supported anymore. 
Please fix your analysis chain or use" - + " an older compatibility version (<=4.3) but beware that it might cause highlighting bugs."); + if (settings.get(ENABLE_POS_INC_KEY) != null) { + throw new IllegalArgumentException(ENABLE_POS_INC_KEY + " is not supported anymore. Please fix your analysis chain"); } - enablePositionIncrements = version.onOrAfter(Version.LUCENE_4_4) ? true : settings.getAsBoolean(ENABLE_POS_INC_KEY, true); } @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_4)) { - return new LengthFilter(tokenStream, min, max); - } else { - @SuppressWarnings("deprecation") - final TokenStream filter = new Lucene43LengthFilter(enablePositionIncrements, tokenStream, min, max); - return filter; - } + return new LengthFilter(tokenStream, min, max); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenFilterFactory.java index 80e0aeb32eb..0905b310735 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenFilterFactory.java @@ -20,9 +20,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.ngram.Lucene43NGramTokenFilter; import org.apache.lucene.analysis.ngram.NGramTokenFilter; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -44,14 +42,8 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory { this.maxGram = settings.getAsInt("max_gram", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE); } - @SuppressWarnings("deprecation") @Override public TokenStream create(TokenStream tokenStream) { - final Version version = this.version == Version.LUCENE_4_3 ? 
Version.LUCENE_4_4 : this.version; // we supported it since 4.3 - if (version.onOrAfter(Version.LUCENE_4_3)) { - return new NGramTokenFilter(tokenStream, minGram, maxGram); - } else { - return new Lucene43NGramTokenFilter(tokenStream, minGram, maxGram); - } + return new NGramTokenFilter(tokenStream, minGram, maxGram); } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericDateTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericDateTokenizer.java index 03b502d4478..21a13eab573 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericDateTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericDateTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.joda.time.format.DateTimeFormatter; import java.io.IOException; @@ -30,11 +30,11 @@ import java.io.IOException; public class NumericDateTokenizer extends NumericTokenizer { public NumericDateTokenizer(int precisionStep, char[] buffer, DateTimeFormatter dateTimeFormatter) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, dateTimeFormatter); + super(new LegacyNumericTokenStream(precisionStep), buffer, dateTimeFormatter); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setLongValue(((DateTimeFormatter) extra).parseMillis(value)); } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleTokenizer.java index 793c3edd612..6be6199b529 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericDoubleTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import java.io.IOException; @@ -29,11 +29,11 @@ import java.io.IOException; public class NumericDoubleTokenizer extends NumericTokenizer { public NumericDoubleTokenizer(int precisionStep, char[] buffer) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, null); + super(new LegacyNumericTokenStream(precisionStep), buffer, null); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setDoubleValue(Double.parseDouble(value)); } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericFloatTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericFloatTokenizer.java index 02d42b8eef8..b7b2f6577f9 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericFloatTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericFloatTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import java.io.IOException; @@ -29,11 +29,11 @@ import java.io.IOException; public class NumericFloatTokenizer extends NumericTokenizer { public NumericFloatTokenizer(int precisionStep, char[] 
buffer) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, null); + super(new LegacyNumericTokenStream(precisionStep), buffer, null); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setFloatValue(Float.parseFloat(value)); } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericIntegerTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericIntegerTokenizer.java index 3f758c4900e..3d8b1309997 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericIntegerTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericIntegerTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import java.io.IOException; @@ -29,11 +29,11 @@ import java.io.IOException; public class NumericIntegerTokenizer extends NumericTokenizer { public NumericIntegerTokenizer(int precisionStep, char[] buffer) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, null); + super(new LegacyNumericTokenStream(precisionStep), buffer, null); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setIntValue(Integer.parseInt(value)); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java index d926371ca48..63abd2d9ed4 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericLongTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import java.io.IOException; @@ -29,11 +29,11 @@ import java.io.IOException; public class NumericLongTokenizer extends NumericTokenizer { public NumericLongTokenizer(int precisionStep, char[] buffer) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, null); + super(new LegacyNumericTokenStream(precisionStep), buffer, null); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setLongValue(Long.parseLong(value)); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NumericTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/NumericTokenizer.java index ccd87628988..6339b11636e 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NumericTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NumericTokenizer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.util.Attribute; import org.apache.lucene.util.AttributeFactory; @@ -45,12 +45,12 @@ public abstract class NumericTokenizer extends Tokenizer { }; } - private final NumericTokenStream numericTokenStream; + private final LegacyNumericTokenStream numericTokenStream; private 
final char[] buffer; protected final Object extra; private boolean started; - protected NumericTokenizer(NumericTokenStream numericTokenStream, char[] buffer, Object extra) throws IOException { + protected NumericTokenizer(LegacyNumericTokenStream numericTokenStream, char[] buffer, Object extra) throws IOException { super(delegatingAttributeFactory(numericTokenStream)); this.numericTokenStream = numericTokenStream; // Add attributes from the numeric token stream, this works fine because the attribute factory delegates to numericTokenStream @@ -95,5 +95,5 @@ public abstract class NumericTokenizer extends Tokenizer { numericTokenStream.close(); } - protected abstract void setValue(NumericTokenStream tokenStream, String value); + protected abstract void setValue(LegacyNumericTokenStream tokenStream, String value); } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzerProvider.java index 74150c13bf6..f00988f4ad2 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzerProvider.java @@ -40,13 +40,7 @@ public class PatternAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final StandardAnalyzer standardAnalyzer; - private final Version esVersion; public StandardAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); - this.esVersion = indexSettings.getIndexVersionCreated(); - final CharArraySet defaultStopwords; - if (esVersion.onOrAfter(Version.V_1_0_0_Beta1)) { - defaultStopwords = CharArraySet.EMPTY_SET; - } else { - defaultStopwords = StopAnalyzer.ENGLISH_STOP_WORDS_SET; - } - + final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET; CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords); int maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); standardAnalyzer = new StandardAnalyzer(stopWords); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java index 156ad1ff07e..a755e54db17 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzer.java @@ -26,10 +26,8 @@ import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.standard.StandardFilter; import org.apache.lucene.analysis.standard.StandardTokenizer; -import org.apache.lucene.analysis.standard.std40.StandardTokenizer40; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.analysis.util.StopwordAnalyzerBase; -import org.apache.lucene.util.Version; public class StandardHtmlStripAnalyzer extends StopwordAnalyzerBase { @@ -47,12 +45,7 @@ public class StandardHtmlStripAnalyzer extends StopwordAnalyzerBase { @Override protected TokenStreamComponents createComponents(final String fieldName) { - final Tokenizer src; - if (getVersion().onOrAfter(Version.LUCENE_4_7_0)) { - src = new StandardTokenizer(); - } else { - src = new StandardTokenizer40(); - } + final Tokenizer src = new StandardTokenizer(); TokenStream tok = new StandardFilter(src); tok = new LowerCaseFilter(tok); if (!stopwords.isEmpty()) { diff --git 
a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzerProvider.java index b2e95737ee1..a3c65b0a17b 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StandardHtmlStripAnalyzerProvider.java @@ -32,17 +32,10 @@ import org.elasticsearch.index.IndexSettings; public class StandardHtmlStripAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final StandardHtmlStripAnalyzer analyzer; - private final Version esVersion; public StandardHtmlStripAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); - this.esVersion = indexSettings.getIndexVersionCreated(); - final CharArraySet defaultStopwords; - if (esVersion.onOrAfter(Version.V_1_0_0_RC1)) { - defaultStopwords = CharArraySet.EMPTY_SET; - } else { - defaultStopwords = StopAnalyzer.ENGLISH_STOP_WORDS_SET; - } + final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET; CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords); analyzer = new StandardHtmlStripAnalyzer(stopWords); analyzer.setVersion(version); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java index d0702bdbc4b..3f142a1ab43 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java @@ -22,8 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.StandardTokenizer; -import org.apache.lucene.analysis.standard.std40.StandardTokenizer40; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -41,14 +39,8 @@ public class StandardTokenizerFactory extends AbstractTokenizerFactory { @Override public Tokenizer create() { - if (version.onOrAfter(Version.LUCENE_4_7_0)) { - StandardTokenizer tokenizer = new StandardTokenizer(); - tokenizer.setMaxTokenLength(maxTokenLength); - return tokenizer; - } else { - StandardTokenizer40 tokenizer = new StandardTokenizer40(); - tokenizer.setMaxTokenLength(maxTokenLength); - return tokenizer; - } + StandardTokenizer tokenizer = new StandardTokenizer(); + tokenizer.setMaxTokenLength(maxTokenLength); + return tokenizer; } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java index 1154f9b0f79..317b3e07850 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java @@ -122,11 +122,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { // English stemmers } else if ("english".equalsIgnoreCase(language)) { - if (indexVersion.onOrAfter(Version.V_1_3_0)) { - return new PorterStemFilter(tokenStream); - } else { - return new SnowballFilter(tokenStream, new EnglishStemmer()); - } + return new PorterStemFilter(tokenStream); } else if 
("light_english".equalsIgnoreCase(language) || "lightEnglish".equalsIgnoreCase(language) || "kstem".equalsIgnoreCase(language)) { return new KStemFilter(tokenStream); @@ -135,11 +131,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { } else if ("porter".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); } else if ("porter2".equalsIgnoreCase(language)) { - if (indexVersion.onOrAfter(Version.V_1_3_0)) { - return new SnowballFilter(tokenStream, new EnglishStemmer()); - } else { - return new SnowballFilter(tokenStream, new PorterStemmer()); - } + return new SnowballFilter(tokenStream, new EnglishStemmer()); } else if ("minimal_english".equalsIgnoreCase(language) || "minimalEnglish".equalsIgnoreCase(language)) { return new EnglishMinimalStemFilter(tokenStream); } else if ("possessive_english".equalsIgnoreCase(language) || "possessiveEnglish".equalsIgnoreCase(language)) { diff --git a/core/src/main/java/org/elasticsearch/index/analysis/StopTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/StopTokenFilterFactory.java index 6ab0c3fc9c8..322fcea452f 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/StopTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/StopTokenFilterFactory.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.core.Lucene43StopFilter; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.util.CharArraySet; @@ -42,7 +41,6 @@ public class StopTokenFilterFactory extends AbstractTokenFilterFactory { private final boolean ignoreCase; - private final boolean enablePositionIncrements; private final boolean removeTrailing; public StopTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { @@ -50,21 +48,15 @@ public class StopTokenFilterFactory extends AbstractTokenFilterFactory { this.ignoreCase = settings.getAsBoolean("ignore_case", false); this.removeTrailing = settings.getAsBoolean("remove_trailing", true); this.stopWords = Analysis.parseStopWords(env, settings, StopAnalyzer.ENGLISH_STOP_WORDS_SET, ignoreCase); - if (version.onOrAfter(Version.LUCENE_4_4) && settings.get("enable_position_increments") != null) { - throw new IllegalArgumentException("enable_position_increments is not supported anymore as of Lucene 4.4 as it can create broken token streams." - + " Please fix your analysis chain or use an older compatibility version (<= 4.3)."); + if (settings.get("enable_position_increments") != null) { + throw new IllegalArgumentException("enable_position_increments is not supported anymore. 
Please fix your analysis chain"); } - this.enablePositionIncrements = settings.getAsBoolean("enable_position_increments", true); } @Override public TokenStream create(TokenStream tokenStream) { if (removeTrailing) { - if (version.onOrAfter(Version.LUCENE_4_4)) { - return new StopFilter(tokenStream, stopWords); - } else { - return new Lucene43StopFilter(enablePositionIncrements, tokenStream, stopWords); - } + return new StopFilter(tokenStream, stopWords); } else { return new SuggestStopFilter(tokenStream, stopWords); } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/TrimTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/TrimTokenFilterFactory.java index a80c36b5a3e..c77467b2b41 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/TrimTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/TrimTokenFilterFactory.java @@ -20,9 +20,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.miscellaneous.Lucene43TrimFilter; import org.apache.lucene.analysis.miscellaneous.TrimFilter; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -32,26 +30,17 @@ import org.elasticsearch.index.IndexSettings; */ public class TrimTokenFilterFactory extends AbstractTokenFilterFactory { - private final boolean updateOffsets; private static final String UPDATE_OFFSETS_KEY = "update_offsets"; public TrimTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); - if (version.onOrAfter(Version.LUCENE_4_4_0) && settings.get(UPDATE_OFFSETS_KEY) != null) { - throw new IllegalArgumentException(UPDATE_OFFSETS_KEY + " is not supported anymore. Please fix your analysis chain or use" - + " an older compatibility version (<=4.3) but beware that it might cause highlighting bugs."); + if (settings.get(UPDATE_OFFSETS_KEY) != null) { + throw new IllegalArgumentException(UPDATE_OFFSETS_KEY + " is not supported anymore. 
Please fix your analysis chain"); } - this.updateOffsets = settings.getAsBoolean("update_offsets", false); } @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_4_0)) { - return new TrimFilter(tokenStream); - } else { - @SuppressWarnings("deprecation") - final TokenStream filter = new Lucene43TrimFilter(tokenStream, updateOffsets); - return filter; - } + return new TrimFilter(tokenStream); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/UAX29URLEmailTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/UAX29URLEmailTokenizerFactory.java index 0668409fa07..3e75d214bd3 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/UAX29URLEmailTokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/UAX29URLEmailTokenizerFactory.java @@ -22,8 +22,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; -import org.apache.lucene.analysis.standard.std40.UAX29URLEmailTokenizer40; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -42,14 +40,8 @@ public class UAX29URLEmailTokenizerFactory extends AbstractTokenizerFactory { @Override public Tokenizer create() { - if (version.onOrAfter(Version.LUCENE_4_7)) { - UAX29URLEmailTokenizer tokenizer = new UAX29URLEmailTokenizer(); - tokenizer.setMaxTokenLength(maxTokenLength); - return tokenizer; - } else { - UAX29URLEmailTokenizer40 tokenizer = new UAX29URLEmailTokenizer40(); - tokenizer.setMaxTokenLength(maxTokenLength); - return tokenizer; - } + UAX29URLEmailTokenizer tokenizer = new UAX29URLEmailTokenizer(); + tokenizer.setMaxTokenLength(maxTokenLength); + return tokenizer; } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java index 1d5a9563130..118d7f84a11 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java @@ -20,11 +20,9 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.miscellaneous.Lucene47WordDelimiterFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator; import org.apache.lucene.analysis.util.CharArraySet; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -96,17 +94,10 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_8)) { - return new WordDelimiterFilter(tokenStream, + return new WordDelimiterFilter(tokenStream, charTypeTable, flags, protoWords); - } else { - return new Lucene47WordDelimiterFilter(tokenStream, - charTypeTable, - flags, - protoWords); - } } public int getFlag(int flag, Settings settings, String key, boolean defaultValue) { diff --git 
a/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java index 8d65e008f25..fc9719d36b1 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java @@ -21,8 +21,6 @@ package org.elasticsearch.index.analysis.compound; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.compound.DictionaryCompoundWordTokenFilter; -import org.apache.lucene.analysis.compound.Lucene43DictionaryCompoundWordTokenFilter; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -41,12 +39,7 @@ public class DictionaryCompoundWordTokenFilterFactory extends AbstractCompoundWo @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_4_0)) { - return new DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, - minSubwordSize, maxSubwordSize, onlyLongestMatch); - } else { - return new Lucene43DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, - minSubwordSize, maxSubwordSize, onlyLongestMatch); - } + return new DictionaryCompoundWordTokenFilter(tokenStream, wordList, minWordSize, + minSubwordSize, maxSubwordSize, onlyLongestMatch); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java index 42a29784acc..152d4395ef3 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java @@ -21,9 +21,7 @@ package org.elasticsearch.index.analysis.compound; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.compound.HyphenationCompoundWordTokenFilter; -import org.apache.lucene.analysis.compound.Lucene43HyphenationCompoundWordTokenFilter; import org.apache.lucene.analysis.compound.hyphenation.HyphenationTree; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -60,12 +58,7 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW @Override public TokenStream create(TokenStream tokenStream) { - if (version.onOrAfter(Version.LUCENE_4_4_0)) { - return new HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, - minSubwordSize, maxSubwordSize, onlyLongestMatch); - } else { - return new Lucene43HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, - minSubwordSize, maxSubwordSize, onlyLongestMatch); - } + return new HyphenationCompoundWordTokenFilter(tokenStream, hyphenationTree, wordList, minWordSize, + minSubwordSize, maxSubwordSize, onlyLongestMatch); } } diff --git a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java index 432f81da8a9..3edc509b7eb 100644 --- 
a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; import org.apache.lucene.codecs.lucene54.Lucene54Codec; +import org.apache.lucene.codecs.lucene60.Lucene60Codec; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.logging.ESLogger; @@ -47,8 +48,8 @@ public class CodecService { public CodecService(@Nullable MapperService mapperService, ESLogger logger) { final MapBuilder codecs = MapBuilder.newMapBuilder(); if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene54Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene54Codec(Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene60Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene60Codec(Mode.BEST_COMPRESSION)); } else { codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); diff --git a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java index 7663a322be6..a4977baa1f2 100644 --- a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -22,7 +22,7 @@ package org.elasticsearch.index.codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; -import org.apache.lucene.codecs.lucene54.Lucene54Codec; +import org.apache.lucene.codecs.lucene60.Lucene60Codec; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.mapper.MappedFieldType; @@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.core.CompletionFieldMapper; * configured for a specific field the default postings format is used. */ // LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version -public class PerFieldMappingPostingFormatCodec extends Lucene54Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene60Codec { private final ESLogger logger; private final MapperService mapperService; diff --git a/core/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java b/core/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java index 5a7f481eaad..a2900f649ef 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java +++ b/core/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.engine; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.index.translog.Translog; /** Holds a deleted version, which just adds a timestamp to {@link VersionValue} so we know when we can expire the deletion. 
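(Illustrative note: the expiry timestamp is a single {@code long} field, which is why {@code ramBytesUsed()} in the hunk below adds {@code Long.BYTES} = 8 on top of the parent's estimate; the Java 8 constant stands in for {@code RamUsageEstimator.NUM_BYTES_LONG}, which this Lucene upgrade drops.)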
*/ @@ -44,6 +43,6 @@ class DeleteVersionValue extends VersionValue { @Override public long ramBytesUsed() { - return super.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_LONG; + return super.ramBytesUsed() + Long.BYTES; } } diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index c412ce3b85f..01f02025aeb 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -275,7 +275,7 @@ public class InternalEngine extends Engine { SearcherManager searcherManager = null; try { try { - final DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter, true), shardId); + final DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId); searcherManager = new SearcherManager(directoryReader, searcherFactory); lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager, store); success = true; diff --git a/core/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java b/core/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java index 747e955b179..f962d31bf8b 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java +++ b/core/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java @@ -64,7 +64,7 @@ class LiveVersionMap implements ReferenceManager.RefreshListener, Accountable { * * NUM_BYTES_OBJECT_HEADER + 2*NUM_BYTES_INT + NUM_BYTES_OBJECT_REF + NUM_BYTES_ARRAY_HEADER [ + bytes.length] */ private static final int BASE_BYTES_PER_BYTESREF = RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + - 2*RamUsageEstimator.NUM_BYTES_INT + + 2*Integer.BYTES + RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER; @@ -76,7 +76,7 @@ class LiveVersionMap implements ReferenceManager.RefreshListener, Accountable { * CHM's pointer to CHM.Entry, double for approx load factor: * + 2*NUM_BYTES_OBJECT_REF */ private static final int BASE_BYTES_PER_CHM_ENTRY = RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + - RamUsageEstimator.NUM_BYTES_INT + + Integer.BYTES + 5*RamUsageEstimator.NUM_BYTES_OBJECT_REF; /** Tracks bytes used by current map, i.e. what is freed on refresh. 
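(Worked example, for illustration: on a 64-bit JVM with compressed oops, RamUsageEstimator typically reports NUM_BYTES_OBJECT_HEADER = 12, NUM_BYTES_OBJECT_REF = 4 and NUM_BYTES_ARRAY_HEADER = 16, so with Integer.BYTES = 4 the BytesRef base above works out to 12 + 2*4 + 4 + 16 = 40 bytes per key, before the payload bytes themselves.)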
For deletes, which are also added to tombstones, we only account diff --git a/core/src/main/java/org/elasticsearch/index/engine/VersionValue.java b/core/src/main/java/org/elasticsearch/index/engine/VersionValue.java index 950dbdbae65..6b780c2a6a3 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/VersionValue.java +++ b/core/src/main/java/org/elasticsearch/index/engine/VersionValue.java @@ -54,7 +54,7 @@ class VersionValue implements Accountable { @Override public long ramBytesUsed() { - return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_OBJECT_REF + translogLocation.ramBytesUsed(); + return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Long.BYTES + RamUsageEstimator.NUM_BYTES_OBJECT_REF + translogLocation.ramBytesUsed(); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java index 58a7c9758b7..b3c51141e20 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java @@ -23,7 +23,7 @@ import org.apache.lucene.util.BytesRef; /** * A list of per-document binary values, sorted - * according to {@link BytesRef#getUTF8SortedAsUnicodeComparator()}. + * according to {@link BytesRef#compareTo(BytesRef)}. * There might be dups however. */ public abstract class SortedBinaryDocValues { diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java index bdc121b134b..2b69afa5f82 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java @@ -30,8 +30,8 @@ import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.LegacyNumericUtils; import org.apache.lucene.util.LongsRef; -import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.packed.GrowableWriter; import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PagedGrowableWriter; @@ -459,7 +459,7 @@ public final class OrdinalsBuilder implements Closeable { @Override protected AcceptStatus accept(BytesRef term) throws IOException { // we stop accepting terms once we moved across the prefix codec terms - redundant values! - return NumericUtils.getPrefixCodedLongShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END; + return LegacyNumericUtils.getPrefixCodedLongShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END; } }; } @@ -475,7 +475,7 @@ public final class OrdinalsBuilder implements Closeable { @Override protected AcceptStatus accept(BytesRef term) throws IOException { // we stop accepting terms once we moved across the prefix codec terms - redundant values! - return NumericUtils.getPrefixCodedIntShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END; + return LegacyNumericUtils.getPrefixCodedIntShift(term) == 0 ? 
AcceptStatus.YES : AcceptStatus.END; } }; } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java index 022e3ad0923..2c41dece3de 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java @@ -24,7 +24,7 @@ import org.apache.lucene.spatial.util.GeoEncodingUtils; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.CharsRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.geo.GeoPoint; @@ -62,7 +62,7 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData leaves; private final AtomicParentChildFieldData[] fielddata; - private final IndexReader reader; private final long ramBytesUsed; private final Map ordinalMapPerType; GlobalFieldData(IndexReader reader, AtomicParentChildFieldData[] fielddata, long ramBytesUsed, Map ordinalMapPerType) { - this.reader = reader; + this.coreCacheKey = reader.getCoreCacheKey(); + this.leaves = reader.leaves(); this.ramBytesUsed = ramBytesUsed; this.fielddata = fielddata; this.ordinalMapPerType = ordinalMapPerType; @@ -329,7 +331,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldDataString[] of field values diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java index 29081c6c913..86818a3999e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java @@ -23,11 +23,11 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; @@ -116,7 +116,7 @@ public class ByteFieldMapper extends NumberFieldMapper { static final class ByteFieldType extends NumberFieldType { public ByteFieldType() { - super(NumericType.INT); + super(LegacyNumericType.INT); } protected ByteFieldType(ByteFieldType ref) { @@ -155,13 +155,13 @@ public class ByteFieldMapper extends NumberFieldMapper { @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), + return 
LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), lowerTerm == null ? null : (int)parseValue(lowerTerm), upperTerm == null ? null : (int)parseValue(upperTerm), includeLower, includeUpper); @@ -171,7 +171,7 @@ public class ByteFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { byte iValue = parseValue(value); byte iSim = fuzziness.asByte(); - return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -179,8 +179,8 @@ public class ByteFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinInt(terms); - long maxValue = NumericUtils.getMaxInt(terms); + long minValue = LegacyNumericUtils.getMinInt(terms); + long maxValue = LegacyNumericUtils.getMaxInt(terms); return new FieldStats.Long( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue ); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index be83f0175c2..048c58297cb 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -23,12 +23,11 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; -import org.apache.lucene.util.ToStringUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; @@ -243,7 +242,6 @@ public class DateFieldMapper extends NumberFieldMapper { .append(" TO ") .append((upperTerm == null) ? "*" : upperTerm.toString()) .append(includeUpper ? 
']' : '}') - .append(ToStringUtils.boost(getBoost())) .toString(); } } @@ -253,7 +251,7 @@ public class DateFieldMapper extends NumberFieldMapper { protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter); public DateFieldType() { - super(NumericType.LONG); + super(LegacyNumericType.LONG); setFieldDataType(new FieldDataType("long")); } @@ -360,7 +358,7 @@ public class DateFieldMapper extends NumberFieldMapper { @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @@ -392,7 +390,7 @@ public class DateFieldMapper extends NumberFieldMapper { // not a time format iSim = fuzziness.asLong(); } - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -400,8 +398,8 @@ public class DateFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinLong(terms); - long maxValue = NumericUtils.getMaxLong(terms); + long minValue = LegacyNumericUtils.getMinLong(terms); + long maxValue = LegacyNumericUtils.getMaxLong(terms); return new FieldStats.Date( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue, dateTimeFormatter() ); @@ -412,7 +410,7 @@ public class DateFieldMapper extends NumberFieldMapper { } private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? 
dateMathParser : forcedDateParser), includeLower, includeUpper); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java index 7d33d09cd99..e7550dc1f92 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java @@ -24,10 +24,11 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.LegacyNumericUtils; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; @@ -49,7 +50,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.apache.lucene.util.NumericUtils.doubleToSortableLong; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeDoubleValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -118,7 +118,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { public static final class DoubleFieldType extends NumberFieldType { public DoubleFieldType() { - super(NumericType.DOUBLE); + super(LegacyNumericType.DOUBLE); } protected DoubleFieldType(DoubleFieldType ref) { @@ -158,13 +158,13 @@ public class DoubleFieldMapper extends NumberFieldMapper { public BytesRef indexedValueForSearch(Object value) { long longValue = NumericUtils.doubleToSortableLong(parseDoubleValue(value)); BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseDoubleValue(lowerTerm), upperTerm == null ? 
null : parseDoubleValue(upperTerm), includeLower, includeUpper); @@ -174,7 +174,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { double iValue = parseDoubleValue(value); double iSim = fuzziness.asDouble(); - return NumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -182,8 +182,8 @@ public class DoubleFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - double minValue = NumericUtils.sortableLongToDouble(NumericUtils.getMinLong(terms)); - double maxValue = NumericUtils.sortableLongToDouble(NumericUtils.getMaxLong(terms)); + double minValue = NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMinLong(terms)); + double maxValue = NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMaxLong(terms)); return new FieldStats.Double( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue ); @@ -284,7 +284,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { fields.add(field); } if (fieldType().hasDocValues()) { - addDocValue(context, fields, doubleToSortableLong(value)); + addDocValue(context, fields, NumericUtils.doubleToSortableLong(value)); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java index 85c5b619bf1..93cf3a7cfaf 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java @@ -24,10 +24,11 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.LegacyNumericUtils; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; @@ -50,7 +51,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.apache.lucene.util.NumericUtils.floatToSortableInt; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -119,7 +119,7 @@ public class FloatFieldMapper extends NumberFieldMapper { static final class FloatFieldType extends NumberFieldType { public FloatFieldType() { - super(NumericType.FLOAT); + super(LegacyNumericType.FLOAT); } protected FloatFieldType(FloatFieldType ref) { @@ -159,13 +159,13 @@ public class FloatFieldMapper extends NumberFieldMapper { public BytesRef indexedValueForSearch(Object value) { int intValue = NumericUtils.floatToSortableInt(parseValue(value)); BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @Override public Query rangeQuery(Object 
lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseValue(lowerTerm), upperTerm == null ? null : parseValue(upperTerm), includeLower, includeUpper); @@ -175,7 +175,7 @@ public class FloatFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { float iValue = parseValue(value); final float iSim = fuzziness.asFloat(); - return NumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -183,8 +183,8 @@ public class FloatFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - float minValue = NumericUtils.sortableIntToFloat(NumericUtils.getMinInt(terms)); - float maxValue = NumericUtils.sortableIntToFloat(NumericUtils.getMaxInt(terms)); + float minValue = NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMinInt(terms)); + float maxValue = NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMaxInt(terms)); return new FieldStats.Float( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue ); @@ -296,7 +296,7 @@ public class FloatFieldMapper extends NumberFieldMapper { fields.add(field); } if (fieldType().hasDocValues()) { - addDocValue(context, fields, floatToSortableInt(value)); + addDocValue(context, fields, NumericUtils.floatToSortableInt(value)); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java index 7de62510415..fa7191cafbf 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java @@ -24,11 +24,11 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; @@ -124,7 +124,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { public static final class IntegerFieldType extends NumberFieldType { public IntegerFieldType() { - super(NumericType.INT); + super(LegacyNumericType.INT); } protected IntegerFieldType(IntegerFieldType ref) { @@ -164,13 +164,13 @@ public class IntegerFieldMapper extends NumberFieldMapper { @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, 
boolean includeUpper) { - return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseValue(lowerTerm), upperTerm == null ? null : parseValue(upperTerm), includeLower, includeUpper); @@ -180,7 +180,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { int iValue = parseValue(value); int iSim = fuzziness.asInt(); - return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -188,8 +188,8 @@ public class IntegerFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinInt(terms); - long maxValue = NumericUtils.getMaxInt(terms); + long minValue = LegacyNumericUtils.getMinInt(terms); + long maxValue = LegacyNumericUtils.getMaxInt(terms); return new FieldStats.Long( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue ); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java index 171bc8de794..3f01493590c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java @@ -85,9 +85,9 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap @Override public Builder indexOptions(IndexOptions indexOptions) { - if (fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) > 0) { + if (indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) > 0) { throw new IllegalArgumentException("The [keyword] field does not support positions, got [index_options]=" - + indexOptionToString(fieldType.indexOptions())); + + indexOptionToString(indexOptions)); } return super.indexOptions(indexOptions); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java index 0e9592fd72e..a1acf0ab58a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java @@ -24,11 +24,11 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; @@ -123,7 +123,7 @@ public class LongFieldMapper extends NumberFieldMapper { public static class LongFieldType extends NumberFieldType { public LongFieldType() { - super(NumericType.LONG); + super(LegacyNumericType.LONG); } protected LongFieldType(LongFieldType ref) { @@ -162,13 +162,13 @@ public class LongFieldMapper extends 
NumberFieldMapper { @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseLongValue(lowerTerm), upperTerm == null ? null : parseLongValue(upperTerm), includeLower, includeUpper); @@ -178,7 +178,7 @@ public class LongFieldMapper extends NumberFieldMapper { public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { long iValue = parseLongValue(value); final long iSim = fuzziness.asLong(); - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -186,8 +186,8 @@ public class LongFieldMapper extends NumberFieldMapper { @Override public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinLong(terms); - long maxValue = NumericUtils.getMaxLong(terms); + long minValue = LegacyNumericUtils.getMinLong(terms); + long maxValue = LegacyNumericUtils.getMaxLong(terms); return new FieldStats.Long( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue ); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 90fb20ef827..7c2a38eaee7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; @@ -129,7 +129,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM public static abstract class NumberFieldType extends MappedFieldType { - public NumberFieldType(NumericType numericType) { + public NumberFieldType(LegacyNumericType numericType) { setTokenized(false); setOmitNorms(true); setIndexOptions(IndexOptions.DOCS); @@ -295,38 +295,38 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM // used to we can use a numeric field in a document that is then parsed twice! 
 public abstract static class CustomNumericField extends Field {
 
-    private ThreadLocal<NumericTokenStream> tokenStream = new ThreadLocal<NumericTokenStream>() {
+    private ThreadLocal<LegacyNumericTokenStream> tokenStream = new ThreadLocal<LegacyNumericTokenStream>() {
         @Override
-        protected NumericTokenStream initialValue() {
-            return new NumericTokenStream(fieldType().numericPrecisionStep());
+        protected LegacyNumericTokenStream initialValue() {
+            return new LegacyNumericTokenStream(fieldType().numericPrecisionStep());
         }
     };
 
-    private static ThreadLocal<NumericTokenStream> tokenStream4 = new ThreadLocal<NumericTokenStream>() {
+    private static ThreadLocal<LegacyNumericTokenStream> tokenStream4 = new ThreadLocal<LegacyNumericTokenStream>() {
         @Override
-        protected NumericTokenStream initialValue() {
-            return new NumericTokenStream(4);
+        protected LegacyNumericTokenStream initialValue() {
+            return new LegacyNumericTokenStream(4);
         }
     };
 
-    private static ThreadLocal<NumericTokenStream> tokenStream8 = new ThreadLocal<NumericTokenStream>() {
+    private static ThreadLocal<LegacyNumericTokenStream> tokenStream8 = new ThreadLocal<LegacyNumericTokenStream>() {
         @Override
-        protected NumericTokenStream initialValue() {
-            return new NumericTokenStream(8);
+        protected LegacyNumericTokenStream initialValue() {
+            return new LegacyNumericTokenStream(8);
         }
     };
 
-    private static ThreadLocal<NumericTokenStream> tokenStream16 = new ThreadLocal<NumericTokenStream>() {
+    private static ThreadLocal<LegacyNumericTokenStream> tokenStream16 = new ThreadLocal<LegacyNumericTokenStream>() {
         @Override
-        protected NumericTokenStream initialValue() {
-            return new NumericTokenStream(16);
+        protected LegacyNumericTokenStream initialValue() {
+            return new LegacyNumericTokenStream(16);
         }
     };
 
-    private static ThreadLocal<NumericTokenStream> tokenStreamMax = new ThreadLocal<NumericTokenStream>() {
+    private static ThreadLocal<LegacyNumericTokenStream> tokenStreamMax = new ThreadLocal<LegacyNumericTokenStream>() {
         @Override
-        protected NumericTokenStream initialValue() {
-            return new NumericTokenStream(Integer.MAX_VALUE);
+        protected LegacyNumericTokenStream initialValue() {
+            return new LegacyNumericTokenStream(Integer.MAX_VALUE);
         }
     };
@@ -337,7 +337,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
         }
     }
 
-    protected NumericTokenStream getCachedStream() {
+    protected LegacyNumericTokenStream getCachedStream() {
         if (fieldType().numericPrecisionStep() == 4) {
             return tokenStream4.get();
         } else if (fieldType().numericPrecisionStep() == 8) {
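The hunk above keeps Elasticsearch's per-thread cache of numeric token streams and only swaps in the `Legacy`-prefixed Lucene class: allocating a token stream per indexed numeric value would be wasteful, so one reusable stream per common precision step is held in a `ThreadLocal`. A condensed sketch of the same pattern, assuming the `org.apache.lucene.analysis.LegacyNumericTokenStream` import used by the hunk; the class and method names here are illustrative:

```java
import org.apache.lucene.analysis.LegacyNumericTokenStream;

// One reusable stream per thread for each common precision step, so indexing
// a numeric field does not allocate a new token stream per document.
public class CachedNumericStreams {
    private static final ThreadLocal<LegacyNumericTokenStream> STREAM_4 =
        ThreadLocal.withInitial(() -> new LegacyNumericTokenStream(4));
    private static final ThreadLocal<LegacyNumericTokenStream> STREAM_8 =
        ThreadLocal.withInitial(() -> new LegacyNumericTokenStream(8));

    static LegacyNumericTokenStream get(int precisionStep) {
        switch (precisionStep) {
            case 4: return STREAM_4.get();
            case 8: return STREAM_8.get();
            default: return new LegacyNumericTokenStream(precisionStep); // uncached steps
        }
    }
}
```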
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java
index 027f0b1b40b..56b1e9a78f2 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java
@@ -24,11 +24,11 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.NumericRangeQuery;
+import org.apache.lucene.search.LegacyNumericRangeQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.LegacyNumericUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.fieldstats.FieldStats;
 import org.elasticsearch.common.Explicit;
@@ -121,7 +121,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
     static final class ShortFieldType extends NumberFieldType {
 
         public ShortFieldType() {
-            super(NumericType.INT);
+            super(LegacyNumericType.INT);
         }
 
         protected ShortFieldType(ShortFieldType ref) {
@@ -160,13 +160,13 @@ public class ShortFieldMapper extends NumberFieldMapper {
         @Override
         public BytesRef indexedValueForSearch(Object value) {
             BytesRefBuilder bytesRef = new BytesRefBuilder();
-            NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
+            LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
             return bytesRef.get();
         }
 
         @Override
         public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
-            return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
+            return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
                 lowerTerm == null ? null : (int)parseValue(lowerTerm),
                 upperTerm == null ? null : (int)parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -176,7 +176,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
     public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
         short iValue = parseValue(value);
         short iSim = fuzziness.asShort();
-        return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
+        return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
             iValue - iSim,
             iValue + iSim,
             true, true);
@@ -184,8 +184,8 @@ public class ShortFieldMapper extends NumberFieldMapper {
     @Override
     public FieldStats stats(Terms terms, int maxDoc) throws IOException {
-        long minValue = NumericUtils.getMinInt(terms);
-        long maxValue = NumericUtils.getMaxInt(terms);
+        long minValue = LegacyNumericUtils.getMinInt(terms);
+        long maxValue = LegacyNumericUtils.getMaxInt(terms);
         return new FieldStats.Long(
             maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
         );
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java
index f881d206f0c..5e617dd6815 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java
@@ -21,13 +21,15 @@ package org.elasticsearch.index.mapper.geo;
 
 import org.apache.lucene.document.Field;
 import org.apache.lucene.spatial.util.GeoHashUtils;
-import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.LegacyNumericUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -56,6 +58,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
  */
 public abstract class BaseGeoPointFieldMapper extends FieldMapper implements ArrayValueMapperParser {
     public static final String CONTENT_TYPE = "geo_point";
+    protected static final DeprecationLogger deprecationLogger = new DeprecationLogger(Loggers.getLogger(BaseGeoPointFieldMapper.class));
     public static class Names {
         public static final String LAT = "lat";
         public static final String LAT_SUFFIX = "." + LAT;
@@ -194,9 +197,13 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
             String propName = Strings.toUnderscoreCase(entry.getKey());
             Object propNode = entry.getValue();
             if (propName.equals("lat_lon")) {
+                deprecationLogger.deprecated(CONTENT_TYPE + " lat_lon parameter is deprecated and will be removed "
+                    + "in the next major release");
                 builder.enableLatLon(XContentMapValues.lenientNodeBooleanValue(propNode));
                 iterator.remove();
             } else if (propName.equals("precision_step")) {
+                deprecationLogger.deprecated(CONTENT_TYPE + " precision_step parameter is deprecated and will be removed "
+                    + "in the next major release");
                 builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode));
                 iterator.remove();
             } else if (propName.equals("geohash")) {
@@ -483,7 +490,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
         if (includeDefaults || fieldType().isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) {
             builder.field("lat_lon", fieldType().isLatLonEnabled());
         }
-        if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
+        if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != LegacyNumericUtils.PRECISION_STEP_DEFAULT)) {
             builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
         }
         if (includeDefaults || fieldType().isGeoHashEnabled() != Defaults.ENABLE_GEOHASH) {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
index 0d84cf21812..75c082dd439 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java
@@ -84,7 +84,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {
             fieldType.setTokenized(false);
             if (context.indexCreatedVersion().before(Version.V_2_3_0)) {
                 fieldType.setNumericPrecisionStep(GeoPointField.PRECISION_STEP);
-                fieldType.setNumericType(FieldType.NumericType.LONG);
+                fieldType.setNumericType(FieldType.LegacyNumericType.LONG);
             }
             setupFieldType(context);
             return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper,
@@ -95,7 +95,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {
         public GeoPointFieldMapper build(BuilderContext context) {
             if (context.indexCreatedVersion().before(Version.V_2_3_0)) {
                 fieldType.setNumericPrecisionStep(GeoPointField.PRECISION_STEP);
-                fieldType.setNumericType(FieldType.NumericType.LONG);
+                fieldType.setNumericType(FieldType.LegacyNumericType.LONG);
             }
             return super.build(context);
         }
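The hunk above starts warning users that `lat_lon` and `precision_step` are on their way out by routing the message through Elasticsearch's dedicated deprecation logger. A minimal sketch of that pattern, using the same `DeprecationLogger`/`Loggers` helpers the hunk imports; the surrounding class and method names are illustrative:

```java
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;

// Emit a deprecation warning whenever a legacy mapping parameter is parsed.
public class GeoPointParams {
    private static final DeprecationLogger DEPRECATION_LOGGER =
        new DeprecationLogger(Loggers.getLogger(GeoPointParams.class));

    void onLatLonParameter() {
        // Routed through the "deprecation" logger so operators can grep for it.
        DEPRECATION_LOGGER.deprecated("geo_point lat_lon parameter is deprecated and will be removed "
            + "in the next major release");
    }
}
```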
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
index e90fdae0c47..57778fa8d25 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
@@ -18,9 +18,9 @@
  */
 package org.elasticsearch.index.mapper.geo;
 
-import com.spatial4j.core.shape.Point;
-import com.spatial4j.core.shape.Shape;
-import com.spatial4j.core.shape.jts.JtsGeometry;
+import org.locationtech.spatial4j.shape.Point;
+import org.locationtech.spatial4j.shape.Shape;
+import org.locationtech.spatial4j.shape.jts.JtsGeometry;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
@@ -58,7 +58,7 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenien
 
 /**
- * FieldMapper for indexing {@link com.spatial4j.core.shape.Shape}s.
+ * FieldMapper for indexing {@link org.locationtech.spatial4j.shape.Shape}s.
  * <p>
    * Currently Shapes can only be indexed and can only be queried using * {@link org.elasticsearch.index.query.GeoShapeQueryParser}, consequently diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index 18929bfd833..9a4cf70782b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -19,14 +19,14 @@ package org.elasticsearch.index.mapper.ip; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; @@ -206,7 +206,7 @@ public class IpFieldMapper extends NumberFieldMapper { @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } @@ -242,7 +242,7 @@ public class IpFieldMapper extends NumberFieldMapper { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), lowerTerm == null ? null : parseValue(lowerTerm), upperTerm == null ? null : parseValue(upperTerm), includeLower, includeUpper); @@ -257,7 +257,7 @@ public class IpFieldMapper extends NumberFieldMapper { } catch (IllegalArgumentException e) { iSim = fuzziness.asLong(); } - return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), iValue - iSim, iValue + iSim, true, true); @@ -356,11 +356,11 @@ public class IpFieldMapper extends NumberFieldMapper { public static class NumericIpTokenizer extends NumericTokenizer { public NumericIpTokenizer(int precisionStep, char[] buffer) throws IOException { - super(new NumericTokenStream(precisionStep), buffer, null); + super(new LegacyNumericTokenStream(precisionStep), buffer, null); } @Override - protected void setValue(NumericTokenStream tokenStream, String value) { + protected void setValue(LegacyNumericTokenStream tokenStream, String value) { tokenStream.setLongValue(ipToLong(value)); } } diff --git a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java index c8d0379d701..524266420fb 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java @@ -69,8 +69,6 @@ public final class ElasticsearchMergePolicy extends MergePolicy { /** Return an "upgraded" view of the reader. 
*/ static CodecReader filter(CodecReader reader) throws IOException { - // convert 0.90.x _uid payloads to _version docvalues if needed - reader = VersionFieldUpgrader.wrap(reader); // TODO: remove 0.90.x/1.x freqs/prox/payloads from _uid? // the previous code never did this, so some indexes carry around trash. return reader; diff --git a/core/src/main/java/org/elasticsearch/index/shard/VersionFieldUpgrader.java b/core/src/main/java/org/elasticsearch/index/shard/VersionFieldUpgrader.java deleted file mode 100644 index 42bd5420ac3..00000000000 --- a/core/src/main/java/org/elasticsearch/index/shard/VersionFieldUpgrader.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.shard; - -import org.apache.lucene.codecs.DocValuesProducer; -import org.apache.lucene.index.CodecReader; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.FieldInfo; -import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.FilterCodecReader; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.util.Bits; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.packed.GrowableWriter; -import org.apache.lucene.util.packed.PackedInts; -import org.elasticsearch.common.Numbers; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.mapper.internal.VersionFieldMapper; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; - -/** - * Converts 0.90.x _uid payloads to _version docvalues - */ -class VersionFieldUpgrader extends FilterCodecReader { - final FieldInfos infos; - - VersionFieldUpgrader(CodecReader in) { - super(in); - - // Find a free field number - int fieldNumber = 0; - for (FieldInfo fi : in.getFieldInfos()) { - fieldNumber = Math.max(fieldNumber, fi.number + 1); - } - - // TODO: lots of things can wrong here... 
- FieldInfo newInfo = new FieldInfo(VersionFieldMapper.NAME, // field name - fieldNumber, // field number - false, // store term vectors - false, // omit norms - false, // store payloads - IndexOptions.NONE, // index options - DocValuesType.NUMERIC, // docvalues - -1, // docvalues generation - Collections.emptyMap() // attributes - ); - newInfo.checkConsistency(); // fail merge immediately if above code is wrong - - final ArrayList fieldInfoList = new ArrayList<>(); - for (FieldInfo info : in.getFieldInfos()) { - if (!info.name.equals(VersionFieldMapper.NAME)) { - fieldInfoList.add(info); - } - } - fieldInfoList.add(newInfo); - infos = new FieldInfos(fieldInfoList.toArray(new FieldInfo[fieldInfoList.size()])); - } - - static CodecReader wrap(CodecReader reader) throws IOException { - final FieldInfos fieldInfos = reader.getFieldInfos(); - final FieldInfo versionInfo = fieldInfos.fieldInfo(VersionFieldMapper.NAME); - if (versionInfo != null && versionInfo.getDocValuesType() != DocValuesType.NONE) { - // the reader is a recent one, it has versions and they are stored - // in a numeric doc values field - return reader; - } - // The segment is an old one, look at the _uid field - final Terms terms = reader.terms(UidFieldMapper.NAME); - if (terms == null || !terms.hasPayloads()) { - // The segment doesn't have an _uid field or doesn't have payloads - // don't try to do anything clever. If any other segment has versions - // all versions of this segment will be initialized to 0 - return reader; - } - // convert _uid payloads -> _version docvalues - return new VersionFieldUpgrader(reader); - } - - @Override - public FieldInfos getFieldInfos() { - return infos; - } - - @Override - public DocValuesProducer getDocValuesReader() { - DocValuesProducer producer = in.getDocValuesReader(); - // TODO: move this nullness stuff out - if (producer == null) { - producer = FilterDocValuesProducer.EMPTY; - } - return new UninvertedVersions(producer, this); - } - - static class UninvertedVersions extends FilterDocValuesProducer { - final CodecReader reader; - - UninvertedVersions(DocValuesProducer in, CodecReader reader) { - super(in); - this.reader = reader; - } - - @Override - public NumericDocValues getNumeric(FieldInfo field) throws IOException { - if (VersionFieldMapper.NAME.equals(field.name)) { - // uninvert into a packed ints and expose as docvalues - final Terms terms = reader.terms(UidFieldMapper.NAME); - final TermsEnum uids = terms.iterator(); - final GrowableWriter versions = new GrowableWriter(2, reader.maxDoc(), PackedInts.COMPACT); - PostingsEnum dpe = null; - for (BytesRef uid = uids.next(); uid != null; uid = uids.next()) { - dpe = uids.postings(dpe, PostingsEnum.PAYLOADS); - assert terms.hasPayloads() : "field has payloads"; - final Bits liveDocs = reader.getLiveDocs(); - for (int doc = dpe.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = dpe.nextDoc()) { - if (liveDocs != null && liveDocs.get(doc) == false) { - continue; - } - dpe.nextPosition(); - final BytesRef payload = dpe.getPayload(); - if (payload != null && payload.length == 8) { - final long version = Numbers.bytesToLong(payload); - versions.set(doc, version); - break; - } - } - } - return versions; - } else { - return in.getNumeric(field); - } - } - - @Override - public Bits getDocsWithField(FieldInfo field) throws IOException { - if (VersionFieldMapper.NAME.equals(field.name)) { - return new Bits.MatchAllBits(reader.maxDoc()); - } else { - return in.getDocsWithField(field); - } - } - - @Override - public DocValuesProducer 
getMergeInstance() throws IOException {
-            return new UninvertedVersions(in.getMergeInstance(), reader);
-        }
-    }
-}
diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
index e950ebda1b3..edbebe8f033 100644
--- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
+++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.similarity;
 
 import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
 import org.apache.lucene.search.similarities.Similarity;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.IndexModule;
@@ -63,6 +64,10 @@ public final class SimilarityService extends AbstractIndexComponent {
         Map<String, Settings> similaritySettings = this.indexSettings.getSettings().getGroups(IndexModule.SIMILARITY_SETTINGS_PREFIX);
         for (Map.Entry<String, Settings> entry : similaritySettings.entrySet()) {
             String name = entry.getKey();
+            // Starting with v5.0 indices, it should no longer be possible to redefine built-in similarities
+            if(BUILT_IN.containsKey(name) && indexSettings.getIndexVersionCreated().onOrAfter(Version.V_5_0_0)) {
+                throw new IllegalArgumentException("Cannot redefine built-in Similarity [" + name + "]");
+            }
             Settings settings = entry.getValue();
             String typeName = settings.get("type");
             if (typeName == null) {
@@ -76,9 +81,16 @@ public final class SimilarityService extends AbstractIndexComponent {
             }
             providers.put(name, factory.apply(name, settings));
         }
-        addSimilarities(similaritySettings, providers, DEFAULTS);
+        for (Map.Entry<String, SimilarityProvider> entry : addSimilarities(similaritySettings, DEFAULTS).entrySet()) {
+            // Avoid overwriting custom providers for indices older than v5.0
+            if (providers.containsKey(entry.getKey()) && indexSettings.getIndexVersionCreated().before(Version.V_5_0_0)) {
+                continue;
+            }
+            providers.put(entry.getKey(), entry.getValue());
+        }
         this.similarities = providers;
-        defaultSimilarity = providers.get(SimilarityService.DEFAULT_SIMILARITY).get();
+        defaultSimilarity = (providers.get("default") != null) ? providers.get("default").get()
+            : providers.get(SimilarityService.DEFAULT_SIMILARITY).get();
         // Expert users can configure the base type as being different to default, but out-of-box we use default.
         baseSimilarity = (providers.get("base") != null) ? providers.get("base").get() :
                 defaultSimilarity;
@@ -90,7 +102,9 @@ public final class SimilarityService extends AbstractIndexComponent {
                 defaultSimilarity;
     }
 
-    private void addSimilarities(Map<String, Settings> similaritySettings, Map<String, SimilarityProvider> providers, Map<String, BiFunction<String, Settings, SimilarityProvider>> similarities) {
+    private Map<String, SimilarityProvider> addSimilarities(Map<String, Settings> similaritySettings,
+                                                            Map<String, BiFunction<String, Settings, SimilarityProvider>> similarities) {
+        Map<String, SimilarityProvider> providers = new HashMap<>(similarities.size());
         for (Map.Entry<String, BiFunction<String, Settings, SimilarityProvider>> entry : similarities.entrySet()) {
             String name = entry.getKey();
             BiFunction<String, Settings, SimilarityProvider> factory = entry.getValue();
@@ -100,12 +114,17 @@ public final class SimilarityService extends AbstractIndexComponent {
             }
             providers.put(name, factory.apply(name, settings));
         }
+        return providers;
     }
 
     public SimilarityProvider getSimilarity(String name) {
         return similarities.get(name);
     }
 
+    public SimilarityProvider getDefaultSimilarity() {
+        return similarities.get("default");
+    }
+
     static class PerFieldSimilarity extends PerFieldSimilarityWrapper {
 
         private final Similarity defaultSimilarity;
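The two-phase registration above can be hard to follow inside the diff: custom providers are registered first, then the built-in defaults are layered in, and for pre-5.0 indices a custom provider that shadows a default name must survive. A self-contained sketch of just that merge rule, with plain strings standing in for `SimilarityProvider`:

```java
import java.util.HashMap;
import java.util.Map;

// Sketch of the provider-map merge: defaults fill in the gaps, but must not
// overwrite a legacy custom provider when the index predates 5.0.
class SimilarityMergeSketch {
    static Map<String, String> merge(Map<String, String> custom, Map<String, String> defaults,
                                     boolean createdBefore5_0) {
        Map<String, String> providers = new HashMap<>(custom);
        for (Map.Entry<String, String> entry : defaults.entrySet()) {
            if (providers.containsKey(entry.getKey()) && createdBefore5_0) {
                continue; // keep the legacy custom provider for old indices
            }
            providers.put(entry.getKey(), entry.getValue());
        }
        return providers;
    }
}
```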
providers.get("base").get() : defaultSimilarity; @@ -90,7 +102,9 @@ public final class SimilarityService extends AbstractIndexComponent { defaultSimilarity; } - private void addSimilarities(Map similaritySettings, Map providers, Map> similarities) { + private Map addSimilarities(Map similaritySettings, + Map> similarities) { + Map providers = new HashMap<>(similarities.size()); for (Map.Entry> entry : similarities.entrySet()) { String name = entry.getKey(); BiFunction factory = entry.getValue(); @@ -100,12 +114,17 @@ public final class SimilarityService extends AbstractIndexComponent { } providers.put(name, factory.apply(name, settings)); } + return providers; } public SimilarityProvider getSimilarity(String name) { return similarities.get(name); } + public SimilarityProvider getDefaultSimilarity() { + return similarities.get("default"); + } + static class PerFieldSimilarity extends PerFieldSimilarityWrapper { private final Similarity defaultSimilarity; diff --git a/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java b/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java index cfd5dc8f066..9f712c77e70 100644 --- a/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java +++ b/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java @@ -36,7 +36,7 @@ import java.util.Objects; */ public class StoreFileMetaData implements Writeable { - public static final Version FIRST_LUCENE_CHECKSUM_VERSION = Version.LUCENE_4_8_0; + public static final Version FIRST_LUCENE_CHECKSUM_VERSION = Version.LUCENE_5_0_0; private final String name; diff --git a/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java b/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java index cd0f94567f3..54ba8638eb2 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java @@ -22,7 +22,6 @@ import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.InputStreamDataInput; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.Channels; import java.io.IOException; @@ -36,9 +35,9 @@ import java.nio.file.Path; */ class Checkpoint { - static final int BUFFER_SIZE = RamUsageEstimator.NUM_BYTES_INT // ops - + RamUsageEstimator.NUM_BYTES_LONG // offset - + RamUsageEstimator.NUM_BYTES_LONG;// generation + static final int BUFFER_SIZE = Integer.BYTES // ops + + Long.BYTES // offset + + Long.BYTES;// generation final long offset; final int numOps; final long generation; diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 5a4438f426d..31b8db03141 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -418,10 +418,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC try { final BufferedChecksumStreamOutput checksumStreamOutput = new BufferedChecksumStreamOutput(out); final long start = out.position(); - out.skip(RamUsageEstimator.NUM_BYTES_INT); + out.skip(Integer.BYTES); writeOperationNoSize(checksumStreamOutput, operation); final long end = out.position(); - final int operationSize = (int) (end - RamUsageEstimator.NUM_BYTES_INT - start); + final int operationSize = (int) (end - 
diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java
index 5a4438f426d..31b8db03141 100644
--- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java
+++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java
@@ -418,10 +418,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
         try {
             final BufferedChecksumStreamOutput checksumStreamOutput = new BufferedChecksumStreamOutput(out);
             final long start = out.position();
-            out.skip(RamUsageEstimator.NUM_BYTES_INT);
+            out.skip(Integer.BYTES);
             writeOperationNoSize(checksumStreamOutput, operation);
             final long end = out.position();
-            final int operationSize = (int) (end - RamUsageEstimator.NUM_BYTES_INT - start);
+            final int operationSize = (int) (end - Integer.BYTES - start);
             out.seek(start);
             out.writeInt(operationSize);
             out.seek(end);
@@ -636,7 +636,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
 
         @Override
         public long ramBytesUsed() {
-            return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT;
+            return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * Long.BYTES + Integer.BYTES;
         }
 
         @Override
@@ -1144,10 +1144,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
             for (Operation op : toWrite) {
                 out.reset();
                 final long start = out.position();
-                out.skip(RamUsageEstimator.NUM_BYTES_INT);
+                out.skip(Integer.BYTES);
                 writeOperationNoSize(checksumStreamOutput, op);
                 long end = out.position();
-                int operationSize = (int) (out.position() - RamUsageEstimator.NUM_BYTES_INT - start);
+                int operationSize = (int) (out.position() - Integer.BYTES - start);
                 out.seek(start);
                 out.writeInt(operationSize);
                 out.seek(end);
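Both hunks above use the same framing trick: reserve `Integer.BYTES` for a length slot, serialize the operation, then seek back and backfill the real size. A standalone sketch of that arithmetic, with a `ByteBuffer` standing in for Elasticsearch's seekable translog stream:

```java
import java.nio.ByteBuffer;

// Length-prefix framing: [size:int][operation bytes], where size is written last.
class FrameSketch {
    static ByteBuffer frame(byte[] operation) {
        ByteBuffer out = ByteBuffer.allocate(Integer.BYTES + operation.length);
        int start = out.position();
        out.position(start + Integer.BYTES); // skip the length slot
        out.put(operation);                  // writeOperationNoSize(...)
        int end = out.position();
        int operationSize = end - Integer.BYTES - start;
        out.putInt(start, operationSize);    // backfill the length prefix
        out.position(end);
        return out;
    }
}
```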
path:" + path); } - return new TranslogReader(checkpoint.generation, channel, path, ref.length + CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC) + RamUsageEstimator.NUM_BYTES_INT, checkpoint.offset, checkpoint.numOps); + return new TranslogReader(checkpoint.generation, channel, path, ref.length + CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC) + Integer.BYTES, checkpoint.offset, checkpoint.numOps); default: throw new TranslogCorruptedException("No known translog stream version: " + version + " path:" + path); } diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index a1fc708ddaf..e215669761c 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -24,7 +24,6 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.OutputStreamDataOutput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.unit.ByteSizeValue; @@ -76,7 +75,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { } private static int getHeaderLength(int uuidLength) { - return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + RamUsageEstimator.NUM_BYTES_INT; + return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + Integer.BYTES; } public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index 575153c8ada..32b5f55b369 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -228,7 +228,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo @Override public long ramBytesUsed() { - return RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_LONG + value.length(); + return RamUsageEstimator.NUM_BYTES_OBJECT_REF + Long.BYTES + value.length(); } @Override diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index f99b39ef620..5d2fb761842 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -19,6 +19,8 @@ package org.elasticsearch.indices.analysis; import org.apache.lucene.analysis.hunspell.Dictionary; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.component.AbstractComponent; @@ -183,7 +185,9 @@ public class HunspellService extends AbstractComponent { affixStream = Files.newInputStream(affixFiles[0]); - return new Dictionary(affixStream, dicStreams, ignoreCase); + try (Directory tmp = new SimpleFSDirectory(env.tmpFile())) { + return new Dictionary(tmp, "hunspell", affixStream, dicStreams, ignoreCase); + } } catch 
diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
index f99b39ef620..5d2fb761842 100644
--- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
+++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
@@ -19,6 +19,8 @@
 package org.elasticsearch.indices.analysis;
 
 import org.apache.lucene.analysis.hunspell.Dictionary;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.SimpleFSDirectory;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.component.AbstractComponent;
@@ -183,7 +185,9 @@ public class HunspellService extends AbstractComponent {
 
             affixStream = Files.newInputStream(affixFiles[0]);
 
-            return new Dictionary(affixStream, dicStreams, ignoreCase);
+            try (Directory tmp = new SimpleFSDirectory(env.tmpFile())) {
+                return new Dictionary(tmp, "hunspell", affixStream, dicStreams, ignoreCase);
+            }
         } catch (Exception e) {
             logger.error("Could not load hunspell dictionary [{}]", e, locale);
diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestService.java b/core/src/main/java/org/elasticsearch/ingest/IngestService.java
index 78a1f66fb80..b38f7470e39 100644
--- a/core/src/main/java/org/elasticsearch/ingest/IngestService.java
+++ b/core/src/main/java/org/elasticsearch/ingest/IngestService.java
@@ -20,11 +20,17 @@
 package org.elasticsearch.ingest;
 
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.ingest.core.IngestInfo;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.core.ProcessorInfo;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.threadpool.ThreadPool;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
 
 /**
  * Holder class for several ingest related services.
@@ -53,6 +59,15 @@ public class IngestService implements Closeable {
         pipelineStore.buildProcessorFactoryRegistry(processorsRegistryBuilder, scriptService);
     }
 
+    public IngestInfo info() {
+        Map<String, Processor.Factory> processorFactories = pipelineStore.getProcessorRegistry().getProcessorFactories();
+        List<ProcessorInfo> processorInfoList = new ArrayList<>(processorFactories.size());
+        for (Map.Entry<String, Processor.Factory> entry : processorFactories.entrySet()) {
+            processorInfoList.add(new ProcessorInfo(entry.getKey()));
+        }
+        return new IngestInfo(processorInfoList);
+    }
+
     @Override
     public void close() throws IOException {
         pipelineStore.close();
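`info()` above advertises each node's installed processor types; the `PipelineStore` change below uses that inventory to reject pipelines that reference a processor missing on any node. A standalone sketch of the check, with node capabilities modeled as a plain map rather than `DiscoveryNode`/`IngestInfo`:

```java
import java.util.List;
import java.util.Map;
import java.util.Set;

// A pipeline may only be stored if every processor type it uses is installed
// on every node in the cluster.
class PipelineValidationSketch {
    static void validate(List<String> pipelineProcessorTypes, Map<String, Set<String>> nodeToProcessors) {
        if (nodeToProcessors.isEmpty()) {
            throw new IllegalStateException("Ingest info is empty");
        }
        for (String type : pipelineProcessorTypes) {
            for (Map.Entry<String, Set<String>> node : nodeToProcessors.entrySet()) {
                if (node.getValue().contains(type) == false) {
                    throw new IllegalArgumentException(
                        "Processor type [" + type + "] is not installed on node [" + node.getKey() + "]");
                }
            }
        }
    }
}
```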
diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java
index 3999f357b86..ac2df419f55 100644
--- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java
+++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.ingest;
 
 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ingest.DeletePipelineRequest;
@@ -31,12 +32,15 @@ import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateListener;
 import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.ingest.core.IngestInfo;
 import org.elasticsearch.ingest.core.Pipeline;
 import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.core.ProcessorInfo;
 import org.elasticsearch.ingest.core.TemplateService;
 import org.elasticsearch.script.ScriptService;
 
@@ -47,6 +51,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 public class PipelineStore extends AbstractComponent implements Closeable, ClusterStateListener {
 
@@ -130,8 +135,8 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust
             pipelines.remove(request.getId());
             ClusterState.Builder newState = ClusterState.builder(currentState);
             newState.metaData(MetaData.builder(currentState.getMetaData())
-                    .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines))
-                    .build());
+                .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines))
+                .build());
             return newState.build();
         }
     }
@@ -139,15 +144,9 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust
     /**
     * Stores the specified pipeline definition in the request.
     */
-    public void put(ClusterService clusterService, PutPipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+    public void put(ClusterService clusterService, Map<DiscoveryNode, IngestInfo> ingestInfos, PutPipelineRequest request, ActionListener<WritePipelineResponse> listener) throws Exception {
         // validates the pipeline and processor configuration before submitting a cluster update task:
-        Map<String, Object> pipelineConfig = XContentHelper.convertToMap(request.getSource(), false).v2();
-        try {
-            factory.create(request.getId(), pipelineConfig, processorRegistry);
-        } catch(Exception e) {
-            listener.onFailure(e);
-            return;
-        }
+        validatePipeline(ingestInfos, request);
         clusterService.submitStateUpdateTask("put-pipeline-" + request.getId(), new AckedClusterStateUpdateTask<WritePipelineResponse>(request, listener) {
 
             @Override
@@ -162,6 +161,25 @@ public class PipelineStore extends AbstractComponent implements Closeable, Clust
         });
     }
 
+    void validatePipeline(Map<DiscoveryNode, IngestInfo> ingestInfos, PutPipelineRequest request) throws Exception {
+        if (ingestInfos.isEmpty()) {
+            throw new IllegalStateException("Ingest info is empty");
+        }
+
+        Map<String, Object> pipelineConfig = XContentHelper.convertToMap(request.getSource(), false).v2();
+        Pipeline pipeline = factory.create(request.getId(), pipelineConfig, processorRegistry);
+        List<Exception> exceptions = new ArrayList<>();
+        for (Processor processor : pipeline.flattenAllProcessors()) {
+            for (Map.Entry<DiscoveryNode, IngestInfo> entry : ingestInfos.entrySet()) {
+                if (entry.getValue().containsProcessor(processor.getType()) == false) {
+                    String message = "Processor type [" + processor.getType() + "] is not installed on node [" + entry.getKey() + "]";
+                    exceptions.add(new IllegalArgumentException(message));
+                }
+            }
+        }
+        ExceptionsHelper.rethrowAndSuppress(exceptions);
+    }
+
     ClusterState innerPut(PutPipelineRequest request, ClusterState currentState) {
         IngestMetadata currentIngestMetadata = currentState.metaData().custom(IngestMetadata.TYPE);
         Map<String, PipelineConfiguration> pipelines;
diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java
index bd885c578b3..e831d70702e 100644
--- a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java
+++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java
@@ -21,6 +21,7 @@ package org.elasticsearch.ingest;
 
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.core.ProcessorInfo;
 import org.elasticsearch.ingest.core.TemplateService;
 
 import java.io.Closeable;
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java
index c784ea1c57a..ddf3781d1a6 100644
--- a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java
+++ b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java
@@ -20,6 +20,9 @@
 package org.elasticsearch.ingest.core;
 
+import org.elasticsearch.common.util.iterable.Iterables;
+
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -56,6 +59,24 @@ public class CompoundProcessor implements Processor {
         return processors;
     }
 
+    public List<Processor> flattenProcessors() {
+        List<Processor> allProcessors = new ArrayList<>(flattenProcessors(processors));
+        allProcessors.addAll(flattenProcessors(onFailureProcessors));
+        return allProcessors;
+    }
+
+    private static List<Processor> flattenProcessors(List<Processor> processors) {
+        List<Processor> flattened = new ArrayList<>();
+        for (Processor processor : processors) {
+            if (processor instanceof CompoundProcessor) {
+                flattened.addAll(((CompoundProcessor) processor).flattenProcessors());
+            } else {
+                flattened.add(processor);
+            }
+        }
+        return flattened;
+    }
+
     @Override
     public String getType() {
         return "compound";
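`flattenProcessors()` above recursively unrolls nested `CompoundProcessor`s so validation can inspect every leaf processor. The `IngestInfo` class added below then ships the node's processor inventory over the wire; its `readFrom`/`writeTo` pair must stay symmetric, count first, then entries, as in this simplified codec (plain `int` and `DataInput`/`DataOutput` here instead of Elasticsearch's vInt and stream types):

```java
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.LinkedHashSet;
import java.util.Set;

// Symmetric wire format for a set of processor type names:
// the writer emits the element count in the same encoding the reader consumes.
class ProcessorSetCodec {
    static void write(DataOutput out, Set<String> types) throws IOException {
        out.writeInt(types.size());          // count first...
        for (String type : types) {
            out.writeUTF(type);              // ...then each entry
        }
    }

    static Set<String> read(DataInput in) throws IOException {
        int size = in.readInt();
        Set<String> types = new LinkedHashSet<>(size);
        for (int i = 0; i < size; i++) {
            types.add(in.readUTF());
        }
        return types;
    }
}
```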
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java b/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java
new file mode 100644
index 00000000000..8625e1d8884
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/IngestInfo.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+
+public class IngestInfo implements Streamable, ToXContent {
+
+    private Set<ProcessorInfo> processors;
+
+    public IngestInfo() {
+        processors = Collections.emptySet();
+    }
+
+    public IngestInfo(List<ProcessorInfo> processors) {
+        this.processors = new LinkedHashSet<>(processors);
+    }
+
+    public Iterable<ProcessorInfo> getProcessors() {
+        return processors;
+    }
+
+    public boolean containsProcessor(String type) {
+        return processors.contains(new ProcessorInfo(type));
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        int size = in.readVInt();
+        Set<ProcessorInfo> processors = new LinkedHashSet<>(size);
+        for (int i = 0; i < size; i++) {
+            ProcessorInfo info = new ProcessorInfo();
+            info.readFrom(in);
+            processors.add(info);
+        }
+        this.processors = processors;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeVInt(processors.size());
+        for (ProcessorInfo info : processors) {
+            info.writeTo(out);
+        }
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject("ingest");
+        builder.startArray("processors");
+        for (ProcessorInfo info : processors) {
+            info.toXContent(builder, params);
+        }
+        builder.endArray();
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        IngestInfo that = (IngestInfo) o;
+        return Objects.equals(processors, that.processors);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(processors);
+    }
+}
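One wire-format detail fixed up here: readFrom() sizes the set from a readVInt, so writeTo() must emit the count with the matching writeVInt (a bare write() of the int would not round-trip). A round-trip sketch, assuming the usual BytesStreamOutput/StreamInput.wrap test idiom:

    IngestInfo original = new IngestInfo(Arrays.asList(
            new ProcessorInfo("set"), new ProcessorInfo("grok")));
    BytesStreamOutput out = new BytesStreamOutput();
    original.writeTo(out);

    IngestInfo copy = new IngestInfo();
    copy.readFrom(StreamInput.wrap(out.bytes()));
    assertEquals(original, copy);                // set-based equals
    assertTrue(copy.containsProcessor("grok"));  // membership by type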
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java
index 9b887ec229c..821a44c0a96 100644
--- a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java
+++ b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java
@@ -83,6 +83,14 @@ public final class Pipeline {
         return compoundProcessor.getOnFailureProcessors();
     }

+    /**
+     * Flattens the normal and on failure processors into a single list. The original order is lost.
+     * This can be useful for pipeline validation purposes.
+     */
+    public List<Processor> flattenAllProcessors() {
+        return compoundProcessor.flattenProcessors();
+    }
+
     public final static class Factory {

         public Pipeline create(String id, Map<String, Object> config, ProcessorsRegistry processorRegistry) throws Exception {
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java b/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java
new file mode 100644
index 00000000000..64c3d19719b
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/ProcessorInfo.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+
+public class ProcessorInfo implements Streamable, ToXContent {
+
+    private String type;
+
+    ProcessorInfo() {
+    }
+
+    public ProcessorInfo(String type) {
+        this.type = type;
+    }
+
+    /**
+     * @return The unique processor type
+     */
+    public String getType() {
+        return type;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        this.type = in.readString();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(this.type);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field("type", type);
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        ProcessorInfo that = (ProcessorInfo) o;
+
+        return type.equals(that.type);
+    }
+
+    @Override
+    public int hashCode() {
+        return type.hashCode();
+    }
+}
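ProcessorInfo is a pure value type: equals() and hashCode() delegate to the type string, which is what lets IngestInfo.containsProcessor do a constant-time set lookup with a throwaway instance:

    Set<ProcessorInfo> installed = new LinkedHashSet<>();
    installed.add(new ProcessorInfo("set"));
    // A fresh instance with the same type is equal to the stored one:
    boolean present = installed.contains(new ProcessorInfo("set"));  // true
    boolean absent  = installed.contains(new ProcessorInfo("grok")); // false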
diff --git a/core/src/main/java/org/elasticsearch/node/service/NodeService.java b/core/src/main/java/org/elasticsearch/node/service/NodeService.java
index b5b8e8f2cb6..88b2fe48868 100644
--- a/core/src/main/java/org/elasticsearch/node/service/NodeService.java
+++ b/core/src/main/java/org/elasticsearch/node/service/NodeService.java
@@ -84,7 +84,6 @@ public class NodeService extends AbstractComponent implements Closeable {
         this.transportService = transportService;
         this.indicesService = indicesService;
         this.discovery = discovery;
-        discovery.setNodeService(this);
         this.version = version;
         this.pluginService = pluginService;
         this.circuitBreakerService = circuitBreakerService;
@@ -132,12 +131,13 @@ public class NodeService extends AbstractComponent implements Closeable {
                 threadPool.info(),
                 transportService.info(),
                 httpServer == null ? null : httpServer.info(),
-                pluginService == null ? null : pluginService.info()
+                pluginService == null ? null : pluginService.info(),
+                ingestService == null ? null : ingestService.info()
         );
     }

     public NodeInfo info(boolean settings, boolean os, boolean process, boolean jvm, boolean threadPool,
-                         boolean transport, boolean http, boolean plugin) {
+                         boolean transport, boolean http, boolean plugin, boolean ingest) {
         return new NodeInfo(version, Build.CURRENT, discovery.localNode(), serviceAttributes,
                 settings ? settingsFilter.filter(this.settings) : null,
                 os ? monitorService.osService().info() : null,
@@ -146,7 +146,8 @@ public class NodeService extends AbstractComponent implements Closeable {
                 threadPool ? this.threadPool.info() : null,
                 transport ? transportService.info() : null,
                 http ? (httpServer == null ? null : httpServer.info()) : null,
-                plugin ? (pluginService == null ? null : pluginService.info()) : null
+                plugin ? (pluginService == null ? null : pluginService.info()) : null,
+                ingest ? (ingestService == null ? null : ingestService.info()) : null
         );
     }
diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java
index b3208b4133c..98be7d308af 100644
--- a/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java
+++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorQuery.java
@@ -115,10 +115,6 @@ final class PercolatorQuery extends Query {

     @Override
     public Query rewrite(IndexReader reader) throws IOException {
-        if (getBoost() != 1f) {
-            return super.rewrite(reader);
-        }
-
         Query rewritten = percolatorQueriesQuery.rewrite(reader);
         if (rewritten != percolatorQueriesQuery) {
             return new PercolatorQuery(rewritten, percolatorIndexSearcher, percolatorQueries);
diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
index 552e6aaf2e4..a6ea381adb4 100644
--- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
+++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
@@ -458,7 +458,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent
diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java
--- a/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java
+++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java
@@ public class ChecksumBlobStoreFormat<T extends ToXContent> extends BlobStoreFormat
         BytesReference bytes = write(obj);
         try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
             final String resourceDesc = "ChecksumBlobStoreFormat.writeBlob(blob=\"" + blobName + "\")";
-            try (OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput(resourceDesc, byteArrayOutputStream, BUFFER_SIZE)) {
+            try (OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput(resourceDesc, blobName, byteArrayOutputStream, BUFFER_SIZE)) {
                 CodecUtil.writeHeader(indexOutput, codec, VERSION);
                 try (OutputStream indexOutputOutputStream = new IndexOutputOutputStream(indexOutput) {
                     @Override
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
index f11efeca87d..bd6637cb788 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
@@ -48,7 +48,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET;
 public class RestNodesInfoAction extends BaseRestHandler {

     private final SettingsFilter settingsFilter;
-    private final static Set<String> ALLOWED_METRICS = Sets.newHashSet("http", "jvm", "os", "plugins", "process", "settings", "thread_pool", "transport");
+    private final static Set<String> ALLOWED_METRICS = Sets.newHashSet("http", "jvm", "os", "plugins", "process", "settings", "thread_pool", "transport", "ingest");

     @Inject
     public RestNodesInfoAction(Settings settings, RestController controller, Client client, SettingsFilter settingsFilter) {
@@ -101,6 +101,7 @@ public class RestNodesInfoAction extends BaseRestHandler {
             nodesInfoRequest.transport(metrics.contains("transport"));
             nodesInfoRequest.http(metrics.contains("http"));
             nodesInfoRequest.plugins(metrics.contains("plugins"));
+            nodesInfoRequest.ingest(metrics.contains("ingest"));
         }

         settingsFilter.addFilterSettingParams(request);
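With "ingest" whitelisted in ALLOWED_METRICS, the new section can be requested on its own. A client-side sketch (ingest(boolean) is the flag added in this patch; clear() is assumed to behave as on the other node-level requests, dropping the default metrics):

    // Only fetch the ingest section from every node:
    NodesInfoRequest request = new NodesInfoRequest().clear().ingest(true);
    client.admin().cluster().nodesInfo(request, listener);
    // REST equivalent: GET /_nodes/ingest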
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java
index 99cdc16253a..658090bb6db 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java
@@ -52,10 +52,10 @@ public class RestCancelTasksAction extends BaseRestHandler {
         TaskId parentTaskId = new TaskId(request.param("parent_task_id"));

         CancelTasksRequest cancelTasksRequest = new CancelTasksRequest();
-        cancelTasksRequest.taskId(taskId);
-        cancelTasksRequest.nodesIds(nodesIds);
-        cancelTasksRequest.actions(actions);
-        cancelTasksRequest.parentTaskId(parentTaskId);
+        cancelTasksRequest.setTaskId(taskId);
+        cancelTasksRequest.setNodesIds(nodesIds);
+        cancelTasksRequest.setActions(actions);
+        cancelTasksRequest.setParentTaskId(parentTaskId);
         client.admin().cluster().cancelTasks(cancelTasksRequest, new RestToXContentListener<>(channel));
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java
index 992267fa8a5..9a9d1991298 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java
@@ -50,13 +50,15 @@ public class RestListTasksAction extends BaseRestHandler {
         TaskId taskId = new TaskId(request.param("taskId"));
         String[] actions = Strings.splitStringByCommaToArray(request.param("actions"));
         TaskId parentTaskId = new TaskId(request.param("parent_task_id"));
+        boolean waitForCompletion = request.paramAsBoolean("wait_for_completion", false);

         ListTasksRequest listTasksRequest = new ListTasksRequest();
-        listTasksRequest.taskId(taskId);
-        listTasksRequest.nodesIds(nodesIds);
-        listTasksRequest.detailed(detailed);
-        listTasksRequest.actions(actions);
-        listTasksRequest.parentTaskId(parentTaskId);
+        listTasksRequest.setTaskId(taskId);
+        listTasksRequest.setNodesIds(nodesIds);
+        listTasksRequest.setDetailed(detailed);
+        listTasksRequest.setActions(actions);
+        listTasksRequest.setParentTaskId(parentTaskId);
+        listTasksRequest.setWaitForCompletion(waitForCompletion);
         client.admin().cluster().listTasks(listTasksRequest, new RestToXContentListener<>(channel));
     }
 }
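The task request mutators are renamed to get/set pairs across the board; the setters still return the request (see the chained setTaskId calls in the tests further down), so call sites can stay fluent:

    ListTasksRequest request = new ListTasksRequest()
            .setActions("testAction*")      // was actions(...)
            .setDetailed(true)              // was detailed(...)
            .setWaitForCompletion(true);    // new in this patch
    client.admin().cluster().listTasks(request, listener);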
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java
index 4e90a6a3a85..a3d0cc84559 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java
@@ -144,8 +144,12 @@ public class RestAnalyzeAction extends BaseRestHandler {
                     charFilters.add(parser.text());
                 }
                 analyzeRequest.charFilters(charFilters.toArray(new String[charFilters.size()]));
-            } else if (parseFieldMatcher.match(currentFieldName, Fields.EXPLAIN) && token == XContentParser.Token.VALUE_BOOLEAN) {
-                analyzeRequest.explain(parser.booleanValue());
+            } else if (parseFieldMatcher.match(currentFieldName, Fields.EXPLAIN)) {
+                if (parser.isBooleanValue()) {
+                    analyzeRequest.explain(parser.booleanValue());
+                } else {
+                    throw new IllegalArgumentException(currentFieldName + " must be either 'true' or 'false'");
+                }
             } else if (parseFieldMatcher.match(currentFieldName, Fields.ATTRIBUTES) && token == XContentParser.Token.START_ARRAY){
                 List<String> attributes = new ArrayList<>();
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java
index b0479475d86..1821124473f 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java
@@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -108,7 +109,7 @@ public class SignificantStringTerms extends InternalSignificantTerms
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java
@@ public class DoubleTerms extends InternalTerms
     @Override
     protected void doReadFrom(StreamInput in) throws IOException {
-        if (in.getVersion().onOrAfter(Version.V_1_4_0_Beta1)) {
-            this.docCountError = in.readLong();
-        } else {
-            this.docCountError = -1;
-        }
+        this.docCountError = in.readLong();
         this.order = InternalOrder.Streams.readOrder(in);
         this.formatter = ValueFormatterStreams.readOptional(in);
         this.requiredSize = readSize(in);
@@ -218,9 +214,7 @@ public class DoubleTerms extends InternalTerms
     @Override
     protected void doWriteTo(StreamOutput out) throws IOException {
-        if (out.getVersion().onOrAfter(Version.V_1_4_0_Beta1)) {
-            out.writeLong(docCountError);
-        }
+        out.writeLong(docCountError);
         InternalOrder.Streams.writeOrder(order, out);
         ValueFormatterStreams.writeOptional(formatter, out);
         writeSize(requiredSize, out);
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
index 91e949e190f..4377b9debbb 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
@@ -26,7 +26,6 @@ import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LongBitSet;
-import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lease.Releasables;
@@ -136,7 +135,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr

     protected static void copy(BytesRef from, BytesRef to) {
         if (to.bytes.length < from.length) {
-            to.bytes = new byte[ArrayUtil.oversize(from.length, RamUsageEstimator.NUM_BYTES_BYTE)];
+            to.bytes = new byte[ArrayUtil.oversize(from.length, 1)];
         }
         to.offset = 0;
         to.length = from.length;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java
index 0b9ebd97cf9..040768f9d3b 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java
@@ -105,7 +105,7 @@ public class StringTerms extends InternalTerms

         @Override
         int compareTerm(Terms.Bucket other) {
-            return BytesRef.getUTF8SortedAsUnicodeComparator().compare(termBytes, ((Bucket) other).termBytes);
+            return termBytes.compareTo(((Bucket) other).termBytes);
         }

         @Override
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java
index eee9d4cbf90..41dd0bb441e 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java
@@ -518,13 +518,13 @@ public class IncludeExclude implements Writeable, ToXContent {
         if (includeValues != null) {
             for (BytesRef val : includeValues) {
                 double dval=Double.parseDouble(val.utf8ToString());
-                result.addAccept( NumericUtils.doubleToSortableLong(dval));
+                result.addAccept(NumericUtils.doubleToSortableLong(dval));
             }
         }
         if (excludeValues != null) {
             for (BytesRef val : excludeValues) {
                 double dval=Double.parseDouble(val.utf8ToString());
-                result.addReject( NumericUtils.doubleToSortableLong(dval));
+                result.addReject(NumericUtils.doubleToSortableLong(dval));
             }
         }
         return result;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java
index 2e8ce4563ce..568ecdbec59 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java
@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.metrics.cardinality;

 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LongBitSet;
-import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.packed.PackedInts;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -67,7 +66,7 @@ public final class HyperLogLogPlusPlus implements Releasable {
      */
     public static int precisionFromThreshold(long count) {
         final long hashTableEntries = (long) Math.ceil(count / MAX_LOAD_FACTOR);
-        int precision = PackedInts.bitsRequired(hashTableEntries * RamUsageEstimator.NUM_BYTES_INT);
+        int precision = PackedInts.bitsRequired(hashTableEntries * Integer.BYTES);
         precision = Math.max(precision, MIN_PRECISION);
         precision = Math.min(precision, MAX_PRECISION);
         return precision;
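Integer.BYTES is a drop-in for the removed RamUsageEstimator.NUM_BYTES_INT (both are 4). A worked example of precisionFromThreshold, assuming the class's load factor is 0.75 (MAX_LOAD_FACTOR is not shown in this hunk):

    // count = 1000 -> hashTableEntries = ceil(1000 / 0.75) = 1334
    long hashTableEntries = (long) Math.ceil(1000 / 0.75);
    // 1334 entries * 4 bytes = 5336 bytes -> 13 bits to address them
    int precision = PackedInts.bitsRequired(hashTableEntries * Integer.BYTES); // 13
    // ...then clamped into [MIN_PRECISION, MAX_PRECISION] as above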
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java
index 4687002cf12..2dfab325127 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java
@@ -167,14 +167,12 @@ public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue
     }

     @Override
-    public InternalAggregation buildAggregation(long owningBucketOrdinal) {
-        if (valuesSource == null) {
-            return new InternalExtendedStats(name, 0, 0d, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0d, 0d, formatter,
-                    pipelineAggregators(), metaData());
+    public InternalAggregation buildAggregation(long bucket) {
+        if (valuesSource == null || bucket >= counts.size()) {
+            return buildEmptyAggregation();
         }
-        assert owningBucketOrdinal < counts.size();
-        return new InternalExtendedStats(name, counts.get(owningBucketOrdinal), sums.get(owningBucketOrdinal),
-                mins.get(owningBucketOrdinal), maxes.get(owningBucketOrdinal), sumOfSqrs.get(owningBucketOrdinal), sigma, formatter,
+        return new InternalExtendedStats(name, counts.get(bucket), sums.get(bucket),
+                mins.get(bucket), maxes.get(bucket), sumOfSqrs.get(bucket), sigma, formatter,
                 pipelineAggregators(), metaData());
     }
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java
index 543c5907070..9fac5809cef 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java
@@ -158,19 +158,13 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStat
     @Override
     public void readOtherStatsFrom(StreamInput in) throws IOException {
         sumOfSqrs = in.readDouble();
-        if (in.getVersion().onOrAfter(Version.V_1_4_3)) {
-            sigma = in.readDouble();
-        } else {
-            sigma = 2.0;
-        }
+        sigma = in.readDouble();
     }

     @Override
     protected void writeOtherStatsTo(StreamOutput out) throws IOException {
         out.writeDouble(sumOfSqrs);
-        if (out.getVersion().onOrAfter(Version.V_1_4_3)) {
-            out.writeDouble(sigma);
-        }
+        out.writeDouble(sigma);
     }
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java
index 8ad24b5cb19..7a15f67dbd6 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java
@@ -78,12 +78,12 @@ public final class CustomQueryScorer extends QueryScorer {
                                 Map<String, WeightedSpanTerm> terms) throws IOException {
             if (query instanceof FunctionScoreQuery) {
                 query = ((FunctionScoreQuery) query).getSubQuery();
-                extract(query, query.getBoost(), terms);
+                extract(query, 1F, terms);
             } else if (query instanceof FiltersFunctionScoreQuery) {
                 query = ((FiltersFunctionScoreQuery) query).getSubQuery();
-                extract(query, query.getBoost(), terms);
+                extract(query, 1F, terms);
             } else if (terms.isEmpty()) {
-                extractWeightedTerms(terms, query, query.getBoost());
+                extractWeightedTerms(terms, query, 1F);
             }
         }
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java
index b3175e6c22a..b9ae34b60b0 100644
--- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java
+++ b/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java
@@ -89,23 +89,12 @@ public final class FragmentBuilderHelper {
         }
         if (analyzer instanceof CustomAnalyzer) {
             final CustomAnalyzer a = (CustomAnalyzer) analyzer;
-            if (a.tokenizerFactory() instanceof EdgeNGramTokenizerFactory
-                    || (a.tokenizerFactory() instanceof NGramTokenizerFactory
-                    && !((NGramTokenizerFactory)a.tokenizerFactory()).version().onOrAfter(Version.LUCENE_4_2))) {
-                // ngram tokenizer is broken before 4.2
-                return true;
-            }
             TokenFilterFactory[] tokenFilters = a.tokenFilters();
             for (TokenFilterFactory tokenFilterFactory : tokenFilters) {
                 if (tokenFilterFactory instanceof WordDelimiterTokenFilterFactory || tokenFilterFactory instanceof EdgeNGramTokenFilterFactory) {
                     return true;
                 }
-                if (tokenFilterFactory instanceof NGramTokenFilterFactory
-                        && !((NGramTokenFilterFactory)tokenFilterFactory).version().onOrAfter(Version.LUCENE_4_2)) {
-                    // ngram token filter is broken before 4.2
-                    return true;
-                }
             }
         }
         return false;
diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
index 6c01a27442e..8c3c19343b4 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
@@ -22,7 +22,6 @@ package org.elasticsearch.search.internal;
 import org.apache.lucene.queries.TermsQuery;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.Query;
@@ -233,9 +232,6 @@ public class DefaultSearchContext extends SearchContext {
         Query result;
         if (Queries.isConstantMatchAllQuery(query())) {
             result = new ConstantScoreQuery(searchFilter);
-            if (query().getBoost() != AbstractQueryBuilder.DEFAULT_BOOST) {
-                result = new BoostQuery(result, query().getBoost());
-            }
         } else {
             result = new BooleanQuery.Builder()
                 .add(query, Occur.MUST)
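The boost-related deletions above (here, in PercolatorQuery, and in DefaultSearchContext) share one cause: Lucene 6 drops the mutable per-query boost, so Query.getBoost() no longer carries information and the guards around it are dead. Boosting is now expressed by wrapping, for example:

    Query base = new TermQuery(new Term("user", "kimchy"));
    Query boosted = new BoostQuery(base, 2.0f); // replaces base.setBoost(2.0f)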
diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java
index c415fd5a70b..4f082b057da 100644
--- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java
@@ -35,8 +35,6 @@ public class FieldSortBuilder extends SortBuilder {

     private Object missing;

-    private Boolean ignoreUnmapped;
-
     private String unmappedType;

     private String sortMode;
@@ -76,17 +74,6 @@ public class FieldSortBuilder extends SortBuilder {
         return this;
     }

-    /**
-     * Sets if the field does not exists in the index, it should be ignored and not sorted by or not. Defaults
-     * to false (not ignoring).
-     * @deprecated Use {@link #unmappedType(String)} instead.
-     */
-    @Deprecated
-    public FieldSortBuilder ignoreUnmapped(boolean ignoreUnmapped) {
-        this.ignoreUnmapped = ignoreUnmapped;
-        return this;
-    }
-
     /**
      * Set the type to use in case the current field is not mapped in an index.
      * Specifying a type tells Elasticsearch what type the sort values should have, which is important
@@ -138,9 +125,6 @@ public class FieldSortBuilder extends SortBuilder {
         if (missing != null) {
             builder.field("missing", missing);
         }
-        if (ignoreUnmapped != null) {
-            builder.field(SortParseElement.IGNORE_UNMAPPED.getPreferredName(), ignoreUnmapped);
-        }
         if (unmappedType != null) {
             builder.field(SortParseElement.UNMAPPED_TYPE.getPreferredName(), unmappedType);
         }
diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
index a99158787d3..5349d6fc0d6 100644
--- a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
+++ b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java
@@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.core.LongFieldMapper;
 import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
 import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.search.SearchParseElement;
@@ -55,7 +54,6 @@ public class SortParseElement implements SearchParseElement {
     private static final SortField SORT_DOC = new SortField(null, SortField.Type.DOC);
     private static final SortField SORT_DOC_REVERSE = new SortField(null, SortField.Type.DOC, true);

-    public static final ParseField IGNORE_UNMAPPED = new ParseField("ignore_unmapped");
     public static final ParseField UNMAPPED_TYPE = new ParseField("unmapped_type");

     public static final String SCORE_FIELD_NAME = "_score";
@@ -156,12 +154,6 @@ public class SortParseElement implements SearchParseElement {
                         }
                     } else if ("missing".equals(innerJsonName)) {
                         missing = parser.textOrNull();
-                    } else if (context.parseFieldMatcher().match(innerJsonName, IGNORE_UNMAPPED)) {
-                        // backward compatibility: ignore_unmapped has been replaced with unmapped_type
-                        if (unmappedType == null // don't override if unmapped_type has been provided too
-                                && parser.booleanValue()) {
-                            unmappedType = LongFieldMapper.CONTENT_TYPE;
-                        }
                     } else if (context.parseFieldMatcher().match(innerJsonName, UNMAPPED_TYPE)) {
                         unmappedType = parser.textOrNull();
                     } else if ("mode".equals(innerJsonName)) {
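With ignore_unmapped removed, unmapped_type is the single escape hatch for sorting across indices where the field may be absent, and it additionally says what type the sort values should take:

    FieldSortBuilder sort = new FieldSortBuilder("price")
            .unmappedType("long")  // treat unmapped indices as if price were a long
            .missing("_last");     // documents without a value sort last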
diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
index c7f4392e56a..8b6f1198705 100644
--- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
+++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
@@ -222,7 +222,7 @@ public class ThreadPool extends AbstractComponent implements Closeable {
         int halfProcMaxAt5 = Math.min(((availableProcessors + 1) / 2), 5);
         int halfProcMaxAt10 = Math.min(((availableProcessors + 1) / 2), 10);
         Map<String, Settings> defaultExecutorTypeSettings = new HashMap<>();
-        add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GENERIC).keepAlive("30s"));
+        add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GENERIC).size(4 * availableProcessors).keepAlive("30s"));
         add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.INDEX).size(availableProcessors).queueSize(200));
         add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.BULK).size(availableProcessors).queueSize(50));
         add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GET).size(availableProcessors).queueSize(1000));
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
index 608b33db0fe..4909959015b 100644
--- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
@@ -31,9 +31,12 @@ grant codeBase "${codebase.securesm-1.0.jar}" {
 //// Very special jar permissions:
 //// These are dangerous permissions that we don't want to grant to everything.

-grant codeBase "${codebase.lucene-core-5.5.0.jar}" {
+grant codeBase "${codebase.lucene-core-6.0.0-snapshot-bea235f.jar}" {
   // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die)
+  // java 8 package
   permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
+  // java 9 "package"
+  permission java.lang.RuntimePermission "accessClassInPackage.jdk.internal.ref";
   permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
   // NOTE: also needed for RAMUsageEstimator size calculations
   permission java.lang.RuntimePermission "accessDeclaredMembers";
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
index 856cd50e2a9..fafa57118c2 100644
--- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
@@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" {
   permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
 };

-grant codeBase "${codebase.lucene-test-framework-5.5.0.jar}" {
+grant codeBase "${codebase.lucene-test-framework-6.0.0-snapshot-bea235f.jar}" {
   // needed by RamUsageTester
   permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
 };
diff --git a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java
index 39b4df44059..94806422c17 100644
--- a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java
+++ b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java
@@ -82,7 +82,7 @@ public class BlendedTermQueryTests extends ESTestCase {
             w.addDocument(d);
         }
         w.commit();
-        DirectoryReader reader = DirectoryReader.open(w, true);
+        DirectoryReader reader = DirectoryReader.open(w);
         IndexSearcher searcher = setSimilarity(newSearcher(reader));
         {
@@ -143,7 +143,7 @@ public class BlendedTermQueryTests extends ESTestCase {
             w.addDocument(d);
         }
         w.commit();
-        DirectoryReader reader = DirectoryReader.open(w, true);
+        DirectoryReader reader = DirectoryReader.open(w);
         IndexSearcher searcher = setSimilarity(newSearcher(reader));
         {
             String[] fields = new String[]{"username", "song"};
diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java
index 4669f5bc718..7824ecd39b1 100644
--- a/core/src/test/java/org/elasticsearch/VersionTests.java
+++ b/core/src/test/java/org/elasticsearch/VersionTests.java
@@ -31,8 +31,8 @@ import java.util.HashMap;
 import java.util.Locale;
 import java.util.Map;

-import static org.elasticsearch.Version.V_0_20_0;
-import static org.elasticsearch.Version.V_0_90_0;
+import static org.elasticsearch.Version.V_2_2_0;
+import static org.elasticsearch.Version.V_5_0_0;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.containsString;
@@ -42,21 +42,27 @@ import static org.hamcrest.Matchers.sameInstance;

 public class VersionTests extends ESTestCase {

     public void testVersionComparison() throws Exception {
-        assertThat(V_0_20_0.before(V_0_90_0), is(true));
-        assertThat(V_0_20_0.before(V_0_20_0), is(false));
-        assertThat(V_0_90_0.before(V_0_20_0), is(false));
+        assertThat(V_2_2_0.before(V_5_0_0), is(true));
+        assertThat(V_2_2_0.before(V_2_2_0), is(false));
+        assertThat(V_5_0_0.before(V_2_2_0), is(false));

-        assertThat(V_0_20_0.onOrBefore(V_0_90_0), is(true));
-        assertThat(V_0_20_0.onOrBefore(V_0_20_0), is(true));
-        assertThat(V_0_90_0.onOrBefore(V_0_20_0), is(false));
+        assertThat(V_2_2_0.onOrBefore(V_5_0_0), is(true));
+        assertThat(V_2_2_0.onOrBefore(V_2_2_0), is(true));
+        assertThat(V_5_0_0.onOrBefore(V_2_2_0), is(false));

-        assertThat(V_0_20_0.after(V_0_90_0), is(false));
-        assertThat(V_0_20_0.after(V_0_20_0), is(false));
-        assertThat(V_0_90_0.after(V_0_20_0), is(true));
+        assertThat(V_2_2_0.after(V_5_0_0), is(false));
+        assertThat(V_2_2_0.after(V_2_2_0), is(false));
+        assertThat(V_5_0_0.after(V_2_2_0), is(true));
+
+        assertThat(V_2_2_0.onOrAfter(V_5_0_0), is(false));
+        assertThat(V_2_2_0.onOrAfter(V_2_2_0), is(true));
+        assertThat(V_5_0_0.onOrAfter(V_2_2_0), is(true));
+
+        assertTrue(Version.fromString("5.0.0-alpha2").onOrAfter(Version.fromString("5.0.0-alpha1")));
+        assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2")));
+        assertTrue(Version.fromString("5.0.0-rc1").onOrAfter(Version.fromString("5.0.0-beta24")));
+        assertTrue(Version.fromString("5.0.0-alpha24").before(Version.fromString("5.0.0-beta0")));

-        assertThat(V_0_20_0.onOrAfter(V_0_90_0), is(false));
-        assertThat(V_0_20_0.onOrAfter(V_0_20_0), is(true));
-        assertThat(V_0_90_0.onOrAfter(V_0_20_0), is(true));
     }

     public void testVersionConstantPresent() {
@@ -127,31 +133,57 @@ public class VersionTests extends ESTestCase {

     public void testIndexCreatedVersion() {
         // an actual index has a IndexMetaData.SETTING_INDEX_UUID
-        final Version version = randomFrom(Version.V_0_18_0, Version.V_0_90_13, Version.V_1_3_0);
+        final Version version = randomFrom(Version.V_2_0_0, Version.V_2_3_0, Version.V_5_0_0);
         assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build()));
     }

     public void testMinCompatVersion() {
         assertThat(Version.V_2_0_0_beta1.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0_beta1));
-        assertThat(Version.V_1_3_0.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
-        assertThat(Version.V_1_2_0.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
-        assertThat(Version.V_1_2_3.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
-        assertThat(Version.V_1_0_0_RC2.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0_RC2));
+        assertThat(Version.V_2_1_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
+        assertThat(Version.V_2_2_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
+        assertThat(Version.V_2_3_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
+        assertThat(Version.V_5_0_0.minimumCompatibilityVersion(), equalTo(Version.V_5_0_0));
     }
     public void testToString() {
         // with 2.0.beta we lowercase
         assertEquals("2.0.0-beta1", Version.V_2_0_0_beta1.toString());
-        assertEquals("1.4.0.Beta1", Version.V_1_4_0_Beta1.toString());
-        assertEquals("1.4.0", Version.V_1_4_0.toString());
+        assertEquals("5.0.0", Version.V_5_0_0.toString());
+        assertEquals("2.3.0", Version.V_2_3_0.toString());
+        assertEquals("0.90.0.Beta1", Version.fromString("0.90.0.Beta1").toString());
+        assertEquals("1.0.0.Beta1", Version.fromString("1.0.0.Beta1").toString());
+        assertEquals("2.0.0-beta1", Version.fromString("2.0.0-beta1").toString());
+        assertEquals("5.0.0-beta1", Version.fromString("5.0.0-beta1").toString());
+        assertEquals("5.0.0-alpha1", Version.fromString("5.0.0-alpha1").toString());
     }

     public void testIsBeta() {
         assertTrue(Version.V_2_0_0_beta1.isBeta());
-        assertTrue(Version.V_1_4_0_Beta1.isBeta());
-        assertFalse(Version.V_1_4_0.isBeta());
+        assertTrue(Version.fromString("1.0.0.Beta1").isBeta());
+        assertTrue(Version.fromString("0.90.0.Beta1").isBeta());
     }
+
+    public void testIsAlpha() {
+        assertTrue(new Version(5000001, org.apache.lucene.util.Version.LUCENE_6_0_0).isAlpha());
+        assertFalse(new Version(4000002, org.apache.lucene.util.Version.LUCENE_6_0_0).isAlpha());
+        assertTrue(new Version(4000002, org.apache.lucene.util.Version.LUCENE_6_0_0).isBeta());
+        assertTrue(Version.fromString("5.0.0-alpha14").isAlpha());
+        assertEquals(5000014, Version.fromString("5.0.0-alpha14").id);
+        assertTrue(Version.fromId(5000015).isAlpha());
+
+        for (int i = 0 ; i < 25; i++) {
+            assertEquals(Version.fromString("5.0.0-alpha" + i).id, Version.fromId(5000000 + i).id);
+            assertEquals("5.0.0-alpha" + i, Version.fromId(5000000 + i).toString());
+        }
+
+        for (int i = 0 ; i < 25; i++) {
+            assertEquals(Version.fromString("5.0.0-beta" + i).id, Version.fromId(5000000 + i + 25).id);
+            assertEquals("5.0.0-beta" + i, Version.fromId(5000000 + i + 25).toString());
+        }
+    }
+
     public void testParseVersion() {
         final int iters = scaledRandomIntBetween(100, 1000);
         for (int i = 0; i < iters; i++) {
@@ -162,6 +194,17 @@
             Version parsedVersion = Version.fromString(version.toString());
             assertEquals(version, parsedVersion);
         }
+
+        expectThrows(IllegalArgumentException.class, () -> {
+            Version.fromString("5.0.0-alph2");
+        });
+        assertSame(Version.CURRENT, Version.fromString(Version.CURRENT.toString()));
+
+        assertSame(Version.fromString("2.0.0-SNAPSHOT"), Version.fromString("2.0.0"));
+
+        expectThrows(IllegalArgumentException.class, () -> {
+            Version.fromString("5.0.0-SNAPSHOT");
+        });
     }

     public void testParseLenient() {
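The tests above pin down the id scheme for prereleases: the trailing digits of the numeric version id carry the qualifier, alphas counting from 0 and betas offset by 25. Concretely:

    assertEquals(5000014, Version.fromString("5.0.0-alpha14").id);           // 5.0.0 base id 5000000 + 14
    assertEquals("5.0.0-beta1", Version.fromId(5000000 + 1 + 25).toString()); // beta range starts at +25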
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
index 5109ab979cf..586f178d12d 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
@@ -237,8 +237,8 @@ public class CancellableTasksTests extends TaskManagerTestCase {

         // Cancel main task
         CancelTasksRequest request = new CancelTasksRequest();
-        request.reason("Testing Cancellation");
-        request.taskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId()));
+        request.setReason("Testing Cancellation");
+        request.setTaskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId()));
         // And send the cancellation request to a random node
         CancelTasksResponse response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request)
             .get();
@@ -270,7 +270,7 @@ public class CancellableTasksTests extends TaskManagerTestCase {

         // Make sure that tasks are no longer running
         ListTasksResponse listTasksResponse = testNodes[randomIntBetween(0, testNodes.length - 1)]
-            .transportListTasksAction.execute(new ListTasksRequest().taskId(
+            .transportListTasksAction.execute(new ListTasksRequest().setTaskId(
                 new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId()))).get();
         assertEquals(0, listTasksResponse.getTasks().size());

@@ -313,7 +313,7 @@ public class CancellableTasksTests extends TaskManagerTestCase {

         // Make sure that tasks are running
         ListTasksResponse listTasksResponse = testNodes[randomIntBetween(0, testNodes.length - 1)]
-            .transportListTasksAction.execute(new ListTasksRequest().parentTaskId(new TaskId(mainNode, mainTask.getId()))).get();
+            .transportListTasksAction.execute(new ListTasksRequest().setParentTaskId(new TaskId(mainNode, mainTask.getId()))).get();
         assertThat(listTasksResponse.getTasks().size(), greaterThanOrEqualTo(blockOnNodes.size()));

         // Simulate the coordinating node leaving the cluster
@@ -331,8 +331,8 @@ public class CancellableTasksTests extends TaskManagerTestCase {
             logger.info("--> Simulate issuing cancel request on the node that is about to leave the cluster");
             // Simulate issuing cancel request on the node that is about to leave the cluster
             CancelTasksRequest request = new CancelTasksRequest();
-            request.reason("Testing Cancellation");
-            request.taskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId()));
+            request.setReason("Testing Cancellation");
+            request.setTaskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId()));
             // And send the cancellation request to a random node
             CancelTasksResponse response = testNodes[0].transportCancelTasksAction.execute(request).get();
             logger.info("--> Done simulating issuing cancel request on the node that is about to leave the cluster");
@@ -356,7 +356,7 @@ public class CancellableTasksTests extends TaskManagerTestCase {
         // Make sure that tasks are no longer running
         try {
             ListTasksResponse listTasksResponse1 = testNodes[randomIntBetween(1, testNodes.length - 1)]
-                .transportListTasksAction.execute(new ListTasksRequest().taskId(new TaskId(mainNode, mainTask.getId()))).get();
+                .transportListTasksAction.execute(new ListTasksRequest().setTaskId(new TaskId(mainNode, mainTask.getId()))).get();
             assertEquals(0, listTasksResponse1.getTasks().size());
         } catch (InterruptedException ex) {
             Thread.currentThread().interrupt();
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
index eaa3caf9084..8c791a99018 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
@@ -18,6 +18,8 @@
  */
 package org.elasticsearch.action.admin.cluster.node.tasks;

+import org.elasticsearch.ElasticsearchTimeoutException;
+import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.ListenableActionFuture;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
 import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
@@ -40,6 +42,7 @@ import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.tasks.MockTaskManager;
 import org.elasticsearch.test.tasks.MockTaskManagerListener;
 import org.elasticsearch.test.transport.MockTransportService;
+import org.elasticsearch.transport.ReceiveTimeoutTransportException;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -54,8 +57,11 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.function.Function;

+import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
+import static org.hamcrest.Matchers.either;
 import static org.hamcrest.Matchers.emptyCollectionOf;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.not;

@@ -327,6 +333,78 @@ public class TasksIT extends ESIntegTestCase {
         assertEquals(0, client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size());
     }

+    public void testTasksListWaitForCompletion() throws Exception {
+        // Start blocking test task
+        ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client())
+                .execute();
+
+        ListenableActionFuture<ListTasksResponse> waitResponseFuture;
+        try {
+            // Wait for the task to start on all nodes
+            assertBusy(() -> assertEquals(internalCluster().numDataAndMasterNodes(),
+                client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size()));
+
+            // Spin up a request to wait for that task to finish
+            waitResponseFuture = client().admin().cluster().prepareListTasks()
+                    .setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).execute();
+        } finally {
+            // Unblock the request so the wait for completion request can finish
+            TestTaskPlugin.UnblockTestTasksAction.INSTANCE.newRequestBuilder(client()).get();
+        }
+
+        // Now that the task is unblocked the list response will come back
+        ListTasksResponse waitResponse = waitResponseFuture.get();
+        // If any tasks come back then they are the tasks we asked for - it'd be super weird if this wasn't true
+        for (TaskInfo task: waitResponse.getTasks()) {
+            assertEquals(task.getAction(), TestTaskPlugin.TestTaskAction.NAME + "[n]");
+        }
+        // See the next test to cover the timeout case
+
+        future.get();
+    }
+
+    public void testTasksListWaitForTimeout() throws Exception {
+        // Start blocking test task
+        ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client())
+                .execute();
+        try {
+            // Wait for the task to start on all nodes
+            assertBusy(() -> assertEquals(internalCluster().numDataAndMasterNodes(),
+                client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size()));
+
+            // Spin up a request that should wait for those tasks to finish
+            // It will timeout because we haven't unblocked the tasks
+            ListTasksResponse waitResponse = client().admin().cluster().prepareListTasks()
+                    .setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).setTimeout(timeValueMillis(100))
+                    .get();
+
+            assertFalse(waitResponse.getNodeFailures().isEmpty());
+            for (FailedNodeException failure : waitResponse.getNodeFailures()) {
+                Throwable timeoutException = failure.getCause();
+                // The exception sometimes comes back wrapped depending on the client
+                if (timeoutException.getCause() != null) {
+                    timeoutException = timeoutException.getCause();
+                }
+                assertThat(timeoutException,
+                    either(instanceOf(ElasticsearchTimeoutException.class)).or(instanceOf(ReceiveTimeoutTransportException.class)));
+            }
+        } finally {
+            // Now we can unblock those requests
+            TestTaskPlugin.UnblockTestTasksAction.INSTANCE.newRequestBuilder(client()).get();
+        }
+        future.get();
+    }
+
+    public void testTasksListWaitForNoTask() throws Exception {
+        // Spin up a request to wait for no matching tasks
+        ListenableActionFuture<ListTasksResponse> waitResponseFuture = client().admin().cluster().prepareListTasks()
+                .setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).setTimeout(timeValueMillis(10))
+                .execute();
+
+        // It should finish quickly and without complaint
+        assertThat(waitResponseFuture.get().getTasks(), emptyCollectionOf(TaskInfo.class));
+    }
+
     @Override
     public void tearDown() throws Exception {
         for (Map.Entry<Tuple<String, TaskId>, RecordingTaskManagerListener> entry : listeners.entrySet()) {
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
index 0d4372a51eb..e8dcd228e50 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
@@ -345,7 +345,10 @@ public class TestTaskPlugin extends Plugin {

     public static class UnblockTestTasksRequest extends BaseTasksRequest<UnblockTestTasksRequest> {

-
+        @Override
+        public boolean match(Task task) {
+            return task instanceof TestTask && super.match(task);
+        }
     }

     public static class UnblockTestTasksResponse extends BaseTasksResponse {
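The match(Task) override is the hook a BaseTasksRequest subclass uses to narrow node-side task selection beyond the standard id/action/parent filters; here it restricts unblocking to the plugin's own TestTask instances. The same pattern for any custom request (MyTask and MyTasksRequest are hypothetical names):

    public class MyTasksRequest extends BaseTasksRequest<MyTasksRequest> {
        @Override
        public boolean match(Task task) {
            // operate only on tasks of my own type, then apply the base filters
            return task instanceof MyTask && super.match(task);
        }
    }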
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
index e1501f9b14c..556eee238fd 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
@@ -355,7 +355,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
         int testNodeNum = randomIntBetween(0, testNodes.length - 1);
         TestNode testNode = testNodes[testNodeNum];
         ListTasksRequest listTasksRequest = new ListTasksRequest();
-        listTasksRequest.actions("testAction*"); // pick all test actions
+        listTasksRequest.setActions("testAction*"); // pick all test actions
         logger.info("Listing currently running tasks using node [{}]", testNodeNum);
         ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
         logger.info("Checking currently running tasks");
@@ -371,7 +371,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {

         // Check task counts using transport with filtering
         testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
         listTasksRequest = new ListTasksRequest();
-        listTasksRequest.actions("testAction[n]"); // only pick node actions
+        listTasksRequest.setActions("testAction[n]"); // only pick node actions
         response = testNode.transportListTasksAction.execute(listTasksRequest).get();
         assertEquals(testNodes.length, response.getPerNodeTasks().size());
         for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
@@ -380,7 +380,7 @@
         }

         // Check task counts using transport with detailed description
-        listTasksRequest.detailed(true); // same request only with detailed description
+        listTasksRequest.setDetailed(true); // same request only with detailed description
         response = testNode.transportListTasksAction.execute(listTasksRequest).get();
         assertEquals(testNodes.length, response.getPerNodeTasks().size());
         for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
@@ -389,7 +389,7 @@
         }

         // Make sure that the main task on coordinating node is the task that was returned to us by execute()
-        listTasksRequest.actions("testAction"); // only pick the main task
+        listTasksRequest.setActions("testAction"); // only pick the main task
         response = testNode.transportListTasksAction.execute(listTasksRequest).get();
         assertEquals(1, response.getTasks().size());
         assertEquals(mainTask.getId(), response.getTasks().get(0).getId());
@@ -417,7 +417,7 @@

         // Get the parent task
         ListTasksRequest listTasksRequest = new ListTasksRequest();
-        listTasksRequest.actions("testAction");
+        listTasksRequest.setActions("testAction");
         ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
         assertEquals(1, response.getTasks().size());
         String parentNode = response.getTasks().get(0).getNode().getId();
@@ -425,7 +425,7 @@

         // Find tasks with common parent
         listTasksRequest = new ListTasksRequest();
-        listTasksRequest.parentTaskId(new TaskId(parentNode, parentTaskId));
+        listTasksRequest.setParentTaskId(new TaskId(parentNode, parentTaskId));
         response = testNode.transportListTasksAction.execute(listTasksRequest).get();
         assertEquals(testNodes.length, response.getTasks().size());
         for (TaskInfo task : response.getTasks()) {
@@ -451,7 +451,7 @@

         // Get the parent task
         ListTasksRequest listTasksRequest = new ListTasksRequest();
-        listTasksRequest.actions("testAction*");
+        listTasksRequest.setActions("testAction*");
         ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
         assertEquals(0, response.getTasks().size());
@@ -472,7 +472,7 @@

         // Check task counts using transport with filtering
         TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
         ListTasksRequest listTasksRequest = new ListTasksRequest();
-        listTasksRequest.actions("testAction[n]"); // only pick node actions
+        listTasksRequest.setActions("testAction[n]"); // only pick node actions
         ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
         assertEquals(testNodes.length, response.getPerNodeTasks().size());
         for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
@@ -482,7 +482,7 @@

         // Check task counts using transport with detailed description
         long minimalDurationNanos = System.nanoTime() - maximumStartTimeNanos;
-        listTasksRequest.detailed(true); // same request only with detailed description
+        listTasksRequest.setDetailed(true); // same request only with detailed description
         response = testNode.transportListTasksAction.execute(listTasksRequest).get();
         assertEquals(testNodes.length, response.getPerNodeTasks().size());
         for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
@@ -518,9 +518,9 @@

         // Try to cancel main task using action name
         CancelTasksRequest request = new CancelTasksRequest();
-        request.nodesIds(testNodes[0].discoveryNode.getId());
-        request.reason("Testing Cancellation");
-        request.actions(actionName);
+        request.setNodesIds(testNodes[0].discoveryNode.getId());
+        request.setReason("Testing Cancellation");
+        request.setActions(actionName);
         CancelTasksResponse response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request)
             .get();
@@ -532,8 +532,8 @@

         // Try to cancel main task using id
         request = new CancelTasksRequest();
-        request.reason("Testing Cancellation");
-        request.taskId(new TaskId(testNodes[0].discoveryNode.getId(), task.getId()));
+        request.setReason("Testing Cancellation");
+        request.setTaskId(new TaskId(testNodes[0].discoveryNode.getId(), task.getId()));
         response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request).get();

         // Shouldn't match any tasks since testAction doesn't support cancellation
@@ -544,7 +544,7 @@

         // Make sure that task is still running
         ListTasksRequest listTasksRequest = new ListTasksRequest();
-        listTasksRequest.actions(actionName);
+        listTasksRequest.setActions(actionName);
         ListTasksResponse listResponse = testNodes[randomIntBetween(0, testNodes.length - 1)].transportListTasksAction.execute
             (listTasksRequest).get();
         assertEquals(1, listResponse.getPerNodeTasks().size());
@@ -617,7 +617,7 @@

         // Run task action on node tasks that are currently running
         // should be successful on all nodes except one
         TestTasksRequest testTasksRequest = new TestTasksRequest();
-        testTasksRequest.actions("testAction[n]"); // pick all test actions
+        testTasksRequest.setActions("testAction[n]"); // pick all test actions
         TestTasksResponse response = tasksActions[0].execute(testTasksRequest).get();
         // Get successful responses from all nodes except one
         assertEquals(testNodes.length - 1, response.tasks.size());
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java
index a2d838bc3fd..fc04de81254 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java
@@ -55,13 +55,7 @@ public class ClusterStateRequestTests extends ESTestCase {
             assertThat(deserializedCSRequest.nodes(), equalTo(clusterStateRequest.nodes()));
             assertThat(deserializedCSRequest.blocks(), equalTo(clusterStateRequest.blocks()));
             assertThat(deserializedCSRequest.indices(), equalTo(clusterStateRequest.indices()));
-
-            if (testVersion.onOrAfter(Version.V_1_5_0)) {
-                assertOptionsMatch(deserializedCSRequest.indicesOptions(), clusterStateRequest.indicesOptions());
-            } else {
-                // versions before V_1_5_0 use IndicesOptions.lenientExpandOpen()
-                assertOptionsMatch(deserializedCSRequest.indicesOptions(), IndicesOptions.lenientExpandOpen());
-            }
+            assertOptionsMatch(deserializedCSRequest.indicesOptions(), clusterStateRequest.indicesOptions());
         }
     }
indexRandom(true, docs); ensureGreen(indexName); - if (globalCompatibilityVersion().before(Version.V_1_4_0_Beta1)) { - // before 1.4 and the wait_if_ongoing flag, flushes could fail randomly, so we - // need to continue to try flushing until all shards succeed - assertTrue(awaitBusy(() -> flush(indexName).getFailedShards() == 0)); - } else { - assertEquals(0, flush(indexName).getFailedShards()); - } + assertEquals(0, flush(indexName).getFailedShards()); // index more docs that won't be flushed numDocs = scaledRandomIntBetween(100, 1000); diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java index 45986eab00e..3c269c39004 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java @@ -131,7 +131,7 @@ public class BootstrapCheckTests extends ESTestCase { } public void testMaxNumberOfThreadsCheck() { - final int limit = 1 << 15; + final int limit = 1 << 11; final AtomicLong maxNumberOfThreads = new AtomicLong(randomIntBetween(1, limit - 1)); final BootstrapCheck.MaxNumberOfThreadsCheck check = new BootstrapCheck.MaxNumberOfThreadsCheck() { @Override diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java index 9a0316050b1..9ea9b340c20 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java @@ -104,17 +104,8 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase { } private String randomAnalyzer() { - while(true) { - PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(getRandom(), PreBuiltAnalyzers.values()); - if (preBuiltAnalyzers == PreBuiltAnalyzers.SORANI && compatibilityVersion().before(Version.V_1_3_0)) { - continue; // SORANI was added in 1.3.0 - } - if (preBuiltAnalyzers == PreBuiltAnalyzers.LITHUANIAN && compatibilityVersion().before(Version.V_2_1_0)) { - continue; // LITHUANIAN was added in 2.1.0 - } - return preBuiltAnalyzers.name().toLowerCase(Locale.ROOT); - } - + PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(getRandom(), PreBuiltAnalyzers.values()); + return preBuiltAnalyzers.name().toLowerCase(Locale.ROOT); } private static final class InputOutput { @@ -127,7 +118,5 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase { this.input = input; this.field = field; } - - } } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java index 5b7c4fa37ba..7e46825398b 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java @@ -188,10 +188,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { docs[i] = client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(numDocs + i)); } indexRandom(true, docs); - if (compatibilityVersion().before(Version.V_1_3_0)) { - // issue another refresh through a new node to side step issue #6545 - 
assertNoFailures(backwardsCluster().internalCluster().dataNodeClient().admin().indices().prepareRefresh().setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get()); - } numDocs *= 2; } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index b13cee98565..8e3dbd5f563 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -446,7 +446,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { // #10067: create-bwc-index.py deleted any doc with long_sort:[10-20] void assertDeleteByQueryWorked(String indexName, Version version) throws Exception { - if (version.onOrBefore(Version.V_1_0_0_Beta2) || version.onOrAfter(Version.V_2_0_0_beta1)) { + if (version.onOrAfter(Version.V_2_0_0_beta1)) { // TODO: remove this once #10262 is fixed return; } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index ec73edd493f..483040209d0 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -194,14 +194,11 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { assertThat(template.settings().getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1), equalTo(1)); assertThat(template.mappings().size(), equalTo(1)); assertThat(template.mappings().get("type1").string(), equalTo("{\"type1\":{\"_source\":{\"enabled\":false}}}")); - if (Version.fromString(version).onOrAfter(Version.V_1_1_0)) { - // Support for aliases in templates was added in v1.1.0 - assertThat(template.aliases().size(), equalTo(3)); - assertThat(template.aliases().get("alias1"), notNullValue()); - assertThat(template.aliases().get("alias2").filter().string(), containsString(version)); - assertThat(template.aliases().get("alias2").indexRouting(), equalTo("kimchy")); - assertThat(template.aliases().get("{index}-alias"), notNullValue()); - } + assertThat(template.aliases().size(), equalTo(3)); + assertThat(template.aliases().get("alias1"), notNullValue()); + assertThat(template.aliases().get("alias2").filter().string(), containsString(version)); + assertThat(template.aliases().get("alias2").indexRouting(), equalTo("kimchy")); + assertThat(template.aliases().get("{index}-alias"), notNullValue()); logger.info("--> cleanup"); cluster().wipeIndices(restoreInfo.indices().toArray(new String[restoreInfo.indices().size()])); diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java index a43da9e53fa..0d8784834fa 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -83,7 +83,7 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_CREATION_DATE, 1) .put(IndexMetaData.SETTING_INDEX_UUID, "BOOM") - .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_0_18_1_ID) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_2_0_0_beta1) 
.put(indexSettings) .build(); IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index d911a1175c7..94336d23623 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -109,7 +109,12 @@ public class PrimaryAllocationIT extends ESIntegTestCase { logger.info("--> check that old primary shard does not get promoted to primary again"); // kick reroute and wait for all shard states to be fetched client(master).admin().cluster().prepareReroute().get(); - assertBusy(() -> assertThat(internalCluster().getInstance(GatewayAllocator.class, master).getNumberOfInFlightFetch(), equalTo(0))); + assertBusy(new Runnable() { + @Override + public void run() { + assertThat(internalCluster().getInstance(GatewayAllocator.class, master).getNumberOfInFlightFetch(), equalTo(0)); + } + }); // kick reroute a second time and check that all shards are unassigned assertThat(client(master).admin().cluster().prepareReroute().get().getState().getRoutingNodes().unassigned().size(), equalTo(2)); } diff --git a/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java b/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java index f4b8747ccdc..3770cd25c10 100644 --- a/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java +++ b/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java @@ -20,8 +20,7 @@ package org.elasticsearch.common; import org.elasticsearch.test.ESTestCase; -import java.util.EnumSet; - +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.sameInstance; @@ -33,38 +32,29 @@ public class ParseFieldTests extends ESTestCase { String[] deprecated = new String[]{"barFoo", "bar_foo"}; ParseField withDeprecations = field.withDeprecation("Foobar", randomFrom(deprecated)); assertThat(field, not(sameInstance(withDeprecations))); - assertThat(field.match(randomFrom(values), ParseField.EMPTY_FLAGS), is(true)); - assertThat(field.match("foo bar", ParseField.EMPTY_FLAGS), is(false)); - assertThat(field.match(randomFrom(deprecated), ParseField.EMPTY_FLAGS), is(false)); - assertThat(field.match("barFoo", ParseField.EMPTY_FLAGS), is(false)); + assertThat(field.match(randomFrom(values), false), is(true)); + assertThat(field.match("foo bar", false), is(false)); + assertThat(field.match(randomFrom(deprecated), false), is(false)); + assertThat(field.match("barFoo", false), is(false)); - assertThat(withDeprecations.match(randomFrom(values), ParseField.EMPTY_FLAGS), is(true)); - assertThat(withDeprecations.match("foo bar", ParseField.EMPTY_FLAGS), is(false)); - assertThat(withDeprecations.match(randomFrom(deprecated), ParseField.EMPTY_FLAGS), is(true)); - assertThat(withDeprecations.match("barFoo", ParseField.EMPTY_FLAGS), is(true)); + assertThat(withDeprecations.match(randomFrom(values), false), is(true)); + assertThat(withDeprecations.match("foo bar", false), is(false)); + assertThat(withDeprecations.match(randomFrom(deprecated), false), is(true)); + assertThat(withDeprecations.match("barFoo", false), is(true)); // now with strict mode - EnumSet flags = EnumSet.of(ParseField.Flag.STRICT); - assertThat(field.match(randomFrom(values), flags), is(true)); - 
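Note on the ParseFieldTests hunks above: ParseField.match now takes a plain boolean strict flag instead of an EnumSet of flags, and the try/fail/catch blocks become expectThrows. A minimal sketch of the new pattern (field names are illustrative; expectThrows is provided by the shared test infrastructure):

    ParseField field = new ParseField("foo_bar").withDeprecation("barFoo");
    assertThat(field.match("barFoo", false), is(true));   // lenient: deprecated name accepted
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
        () -> field.match("barFoo", true));               // strict: deprecated name rejected
    assertThat(e.getMessage(), containsString("expected [foo_bar]"));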
diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
index 6e4d3867fde..566d2148cae 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
@@ -19,13 +19,13 @@
 
 package org.elasticsearch.common.geo;
 
-import com.spatial4j.core.exception.InvalidShapeException;
-import com.spatial4j.core.shape.Circle;
-import com.spatial4j.core.shape.Rectangle;
-import com.spatial4j.core.shape.Shape;
-import com.spatial4j.core.shape.ShapeCollection;
-import com.spatial4j.core.shape.jts.JtsGeometry;
-import com.spatial4j.core.shape.jts.JtsPoint;
+import org.locationtech.spatial4j.exception.InvalidShapeException;
+import org.locationtech.spatial4j.shape.Circle;
+import org.locationtech.spatial4j.shape.Rectangle;
+import org.locationtech.spatial4j.shape.Shape;
+import org.locationtech.spatial4j.shape.ShapeCollection;
+import org.locationtech.spatial4j.shape.jts.JtsGeometry;
+import org.locationtech.spatial4j.shape.jts.JtsPoint;
 import com.vividsolutions.jts.geom.Coordinate;
 import com.vividsolutions.jts.geom.Geometry;
 import com.vividsolutions.jts.geom.GeometryFactory;
diff --git a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
index 06fadffc806..abbd6ce40aa 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
@@ -19,12 +19,12 @@
 
 package org.elasticsearch.common.geo;
 
-import com.spatial4j.core.exception.InvalidShapeException;
-import com.spatial4j.core.shape.Circle;
-import com.spatial4j.core.shape.Point;
-import com.spatial4j.core.shape.Rectangle;
-import com.spatial4j.core.shape.Shape;
-import com.spatial4j.core.shape.impl.PointImpl;
+import org.locationtech.spatial4j.exception.InvalidShapeException;
+import org.locationtech.spatial4j.shape.Circle;
+import org.locationtech.spatial4j.shape.Point;
+import org.locationtech.spatial4j.shape.Rectangle;
+import org.locationtech.spatial4j.shape.Shape;
+import org.locationtech.spatial4j.shape.impl.PointImpl;
 import com.vividsolutions.jts.geom.Coordinate;
 import com.vividsolutions.jts.geom.LineString;
 import com.vividsolutions.jts.geom.Polygon;
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java
index 305e57fbaf1..881db868ef9 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/EnvelopeBuilderTests.java
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.geo.builders;
 
-import com.spatial4j.core.shape.Rectangle;
+import org.locationtech.spatial4j.shape.Rectangle;
 import com.vividsolutions.jts.geom.Coordinate;
 
 import org.elasticsearch.test.geo.RandomShapeGenerator;
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java
deleted file mode 100644
index a4a5972e45b..00000000000
--- a/core/src/test/java/org/elasticsearch/common/lucene/IndexCacheableQueryTests.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.lucene;
-
-import org.apache.lucene.document.Document;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.MultiReader;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.Explanation;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.LRUQueryCache;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryCachingPolicy;
-import org.apache.lucene.search.QueryUtils;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.Weight;
-import org.apache.lucene.store.Directory;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-import java.util.Set;
-
-public class IndexCacheableQueryTests extends ESTestCase {
-
-    static class DummyIndexCacheableQuery extends IndexCacheableQuery {
-        @Override
-        public String toString(String field) {
-            return "DummyIndexCacheableQuery";
-        }
-
-        @Override
-        public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-            return new Weight(this) {
-
-                @Override
-                public void extractTerms(Set<Term> terms) {
-                    throw new UnsupportedOperationException();
-                }
-
-                @Override
-                public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-                    throw new UnsupportedOperationException();
-                }
-
-                @Override
-                public float getValueForNormalization() throws IOException {
-                    return 0;
-                }
-
-                @Override
-                public void normalize(float norm, float topLevelBoost) {
-                }
-
-                @Override
-                public Scorer scorer(LeafReaderContext context) throws IOException {
-                    return null;
-                }
-
-            };
-        }
-    }
-
-    public void testBasics() throws IOException {
-        DummyIndexCacheableQuery query = new DummyIndexCacheableQuery();
-        QueryUtils.check(query);
-
-        Query rewritten = query.rewrite(new MultiReader(new IndexReader[0]));
-        QueryUtils.check(rewritten);
-        QueryUtils.checkUnequal(query, rewritten);
-
-        Query rewritten2 = query.rewrite(new MultiReader(new IndexReader[0]));
-        QueryUtils.check(rewritten2);
-        QueryUtils.checkUnequal(rewritten, rewritten2);
-    }
-
-    public void testCache() throws IOException {
-        Directory dir = newDirectory();
-        LRUQueryCache cache = new LRUQueryCache(10000, Long.MAX_VALUE);
-        QueryCachingPolicy policy = QueryCachingPolicy.ALWAYS_CACHE;
-        RandomIndexWriter writer = new RandomIndexWriter(getRandom(), dir);
-        for (int i = 0; i < 10; ++i) {
-            writer.addDocument(new Document());
-        }
-
-        IndexReader reader = writer.getReader();
-        IndexSearcher searcher = newSearcher(reader);
-        reader = searcher.getIndexReader(); // reader might be wrapped
-        searcher.setQueryCache(cache);
-        searcher.setQueryCachingPolicy(policy);
-
-        assertEquals(0, cache.getCacheSize());
-        DummyIndexCacheableQuery query = new DummyIndexCacheableQuery();
-        searcher.count(query);
-        int expectedCacheSize = reader.leaves().size();
-        assertEquals(expectedCacheSize, cache.getCacheSize());
-        searcher.count(query);
-        assertEquals(expectedCacheSize, cache.getCacheSize());
-
-        writer.addDocument(new Document());
-
-        IndexReader reader2 = writer.getReader();
-        searcher = newSearcher(reader2);
-        reader2 = searcher.getIndexReader(); // reader might be wrapped
-        searcher.setQueryCache(cache);
-        searcher.setQueryCachingPolicy(policy);
-
-        // since the query is only cacheable at the index level, it has to be recomputed on all leaves
-        expectedCacheSize += reader2.leaves().size();
-        searcher.count(query);
-        assertEquals(expectedCacheSize, cache.getCacheSize());
-        searcher.count(query);
-        assertEquals(expectedCacheSize, cache.getCacheSize());
-
-        reader.close();
-        reader2.close();
-        writer.close();
-        assertEquals(0, cache.getCacheSize());
-        dir.close();
-    }
-
-}
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
index 484b88f096f..8df6f5c78cc 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
@@ -84,7 +84,6 @@ public class LuceneTests extends ESTestCase {
 
         // now shadow engine should try to be created
         latch.countDown();
-        dir.setEnableVirusScanner(false);
         IndexWriterConfig iwc = newIndexWriterConfig();
         iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
         iwc.setMergePolicy(NoMergePolicy.INSTANCE);
@@ -104,7 +103,6 @@ public class LuceneTests extends ESTestCase {
 
     public void testCleanIndex() throws IOException {
         MockDirectoryWrapper dir = newMockDirectory();
-        dir.setEnableVirusScanner(false);
         IndexWriterConfig iwc = newIndexWriterConfig();
         iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
         iwc.setMergePolicy(NoMergePolicy.INSTANCE);
@@ -130,7 +128,7 @@ public class LuceneTests extends ESTestCase {
         writer.deleteDocuments(new Term("id", "2"));
         writer.commit();
 
-        try (DirectoryReader open = DirectoryReader.open(writer, true)) {
+        try (DirectoryReader open = DirectoryReader.open(writer)) {
             assertEquals(3, open.numDocs());
             assertEquals(1, open.numDeletedDocs());
             assertEquals(4, open.maxDoc());
@@ -158,7 +156,6 @@ public class LuceneTests extends ESTestCase {
 
     public void testPruneUnreferencedFiles() throws IOException {
         MockDirectoryWrapper dir = newMockDirectory();
-        dir.setEnableVirusScanner(false);
         IndexWriterConfig iwc = newIndexWriterConfig();
         iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
         iwc.setMergePolicy(NoMergePolicy.INSTANCE);
@@ -186,7 +183,7 @@ public class LuceneTests extends ESTestCase {
         writer.deleteDocuments(new Term("id", "2"));
         writer.commit();
-        DirectoryReader open = DirectoryReader.open(writer, true);
+        DirectoryReader open = DirectoryReader.open(writer);
         assertEquals(3, open.numDocs());
         assertEquals(1, open.numDeletedDocs());
         assertEquals(4, open.maxDoc());
@@ -215,7 +212,6 @@ public class LuceneTests extends ESTestCase {
 
     public void testFiles() throws IOException {
         MockDirectoryWrapper dir = newMockDirectory();
-        dir.setEnableVirusScanner(false);
         IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
         iwc.setMergePolicy(NoMergePolicy.INSTANCE);
         iwc.setMaxBufferedDocs(2);
@@ -279,7 +275,6 @@ public class LuceneTests extends ESTestCase {
 
     public void testNumDocs() throws IOException {
         MockDirectoryWrapper dir = newMockDirectory();
-        dir.setEnableVirusScanner(false);
         IndexWriterConfig iwc = newIndexWriterConfig();
         IndexWriter writer = new IndexWriter(dir, iwc);
         Document doc = new Document();
@@ -369,6 +364,6 @@ public class LuceneTests extends ESTestCase {
      */
     public void testMMapHackSupported() throws Exception {
         // add assume's here if needed for certain platforms, but we should know if it does not work.
-        assertTrue(MMapDirectory.UNMAP_SUPPORTED);
+        assertTrue("MMapDirectory does not support unmapping: " + MMapDirectory.UNMAP_NOT_SUPPORTED_REASON, MMapDirectory.UNMAP_SUPPORTED);
     }
 }
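Note on the reader-opening call sites above: each one loses its boolean argument. In the Lucene 6 API this branch moves to, the near-real-time DirectoryReader.open(IndexWriter) always applies deletes, so the old open(writer, applyAllDeletes) overload disappears from these tests. A minimal sketch of the updated idiom (directory and analyzer setup assumed):

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));
    writer.addDocument(new Document());
    try (DirectoryReader reader = DirectoryReader.open(writer)) {  // NRT view, deletes applied
        IndexSearcher searcher = new IndexSearcher(reader);
        // run queries against the in-flight index here
    }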
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java
index 9e7f54b8323..f2dc53e44cd 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java
@@ -152,7 +152,7 @@ public class SimpleAllTests extends ESTestCase {
 
         indexWriter.addDocument(doc);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
 
         Query query = new AllTermQuery(new Term("_all", "else"));
@@ -198,7 +198,7 @@ public class SimpleAllTests extends ESTestCase {
 
         indexWriter.addDocument(doc);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
 
         // this one is boosted. so the second doc is more relevant
@@ -244,7 +244,7 @@ public class SimpleAllTests extends ESTestCase {
 
         indexWriter.addDocument(doc);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         assertEquals(2, reader.leaves().size());
         IndexSearcher searcher = new IndexSearcher(reader);
@@ -280,7 +280,7 @@ public class SimpleAllTests extends ESTestCase {
 
         indexWriter.addDocument(doc);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
 
         TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
@@ -330,7 +330,7 @@ public class SimpleAllTests extends ESTestCase {
 
         indexWriter.addDocument(doc);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
 
         TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
@@ -368,7 +368,7 @@ public class SimpleAllTests extends ESTestCase {
 
         indexWriter.addDocument(doc);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
 
         TopDocs docs = searcher.search(new MatchAllDocsQuery(), 10);
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java
index 7fb3ec0c2e9..817dabfece3 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java
@@ -55,7 +55,7 @@ public class ESDirectoryReaderTests extends ESTestCase {
 
         // open reader
         ShardId shardId = new ShardId("fake", "_na_", 1);
-        DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw, true), shardId);
+        DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw), shardId);
         assertEquals(2, ir.numDocs());
         assertEquals(1, ir.leaves().size());
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java
index 51d2ba77ec5..3d1b0fdb842 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java
@@ -137,7 +137,7 @@ public class FreqTermsEnumTests extends ESTestCase {
         }
 
         // now go over each doc, build the relevant references and filter
-        reader = DirectoryReader.open(iw, true);
+        reader = DirectoryReader.open(iw);
         List filterTerms = new ArrayList<>();
         for (int docId = 0; docId < reader.maxDoc(); docId++) {
             Document doc = reader.document(docId);
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java
index 9098289847e..23b6939fe7a 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java
@@ -27,15 +27,12 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
 import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.test.ESTestCase;
 
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.instanceOf;
 
 public class MultiPhrasePrefixQueryTests extends ESTestCase {
     public void testSimple() throws Exception {
@@ -43,7 +40,7 @@ public class MultiPhrasePrefixQueryTests extends ESTestCase {
         Document doc = new Document();
         doc.add(new Field("field", "aaa bbb ccc ddd", TextField.TYPE_NOT_STORED));
         writer.addDocument(doc);
-        IndexReader reader = DirectoryReader.open(writer, true);
+        IndexReader reader = DirectoryReader.open(writer);
         IndexSearcher searcher = new IndexSearcher(reader);
 
         MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery();
@@ -66,22 +63,4 @@ public class MultiPhrasePrefixQueryTests extends ESTestCase {
         query.add(new Term("field", "xxx"));
         assertThat(searcher.count(query), equalTo(0));
     }
-
-    public void testBoost() throws Exception {
-        IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
-        Document doc = new Document();
-        doc.add(new Field("field", "aaa bbb", TextField.TYPE_NOT_STORED));
-        writer.addDocument(doc);
-        doc = new Document();
-        doc.add(new Field("field", "ccc ddd", TextField.TYPE_NOT_STORED));
-        writer.addDocument(doc);
-        IndexReader reader = DirectoryReader.open(writer, true);
-        MultiPhrasePrefixQuery multiPhrasePrefixQuery = new MultiPhrasePrefixQuery();
-        multiPhrasePrefixQuery.add(new Term[]{new Term("field", "aaa"), new Term("field", "bb")});
-        multiPhrasePrefixQuery.setBoost(randomFloat());
-        Query query = multiPhrasePrefixQuery.rewrite(reader);
-        assertThat(query, instanceOf(BoostQuery.class));
-        BoostQuery boostQuery = (BoostQuery) query;
-        assertThat(boostQuery.getBoost(), equalTo(multiPhrasePrefixQuery.getBoost()));
-    }
 }
\ No newline at end of file
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java
index b0e2ea873c4..0dcce74c1d2 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java
@@ -54,7 +54,7 @@ public class MoreLikeThisQueryTests extends ESTestCase {
         document.add(new TextField("text", "lucene release", Field.Store.YES));
         indexWriter.addDocument(document);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
 
         MoreLikeThisQuery mltQuery = new MoreLikeThisQuery("lucene", new String[]{"text"}, Lucene.STANDARD_ANALYZER);
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java
index 1340d11616c..72b6b2b5eec 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionLookupTests.java
@@ -51,7 +51,7 @@ public class VersionLookupTests extends ESTestCase {
         doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE));
         doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
         writer.addDocument(doc);
-        DirectoryReader reader = DirectoryReader.open(writer, false);
+        DirectoryReader reader = DirectoryReader.open(writer);
         LeafReaderContext segment = reader.leaves().get(0);
         PerThreadIDAndVersionLookup lookup = new PerThreadIDAndVersionLookup(segment.reader());
         // found doc
@@ -79,7 +79,7 @@ public class VersionLookupTests extends ESTestCase {
         doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
         writer.addDocument(doc);
         writer.addDocument(doc);
-        DirectoryReader reader = DirectoryReader.open(writer, false);
+        DirectoryReader reader = DirectoryReader.open(writer);
         LeafReaderContext segment = reader.leaves().get(0);
         PerThreadIDAndVersionLookup lookup = new PerThreadIDAndVersionLookup(segment.reader());
         // return the last doc when there are duplicates
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java
index 1221445e8a6..7f405ea0531 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java
@@ -78,7 +78,7 @@ public class VersionsTests extends ESTestCase {
     public void testVersions() throws Exception {
         Directory dir = newDirectory();
         IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
-        DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+        DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
         MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND));
 
         Document doc = new Document();
@@ -145,7 +145,7 @@ public class VersionsTests extends ESTestCase {
         docs.add(doc);
         writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
-        DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+        DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
         assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(5L));
         assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(5L));
 
@@ -170,7 +170,7 @@ public class VersionsTests extends ESTestCase {
         Directory dir = newDirectory();
         IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
-        DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+        DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
         MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND));
 
         Document doc = new Document();
@@ -305,7 +305,7 @@ public class VersionsTests extends ESTestCase {
         doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE));
         doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
         writer.addDocument(doc);
-        DirectoryReader reader = DirectoryReader.open(writer, false);
+        DirectoryReader reader = DirectoryReader.open(writer);
         // should increase cache size by 1
         assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
         assertEquals(size+1, Versions.lookupStates.size());
@@ -330,7 +330,7 @@ public class VersionsTests extends ESTestCase {
         doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE));
         doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
         writer.addDocument(doc);
-        DirectoryReader reader = DirectoryReader.open(writer, false);
+        DirectoryReader reader = DirectoryReader.open(writer);
         assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
         assertEquals(size+1, Versions.lookupStates.size());
         // now wrap the reader
diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
index 6cc9912924d..a190de5b702 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
@@ -216,6 +216,13 @@ public class ScopedSettingsTests extends ESTestCase {
         } catch (IllegalArgumentException e) {
             assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage());
         }
+
+        try {
+            settings.validate("index.similarity.classic.type", Settings.builder().put("index.similarity.classic.type", "mine").build());
+            fail();
+        } catch (IllegalArgumentException e) {
+            assertEquals("illegal value for [index.similarity.classic] cannot redefine built-in similarity", e.getMessage());
+        }
     }
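Note on the testBoost removal in MultiPhrasePrefixQueryTests above: it reflects another Lucene 6 change, namely that queries no longer carry a mutable boost via setBoost; a boost is expressed by wrapping the query instead. A minimal sketch (field and value are illustrative):

    Query base = new TermQuery(new Term("field", "aaa"));
    Query boosted = new BoostQuery(base, 2.0f);  // immutable wrapper replaces setBoost(2.0f)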
diff --git a/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java b/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java
index 9c702acb2c4..1455b397e74 100644
--- a/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java
+++ b/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java
@@ -22,7 +22,7 @@ package org.elasticsearch.deps.lucene;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.IntField;
+import org.apache.lucene.document.LegacyIntField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.DirectoryReader;
@@ -51,7 +51,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.LegacyNumericUtils;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.test.ESTestCase;
 
@@ -74,9 +74,9 @@ public class SimpleLuceneTests extends ESTestCase {
             document.add(new SortedDocValuesField("str", new BytesRef(text)));
             indexWriter.addDocument(document);
         }
-        IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter, true));
+        IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter));
         IndexSearcher searcher = new IndexSearcher(reader);
-        TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), null, 10, new Sort(new SortField("str", SortField.Type.STRING)));
+        TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("str", SortField.Type.STRING)));
         for (int i = 0; i < 10; i++) {
             FieldDoc fieldDoc = (FieldDoc) docs.scoreDocs[i];
             assertThat((BytesRef) fieldDoc.fields[0], equalTo(new BytesRef(new String(new char[]{(char) (97 + i), (char) (97 + i)}))));
@@ -89,10 +89,10 @@ public class SimpleLuceneTests extends ESTestCase {
 
         Document document = new Document();
         document.add(new TextField("_id", "1", Field.Store.YES));
-        document.add(new IntField("test", 2, IntField.TYPE_STORED));
+        document.add(new LegacyIntField("test", 2, LegacyIntField.TYPE_STORED));
         indexWriter.addDocument(document);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
         TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
         Document doc = searcher.doc(topDocs.scoreDocs[0].doc);
@@ -100,7 +100,7 @@ public class SimpleLuceneTests extends ESTestCase {
         assertThat(f.stringValue(), equalTo("2"));
 
         BytesRefBuilder bytes = new BytesRefBuilder();
-        NumericUtils.intToPrefixCoded(2, 0, bytes);
+        LegacyNumericUtils.intToPrefixCoded(2, 0, bytes);
         topDocs = searcher.search(new TermQuery(new Term("test", bytes.get())), 1);
         doc = searcher.doc(topDocs.scoreDocs[0].doc);
         f = doc.getField("test");
@@ -123,7 +123,7 @@ public class SimpleLuceneTests extends ESTestCase {
         document.add(new TextField("#id", "1", Field.Store.YES));
         indexWriter.addDocument(document);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
         TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
         final ArrayList fieldsOrder = new ArrayList<>();
@@ -162,7 +162,7 @@ public class SimpleLuceneTests extends ESTestCase {
             indexWriter.addDocument(document);
         }
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
         TermQuery query = new TermQuery(new Term("value", "value"));
         TopDocs topDocs = searcher.search(query, 100);
@@ -179,7 +179,7 @@ public class SimpleLuceneTests extends ESTestCase {
     public void testNRTSearchOnClosedWriter() throws Exception {
         Directory dir = new RAMDirectory();
         IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
-        DirectoryReader reader = DirectoryReader.open(indexWriter, true);
+        DirectoryReader reader = DirectoryReader.open(indexWriter);
 
         for (int i = 0; i < 100; i++) {
             Document document = new Document();
@@ -205,26 +205,26 @@ public class SimpleLuceneTests extends ESTestCase {
         IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 
         Document doc = new Document();
-        FieldType type = IntField.TYPE_NOT_STORED;
-        IntField field = new IntField("int1", 1, type);
+        FieldType type = LegacyIntField.TYPE_NOT_STORED;
+        LegacyIntField field = new LegacyIntField("int1", 1, type);
         doc.add(field);
 
-        type = new FieldType(IntField.TYPE_NOT_STORED);
+        type = new FieldType(LegacyIntField.TYPE_NOT_STORED);
         type.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
         type.freeze();
-        field = new IntField("int1", 1, type);
+        field = new LegacyIntField("int1", 1, type);
         doc.add(field);
 
-        field = new IntField("int2", 1, type);
+        field = new LegacyIntField("int2", 1, type);
         doc.add(field);
 
-        field = new IntField("int2", 1, type);
+        field = new LegacyIntField("int2", 1, type);
         doc.add(field);
 
         indexWriter.addDocument(doc);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         LeafReader atomicReader = SlowCompositeReaderWrapper.wrap(reader);
         Terms terms = atomicReader.terms("int1");
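Note on the SimpleLuceneTests hunks above: the pre-points numeric classes are renamed in Lucene 6, with IntField becoming LegacyIntField and NumericUtils becoming LegacyNumericUtils. A minimal sketch of the renamed pair, mirroring how the test now uses them:

    Document document = new Document();
    document.add(new LegacyIntField("test", 2, LegacyIntField.TYPE_STORED));
    // prefix-coded term bytes for the same value, for a term-level lookup
    BytesRefBuilder bytes = new BytesRefBuilder();
    LegacyNumericUtils.intToPrefixCoded(2, 0, bytes);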
diff --git a/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java b/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java
index 66dc0542678..fbb5115903c 100644
--- a/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java
+++ b/core/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.deps.lucene;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
@@ -53,10 +54,14 @@ public class VectorHighlighterTests extends ESTestCase {
 
         Document document = new Document();
         document.add(new TextField("_id", "1", Field.Store.YES));
-        document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+        FieldType vectorsType = new FieldType(TextField.TYPE_STORED);
+        vectorsType.setStoreTermVectors(true);
+        vectorsType.setStoreTermVectorPositions(true);
+        vectorsType.setStoreTermVectorOffsets(true);
+        document.add(new Field("content", "the big bad dog", vectorsType));
         indexWriter.addDocument(document);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
         TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
 
@@ -75,10 +80,14 @@ public class VectorHighlighterTests extends ESTestCase {
 
         Document document = new Document();
         document.add(new TextField("_id", "1", Field.Store.YES));
-        document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+        FieldType vectorsType = new FieldType(TextField.TYPE_STORED);
+        vectorsType.setStoreTermVectors(true);
+        vectorsType.setStoreTermVectorPositions(true);
+        vectorsType.setStoreTermVectorOffsets(true);
+        document.add(new Field("content", "the big bad dog", vectorsType));
         indexWriter.addDocument(document);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
         TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
 
@@ -87,12 +96,12 @@ public class VectorHighlighterTests extends ESTestCase {
 
         FastVectorHighlighter highlighter = new FastVectorHighlighter();
 
         PrefixQuery prefixQuery = new PrefixQuery(new Term("content", "ba"));
-        assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_FILTER_REWRITE.getClass().getName()));
+        assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_REWRITE.getClass().getName()));
         String fragment = highlighter.getBestFragment(highlighter.getFieldQuery(prefixQuery), reader, topDocs.scoreDocs[0].doc, "content", 30);
         assertThat(fragment, nullValue());
 
-        prefixQuery.setRewriteMethod(PrefixQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+        prefixQuery.setRewriteMethod(PrefixQuery.SCORING_BOOLEAN_REWRITE);
         Query rewriteQuery = prefixQuery.rewrite(reader);
         fragment = highlighter.getBestFragment(highlighter.getFieldQuery(rewriteQuery), reader, topDocs.scoreDocs[0].doc, "content", 30);
@@ -100,7 +109,7 @@ public class VectorHighlighterTests extends ESTestCase {
 
         // now check with the custom field query
         prefixQuery = new PrefixQuery(new Term("content", "ba"));
-        assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_FILTER_REWRITE.getClass().getName()));
+        assertThat(prefixQuery.getRewriteMethod().getClass().getName(), equalTo(PrefixQuery.CONSTANT_SCORE_REWRITE.getClass().getName()));
         fragment = highlighter.getBestFragment(new CustomFieldQuery(prefixQuery, reader, highlighter), reader, topDocs.scoreDocs[0].doc, "content", 30);
         assertThat(fragment, notNullValue());
@@ -112,10 +121,14 @@ public class VectorHighlighterTests extends ESTestCase {
 
         Document document = new Document();
         document.add(new TextField("_id", "1", Field.Store.YES));
-        document.add(new Field("content", "the big bad dog", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+        FieldType vectorsType = new FieldType(TextField.TYPE_NOT_STORED);
+        vectorsType.setStoreTermVectors(true);
+        vectorsType.setStoreTermVectorPositions(true);
+        vectorsType.setStoreTermVectorOffsets(true);
+        document.add(new Field("content", "the big bad dog", vectorsType));
         indexWriter.addDocument(document);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
         TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
 
@@ -133,10 +146,10 @@ public class VectorHighlighterTests extends ESTestCase {
 
         Document document = new Document();
         document.add(new TextField("_id", "1", Field.Store.YES));
-        document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
+        document.add(new TextField("content", "the big bad dog", Field.Store.YES));
         indexWriter.addDocument(document);
 
-        IndexReader reader = DirectoryReader.open(indexWriter, true);
+        IndexReader reader = DirectoryReader.open(indexWriter);
         IndexSearcher searcher = new IndexSearcher(reader);
         TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
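Note on the VectorHighlighterTests hunks above: the long-removed Field(String, String, Store, Index, TermVector) constructor is replaced with an explicit FieldType carrying the term-vector options. The replacement pattern, exactly as used throughout that file:

    FieldType vectorsType = new FieldType(TextField.TYPE_STORED);
    vectorsType.setStoreTermVectors(true);
    vectorsType.setStoreTermVectorPositions(true);
    vectorsType.setStoreTermVectorOffsets(true);
    document.add(new Field("content", "the big bad dog", vectorsType));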
a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -275,10 +275,10 @@ public class ZenDiscoveryIT extends ESIntegTestCase { Settings nodeSettings = Settings.settingsBuilder() .put("discovery.type", "zen") // <-- To override the local setting if set externally .build(); - String nodeName = internalCluster().startNode(nodeSettings, Version.V_2_0_0_beta1); + String nodeName = internalCluster().startNode(nodeSettings, Version.V_5_0_0); ZenDiscovery zenDiscovery = (ZenDiscovery) internalCluster().getInstance(Discovery.class, nodeName); ClusterService clusterService = internalCluster().getInstance(ClusterService.class, nodeName); - DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), Version.V_1_6_0); + DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), Version.V_2_0_0); final AtomicReference holder = new AtomicReference<>(); zenDiscovery.handleJoinRequest(node, clusterService.state(), new MembershipAction.JoinCallback() { @Override @@ -292,16 +292,16 @@ public class ZenDiscoveryIT extends ESIntegTestCase { }); assertThat(holder.get(), notNullValue()); - assertThat(holder.get().getMessage(), equalTo("Can't handle join request from a node with a version [1.6.0] that is lower than the minimum compatible version [" + Version.V_2_0_0_beta1.minimumCompatibilityVersion() + "]")); + assertThat(holder.get().getMessage(), equalTo("Can't handle join request from a node with a version [2.0.0] that is lower than the minimum compatible version [" + Version.V_5_0_0.minimumCompatibilityVersion() + "]")); } public void testJoinElectedMaster_incompatibleMinVersion() { - ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_2_0_0_beta1); + ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_5_0_0); - DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_2_0_0_beta1); + DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_5_0_0); assertThat(electMasterService.electMaster(Collections.singletonList(node)), sameInstance(node)); - node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_1_6_0); - assertThat("Can't join master because version 1.6.0 is lower than the minimum compatable version 2.0.0 can support", electMasterService.electMaster(Collections.singletonList(node)), nullValue()); + node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_2_0_0); + assertThat("Can't join master because version 2.0.0 is lower than the minimum compatable version 5.0.0 can support", electMasterService.electMaster(Collections.singletonList(node)), nullValue()); } public void testDiscoveryStats() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java index b247dad069e..88d375699a1 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import 
org.elasticsearch.discovery.zen.ping.PingContextProvider; import org.elasticsearch.discovery.zen.ping.ZenPing; -import org.elasticsearch.node.service.NodeService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -82,11 +81,6 @@ public class UnicastZenPingIT extends ESTestCase { return DiscoveryNodes.builder().put(nodeA).localNodeId("UZP_A").build(); } - @Override - public NodeService nodeService() { - return null; - } - @Override public boolean nodeHasJoinedClusterOnce() { return false; @@ -101,11 +95,6 @@ public class UnicastZenPingIT extends ESTestCase { return DiscoveryNodes.builder().put(nodeB).localNodeId("UZP_B").build(); } - @Override - public NodeService nodeService() { - return null; - } - @Override public boolean nodeHasJoinedClusterOnce() { return true; diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index 224ecbdf619..7e31f6055de 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -43,7 +43,6 @@ import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; import org.elasticsearch.node.Node; -import org.elasticsearch.node.service.NodeService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; @@ -134,11 +133,6 @@ public class PublishClusterStateActionTests extends ESTestCase { return clusterState.nodes(); } - @Override - public NodeService nodeService() { - assert false; - throw new UnsupportedOperationException("Shouldn't be here"); - } } public MockNode createMockNode(final String name) throws Exception { diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index 68a4df685be..64d293e8bd0 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -65,11 +65,6 @@ import static org.hamcrest.Matchers.startsWith; public class GetActionIT extends ESIntegTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(InternalSettingsPlugin.class); // uses index.version.created - } - public void testSimpleGet() { assertAcked(prepareCreate("test") .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)) @@ -324,128 +319,6 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); } - public void testThatGetFromTranslogShouldWorkWithExcludeBackcompat() throws Exception { - String index = "test"; - String type = "type1"; - - String mapping = jsonBuilder() - .startObject() - .startObject(type) - .startObject("_source") - .array("excludes", "excluded") - .endObject() - .endObject() - .endObject() - .string(); - - assertAcked(prepareCreate(index) - .addMapping(type, mapping) - .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)); - - client().prepareIndex(index, type, "1") - .setSource(jsonBuilder().startObject().field("field", "1", 
"2").field("excluded", "should not be seen").endObject()) - .get(); - - GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").get(); - client().admin().indices().prepareFlush(index).get(); - GetResponse responseAfterFlush = client().prepareGet(index, type, "1").get(); - - assertThat(responseBeforeFlush.isExists(), is(true)); - assertThat(responseAfterFlush.isExists(), is(true)); - assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("field")); - assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded"))); - assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString())); - } - - public void testThatGetFromTranslogShouldWorkWithIncludeBackcompat() throws Exception { - String index = "test"; - String type = "type1"; - - String mapping = jsonBuilder() - .startObject() - .startObject(type) - .startObject("_source") - .array("includes", "included") - .endObject() - .endObject() - .endObject() - .string(); - - assertAcked(prepareCreate(index) - .addMapping(type, mapping) - .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)); - - client().prepareIndex(index, type, "1") - .setSource(jsonBuilder().startObject().field("field", "1", "2").field("included", "should be seen").endObject()) - .get(); - - GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").get(); - flush(); - GetResponse responseAfterFlush = client().prepareGet(index, type, "1").get(); - - assertThat(responseBeforeFlush.isExists(), is(true)); - assertThat(responseAfterFlush.isExists(), is(true)); - assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field"))); - assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included")); - assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString())); - } - - @SuppressWarnings("unchecked") - public void testThatGetFromTranslogShouldWorkWithIncludeExcludeAndFieldsBackcompat() throws Exception { - String index = "test"; - String type = "type1"; - - String mapping = jsonBuilder() - .startObject() - .startObject(type) - .startObject("_source") - .array("includes", "included") - .array("excludes", "excluded") - .endObject() - .endObject() - .endObject() - .string(); - - assertAcked(prepareCreate(index) - .addMapping(type, mapping) - .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)); - - client().prepareIndex(index, type, "1") - .setSource(jsonBuilder().startObject() - .field("field", "1", "2") - .startObject("included").field("field", "should be seen").field("field2", "extra field to remove").endObject() - .startObject("excluded").field("field", "should not be seen").field("field2", "should not be seen").endObject() - .endObject()) - .get(); - - GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").setFields("_source", "included.field", "excluded.field").get(); - assertThat(responseBeforeFlush.isExists(), is(true)); - assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded"))); - assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field"))); - assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included")); - - // now tests that extra source filtering works as expected - GetResponse responseBeforeFlushWithExtraFilters = client().prepareGet(index, type, "1").setFields("included.field", "excluded.field") - .setFetchSource(new String[]{"field", "*.field"}, new String[]{"*.field2"}).get(); - 
-        assertThat(responseBeforeFlushWithExtraFilters.isExists(), is(true));
-        assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("excluded")));
-        assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("field")));
-        assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), hasKey("included"));
-        assertThat((Map) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), hasKey("field"));
-        assertThat((Map) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), not(hasKey("field2")));
-
-        flush();
-        GetResponse responseAfterFlush = client().prepareGet(index, type, "1").setFields("_source", "included.field", "excluded.field").get();
-        GetResponse responseAfterFlushWithExtraFilters = client().prepareGet(index, type, "1").setFields("included.field", "excluded.field")
-                .setFetchSource("*.field", "*.field2").get();
-
-        assertThat(responseAfterFlush.isExists(), is(true));
-        assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
-
-        assertThat(responseAfterFlushWithExtraFilters.isExists(), is(true));
-        assertThat(responseBeforeFlushWithExtraFilters.getSourceAsString(), is(responseAfterFlushWithExtraFilters.getSourceAsString()));
-    }
-
     public void testGetWithVersion() {
         assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
                 .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)));
@@ -1002,12 +875,11 @@ public class GetActionIT extends ESIntegTestCase {

     void indexSingleDocumentWithStringFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) {
-        String storedString = stored ? "yes" : "no";
+        String storedString = stored ? "true" : "false";
         String createIndexSource = "{\n" +
                 "  \"settings\": {\n" +
                 "    \"index.translog.flush_threshold_size\": \"1pb\",\n" +
-                "    \"refresh_interval\": \"-1\",\n" +
-                "    \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
+                "    \"refresh_interval\": \"-1\"\n" +
                 "  },\n" +
                 "  \"mappings\": {\n" +
                 "    \"doc\": {\n" +
@@ -1054,12 +926,11 @@ public class GetActionIT extends ESIntegTestCase {
     }

     void indexSingleDocumentWithNumericFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) {
-        String storedString = stored ? "yes" : "no";
+        String storedString = stored ? "true" : "false";
         String createIndexSource = "{\n" +
                 "  \"settings\": {\n" +
                 "    \"index.translog.flush_threshold_size\": \"1pb\",\n" +
-                "    \"refresh_interval\": \"-1\",\n" +
-                "    \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
+                "    \"refresh_interval\": \"-1\"\n" +
                 "  },\n" +
                 "  \"mappings\": {\n" +
                 "    \"doc\": {\n" +
diff --git a/core/src/test/java/org/elasticsearch/index/IndexTests.java b/core/src/test/java/org/elasticsearch/index/IndexTests.java
new file mode 100644
index 00000000000..6ce38c6acba
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/IndexTests.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
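The three testThatGetFromTranslog*Backcompat methods deleted above exercised `_source` includes/excludes only against indices pinned to Version.V_1_4_2; GET-time source filtering itself stays covered through the live API. A minimal sketch of equivalent coverage, not part of the patch, assuming the surrounding ESIntegTestCase helpers (client(), jsonBuilder(), the hamcrest matchers) and illustrative index/field names:

    public void testGetWithSourceFiltering() throws Exception {
        // index a document with one branch to keep and one to filter away
        assertAcked(prepareCreate("test"));
        client().prepareIndex("test", "type1", "1")
                .setSource(jsonBuilder().startObject()
                        .startObject("included").field("field", "should be seen").endObject()
                        .startObject("excluded").field("field", "should not be seen").endObject()
                        .endObject())
                .get();

        // GET is realtime, so no flush/refresh gymnastics are needed
        GetResponse response = client().prepareGet("test", "type1", "1")
                .setFetchSource(new String[]{"included.*"}, new String[]{"excluded.*"})
                .get();
        assertThat(response.isExists(), is(true));
        assertThat(response.getSourceAsMap(), hasKey("included"));
        assertThat(response.getSourceAsMap(), not(hasKey("excluded")));
    }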
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; + +import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; + +public class IndexTests extends ESTestCase { + public void testToString() { + assertEquals("[name/uuid]", new Index("name", "uuid").toString()); + assertEquals("[name]", new Index("name", ClusterState.UNKNOWN_UUID).toString()); + + Index random = new Index(randomSimpleString(random(), 1, 100), + usually() ? Strings.randomBase64UUID(random()) : ClusterState.UNKNOWN_UUID); + assertThat(random.toString(), containsString(random.getName())); + if (ClusterState.UNKNOWN_UUID.equals(random.getUUID())) { + assertThat(random.toString(), not(containsString(random.getUUID()))); + } else { + assertThat(random.toString(), containsString(random.getUUID())); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index 8fd6876b4b2..e3676366511 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.index; import org.apache.lucene.document.Field.Store; -import org.apache.lucene.document.IntField; +import org.apache.lucene.document.LegacyIntField; import org.apache.lucene.document.StringField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.not; public class IndexingSlowLogTests extends ESTestCase { public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { BytesReference source = JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject().bytes(); - ParsedDocument pd = new ParsedDocument(new StringField("uid", "test:id", Store.YES), new IntField("version", 1, Store.YES), "id", + ParsedDocument pd = new ParsedDocument(new StringField("uid", "test:id", Store.YES), new LegacyIntField("version", 1, Store.YES), "id", "test", null, 0, -1, null, source, null); // Turning off document logging doesn't log source[] diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index 1eb1e93f09c..6468fae9397 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -28,6 +28,8 @@ import org.apache.lucene.analysis.fa.PersianNormalizationFilter; import org.apache.lucene.analysis.hunspell.Dictionary; import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter; import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.SimpleFSDirectory; import org.elasticsearch.Version; import 
org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.ModuleTestCase; @@ -106,7 +108,7 @@ public class AnalysisModuleTests extends ModuleTestCase { Settings settings2 = settingsBuilder() .loadFromStream(yaml, getClass().getResourceAsStream(yaml)) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0) .build(); AnalysisRegistry newRegistry = getNewRegistry(settings2); AnalysisService analysisService2 = getAnalysisService(newRegistry, settings2); @@ -119,8 +121,8 @@ public class AnalysisModuleTests extends ModuleTestCase { // analysis service has the expected version assertThat(analysisService2.analyzer("standard").analyzer(), is(instanceOf(StandardAnalyzer.class))); - assertEquals(Version.V_0_90_0.luceneVersion, analysisService2.analyzer("standard").analyzer().getVersion()); - assertEquals(Version.V_0_90_0.luceneVersion, analysisService2.analyzer("thai").analyzer().getVersion()); + assertEquals(Version.V_2_0_0.luceneVersion, analysisService2.analyzer("standard").analyzer().getVersion()); + assertEquals(Version.V_2_0_0.luceneVersion, analysisService2.analyzer("thai").analyzer().getVersion()); assertThat(analysisService2.analyzer("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class))); assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), analysisService2.analyzer("custom7").analyzer().getVersion()); @@ -268,45 +270,6 @@ public class AnalysisModuleTests extends ModuleTestCase { } } - public void testBackwardCompatible() throws IOException { - Settings settings = settingsBuilder() - .put("index.analysis.analyzer.custom1.tokenizer", "standard") - .put("index.analysis.analyzer.custom1.position_offset_gap", "128") - .put("index.analysis.analyzer.custom2.tokenizer", "standard") - .put("index.analysis.analyzer.custom2.position_increment_gap", "256") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, - Version.V_1_7_1)) - .build(); - AnalysisService analysisService = getAnalysisService(settings); - - Analyzer custom1 = analysisService.analyzer("custom1").analyzer(); - assertThat(custom1, instanceOf(CustomAnalyzer.class)); - assertThat(custom1.getPositionIncrementGap("custom1"), equalTo(128)); - - Analyzer custom2 = analysisService.analyzer("custom2").analyzer(); - assertThat(custom2, instanceOf(CustomAnalyzer.class)); - assertThat(custom2.getPositionIncrementGap("custom2"), equalTo(256)); - } - - public void testWithBothSettings() throws IOException { - Settings settings = settingsBuilder() - .put("index.analysis.analyzer.custom.tokenizer", "standard") - .put("index.analysis.analyzer.custom.position_offset_gap", "128") - .put("index.analysis.analyzer.custom.position_increment_gap", "256") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, - Version.V_1_7_1)) - .build(); - try { - getAnalysisService(settings); - fail("Analyzer has both position_offset_gap and position_increment_gap should fail"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Custom Analyzer [custom] defined both [position_offset_gap] and [position_increment_gap]" + - ", use only [position_increment_gap]")); - } - } - public void 
testDeprecatedPositionOffsetGap() throws IOException {
         Settings settings = settingsBuilder()
                 .put("index.analysis.analyzer.custom.tokenizer", "standard")
@@ -328,11 +291,14 @@ public class AnalysisModuleTests extends ModuleTestCase {
                 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                 .build();
-        AnalysisModule module = new AnalysisModule(new Environment(settings));
+        Environment environment = new Environment(settings);
+        AnalysisModule module = new AnalysisModule(environment);
         InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff");
         InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic");
-        Dictionary dictionary = new Dictionary(aff, dic);
-        module.registerHunspellDictionary("foo", dictionary);
-        assertInstanceBinding(module, HunspellService.class, (x) -> x.getDictionary("foo") == dictionary);
+        try (Directory tmp = new SimpleFSDirectory(environment.tmpFile())) {
+            Dictionary dictionary = new Dictionary(tmp, "hunspell", aff, dic);
+            module.registerHunspellDictionary("foo", dictionary);
+            assertInstanceBinding(module, HunspellService.class, (x) -> x.getDictionary("foo") == dictionary);
+        }
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java
deleted file mode 100644
index a163d9e42b4..00000000000
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
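The AnalysisModuleTests hunk above adapts to a Lucene 6 signature change: the Hunspell Dictionary constructor now takes a Directory for temporary files and a temp-file name prefix in front of the affix and dictionary streams. A standalone sketch of the new construction, using the same classpath resources as the test; the Files.createTempDirectory location is an assumption for illustration:

    // Dictionary(Directory tempDir, String tempFileNamePrefix, InputStream affix, InputStream dictionary)
    private Dictionary loadEnUsDictionary() throws Exception {
        try (Directory tmp = new SimpleFSDirectory(Files.createTempDirectory("hunspell"));
             InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff");
             InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic")) {
            // the temp directory only backs dictionary construction, so it can be
            // closed as soon as the Dictionary has been built, as the patch does
            return new Dictionary(tmp, "hunspell", aff, dic);
        }
    }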
- */ -package org.elasticsearch.index.analysis; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.test.ESTokenStreamTestCase; - -import java.io.IOException; - -import static com.carrotsearch.randomizedtesting.RandomizedTest.scaledRandomIntBetween; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; - -public class AnalyzerBackwardsCompatTests extends ESTokenStreamTestCase { - - private void assertNoStopwordsAfter(org.elasticsearch.Version noStopwordVersion, String type) throws IOException { - final int iters = scaledRandomIntBetween(10, 100); - org.elasticsearch.Version version = org.elasticsearch.Version.CURRENT; - for (int i = 0; i < iters; i++) { - Settings.Builder builder = Settings.settingsBuilder().put("index.analysis.filter.my_stop.type", "stop"); - if (version.onOrAfter(noStopwordVersion)) { - if (random().nextBoolean()) { - builder.put(SETTING_VERSION_CREATED, version); - } - } else { - builder.put(SETTING_VERSION_CREATED, version); - } - builder.put("index.analysis.analyzer.foo.type", type); - builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()); - AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build()); - NamedAnalyzer analyzer = analysisService.analyzer("foo"); - assertNotNull(analyzer); - if (version.onOrAfter(noStopwordVersion)) { - assertAnalyzesTo(analyzer, "this is bogus", new String[]{"this", "is", "bogus"}); - } else { - assertAnalyzesTo(analyzer, "this is bogus", new String[]{"bogus"}); - } - version = randomVersion(); - } - } - - public void testPatternAnalyzer() throws IOException { - assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_RC1, "pattern"); - } - - public void testStandardHTMLStripAnalyzer() throws IOException { - assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_RC1, "standard_html_strip"); - } - - public void testStandardAnalyzer() throws IOException { - assertNoStopwordsAfter(org.elasticsearch.Version.V_1_0_0_Beta1, "standard"); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java index 9d8efb1de4b..5e1cf2e8179 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; -import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenFilter; import org.apache.lucene.analysis.reverse.ReverseStringFilter; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -120,45 +119,20 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase { final Index index = new Index("test", "_na_"); final String name = "ngr"; Version v = randomVersion(random()); - if (v.onOrAfter(Version.V_0_90_2)) { - Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3); - boolean compatVersion = false; - if ((compatVersion = random().nextBoolean())) { - builder.put("version", "4." 
+ random().nextInt(3)); - } - boolean reverse = random().nextBoolean(); - if (reverse) { - builder.put("side", "back"); - } - Settings settings = builder.build(); - Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, v.id).build(); - Tokenizer tokenizer = new MockTokenizer(); - tokenizer.setReader(new StringReader("foo bar")); - TokenStream edgeNGramTokenFilter = new EdgeNGramTokenFilterFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings).create(tokenizer); - if (reverse) { - assertThat(edgeNGramTokenFilter, instanceOf(ReverseStringFilter.class)); - } else if (compatVersion) { - assertThat(edgeNGramTokenFilter, instanceOf(Lucene43EdgeNGramTokenFilter.class)); - } else { - assertThat(edgeNGramTokenFilter, instanceOf(EdgeNGramTokenFilter.class)); - } - + Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3); + boolean reverse = random().nextBoolean(); + if (reverse) { + builder.put("side", "back"); + } + Settings settings = builder.build(); + Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, v.id).build(); + Tokenizer tokenizer = new MockTokenizer(); + tokenizer.setReader(new StringReader("foo bar")); + TokenStream edgeNGramTokenFilter = new EdgeNGramTokenFilterFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings).create(tokenizer); + if (reverse) { + assertThat(edgeNGramTokenFilter, instanceOf(ReverseStringFilter.class)); } else { - Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3); - boolean reverse = random().nextBoolean(); - if (reverse) { - builder.put("side", "back"); - } - Settings settings = builder.build(); - Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, v.id).build(); - Tokenizer tokenizer = new MockTokenizer(); - tokenizer.setReader(new StringReader("foo bar")); - TokenStream edgeNGramTokenFilter = new EdgeNGramTokenFilterFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings).create(tokenizer); - if (reverse) { - assertThat(edgeNGramTokenFilter, instanceOf(ReverseStringFilter.class)); - } else { - assertThat(edgeNGramTokenFilter, instanceOf(Lucene43EdgeNGramTokenFilter.class)); - } + assertThat(edgeNGramTokenFilter, instanceOf(EdgeNGramTokenFilter.class)); } } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java index 89940558d51..10d3d3554dd 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/NumericAnalyzerTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.NumericTokenStream; -import org.apache.lucene.analysis.NumericTokenStream.NumericTermAttribute; +import org.apache.lucene.analysis.LegacyNumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.elasticsearch.test.ESTestCase; @@ -37,10 +37,10 @@ public class NumericAnalyzerTests extends ESTestCase { NumericDoubleAnalyzer analyzer = new NumericDoubleAnalyzer(precisionStep); final TokenStream ts1 = analyzer.tokenStream("dummy", String.valueOf(value)); - final 
NumericTokenStream ts2 = new NumericTokenStream(precisionStep);
+        final LegacyNumericTokenStream ts2 = new LegacyNumericTokenStream(precisionStep);
         ts2.setDoubleValue(value);
-        final NumericTermAttribute numTerm1 = ts1.addAttribute(NumericTermAttribute.class);
-        final NumericTermAttribute numTerm2 = ts1.addAttribute(NumericTermAttribute.class);
+        final LegacyNumericTermAttribute numTerm1 = ts1.addAttribute(LegacyNumericTermAttribute.class);
+        final LegacyNumericTermAttribute numTerm2 = ts1.addAttribute(LegacyNumericTermAttribute.class);
         final PositionIncrementAttribute posInc1 = ts1.addAttribute(PositionIncrementAttribute.class);
         final PositionIncrementAttribute posInc2 = ts1.addAttribute(PositionIncrementAttribute.class);
         ts1.reset();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactoryTests.java
deleted file mode 100644
index 2cb8f99e7b8..00000000000
--- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactoryTests.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.analysis;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.indices.analysis.PreBuiltAnalyzers;
-import org.elasticsearch.test.ESTestCase;
-
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.not;
-
-/**
- *
- */
-public class PreBuiltAnalyzerProviderFactoryTests extends ESTestCase {
-    public void testVersioningInFactoryProvider() throws Exception {
-        PreBuiltAnalyzerProviderFactory factory = new PreBuiltAnalyzerProviderFactory("default", AnalyzerScope.INDEX, PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT));
-
-        AnalyzerProvider former090AnalyzerProvider = factory.create("default", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build());
-        AnalyzerProvider currentAnalyzerProviderReference = factory.create("default", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
-
-        // would love to access the version inside of the lucene analyzer, but that is not possible...
-        assertThat(currentAnalyzerProviderReference, is(not(former090AnalyzerProvider)));
-    }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java
index fbb69ea1eb0..06a242c8277 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java
@@ -59,20 +59,18 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {

     public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() {
         assertThat(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.CURRENT),
-                is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_0_18_0)));
+                is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_2_0_0)));
     }

     public void testThatInstancesAreCachedAndReused() {
-        assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT),
-                is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT)));
-        assertThat(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_0_18_0),
-                is(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_0_18_0)));
-    }
+        assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT),
+                PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT));
+        // same lucene version should be cached
+        assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0),
+                PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_1));

-    public void testThatInstancesWithSameLuceneVersionAreReused() {
-        // both are lucene 4.4 and should return the same instance
-        assertThat(PreBuiltAnalyzers.CATALAN.getAnalyzer(Version.V_0_90_4),
-                is(PreBuiltAnalyzers.CATALAN.getAnalyzer(Version.V_0_90_5)));
+        assertNotSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0),
+                PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_2_0));
     }

     public void testThatAnalyzersAreUsedInMapping() throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactoryTests.java
deleted file mode 100644
index 39de728a484..00000000000
--- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactoryTests.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
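The NumericAnalyzerTests hunk further up is a mechanical rename: Lucene 6 moves the trie-encoding numeric token stream and its attribute to Legacy* names (org.apache.lucene.analysis.LegacyNumericTokenStream). A minimal sketch of the renamed API, with an arbitrary precision step and value, inside any method that may throw IOException:

    LegacyNumericTokenStream stream = new LegacyNumericTokenStream(4); // precisionStep = 4
    stream.setDoubleValue(2.5d);
    LegacyNumericTermAttribute term = stream.addAttribute(LegacyNumericTermAttribute.class);
    stream.reset();
    while (stream.incrementToken()) {
        long raw = term.getRawValue(); // prefix-encoded value at the current shift
    }
    stream.close();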
- */
-package org.elasticsearch.index.analysis;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.indices.analysis.PreBuiltCharFilters;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-
-import static org.hamcrest.CoreMatchers.is;
-
-/**
- *
- */
-public class PreBuiltCharFilterFactoryFactoryTests extends ESTestCase {
-    public void testThatDifferentVersionsCanBeLoaded() throws IOException {
-        PreBuiltCharFilterFactoryFactory factory = new PreBuiltCharFilterFactoryFactory(PreBuiltCharFilters.HTML_STRIP.getCharFilterFactory(Version.CURRENT));
-
-        CharFilterFactory former090TokenizerFactory = factory.get(null, null, "html_strip", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build());
-        CharFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "html_strip", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0).build());
-        CharFilterFactory currentTokenizerFactory = factory.get(null, null, "html_strip", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
-
-        assertThat(currentTokenizerFactory, is(former090TokenizerFactory));
-        assertThat(currentTokenizerFactory, is(former090TokenizerFactoryCopy));
-    }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenFilterFactoryFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenFilterFactoryFactoryTests.java
deleted file mode 100644
index 670df069926..00000000000
--- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenFilterFactoryFactoryTests.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
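The deleted PreBuilt*FactoryFactory tests pinned version-keyed caching indirectly, through per-index wrapper factories; after this change the invariant is asserted directly on the registry with identity checks, as in the rewritten PreBuiltAnalyzerTests above. The contract in miniature (assuming, as that test does, that 2.0.0 and 2.0.1 share a Lucene version while 2.2.0 does not):

    Analyzer arabic200 = PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0);
    Analyzer arabic201 = PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_1);
    Analyzer arabic220 = PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_2_0);
    assertSame(arabic200, arabic201);    // same backing Lucene version -> cached instance
    assertNotSame(arabic200, arabic220); // different Lucene version -> fresh instance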
- */ -package org.elasticsearch.index.analysis; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.analysis.PreBuiltTokenFilters; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; - -/** - * - */ -public class PreBuiltTokenFilterFactoryFactoryTests extends ESTestCase { - public void testThatCachingWorksForCachingStrategyOne() throws IOException { - PreBuiltTokenFilterFactoryFactory factory = new PreBuiltTokenFilterFactoryFactory(PreBuiltTokenFilters.WORD_DELIMITER.getTokenFilterFactory(Version.CURRENT)); - - TokenFilterFactory former090TokenizerFactory = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build()); - TokenFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build()); - TokenFilterFactory currentTokenizerFactory = factory.get(null, null, "word_delimiter", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()); - - assertThat(currentTokenizerFactory, is(former090TokenizerFactory)); - assertThat(currentTokenizerFactory, is(former090TokenizerFactoryCopy)); - } - - public void testThatDifferentVersionsCanBeLoaded() throws IOException { - PreBuiltTokenFilterFactoryFactory factory = new PreBuiltTokenFilterFactoryFactory(PreBuiltTokenFilters.STOP.getTokenFilterFactory(Version.CURRENT)); - - TokenFilterFactory former090TokenizerFactory = factory.get(null, null, "stop", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build()); - TokenFilterFactory former090TokenizerFactoryCopy = factory.get(null, null, "stop", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build()); - TokenFilterFactory currentTokenizerFactory = factory.get(null, null, "stop", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()); - - assertThat(currentTokenizerFactory, is(not(former090TokenizerFactory))); - assertThat(former090TokenizerFactory, is(former090TokenizerFactoryCopy)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenizerFactoryFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenizerFactoryFactoryTests.java deleted file mode 100644 index 162dbb36424..00000000000 --- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltTokenizerFactoryFactoryTests.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.analysis; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.analysis.PreBuiltTokenizers; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; - -/** - * - */ -public class PreBuiltTokenizerFactoryFactoryTests extends ESTestCase { - public void testThatDifferentVersionsCanBeLoaded() throws IOException { - PreBuiltTokenizerFactoryFactory factory = new PreBuiltTokenizerFactoryFactory(PreBuiltTokenizers.STANDARD.getTokenizerFactory(Version.CURRENT)); - - // different es versions, same lucene version, thus cached - TokenizerFactory former090TokenizerFactory = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_1).build()); - TokenizerFactory former090TokenizerFactoryCopy = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_2).build()); - TokenizerFactory currentTokenizerFactory = factory.get(null, null, "standard", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()); - - assertThat(currentTokenizerFactory, is(not(former090TokenizerFactory))); - assertThat(currentTokenizerFactory, is(not(former090TokenizerFactoryCopy))); - assertThat(former090TokenizerFactory, is(former090TokenizerFactoryCopy)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java index 37844dce69d..f0a6077b497 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java @@ -40,10 +40,9 @@ import static org.hamcrest.Matchers.instanceOf; * */ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { - public void testEnglishBackwardsCompatibility() throws IOException { + public void testEnglishFilterFactory() throws IOException { int iters = scaledRandomIntBetween(20, 100); for (int i = 0; i < iters; i++) { - Version v = VersionUtils.randomVersion(random()); Settings settings = Settings.settingsBuilder() .put("index.analysis.filter.my_english.type", "stemmer") @@ -61,19 +60,13 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { tokenizer.setReader(new StringReader("foo bar")); TokenStream create = tokenFilter.create(tokenizer); NamedAnalyzer analyzer = analysisService.analyzer("my_english"); - - if (v.onOrAfter(Version.V_1_3_0)) { - assertThat(create, instanceOf(PorterStemFilter.class)); - assertAnalyzesTo(analyzer, "consolingly", new String[]{"consolingli"}); - } else { - assertThat(create, instanceOf(SnowballFilter.class)); - assertAnalyzesTo(analyzer, "consolingly", new String[]{"consol"}); - } + assertThat(create, instanceOf(PorterStemFilter.class)); + assertAnalyzesTo(analyzer, "consolingly", new String[]{"consolingli"}); } } - public void testPorter2BackwardsCompatibility() throws IOException { + public void testPorter2FilterFactory() throws IOException { int iters = scaledRandomIntBetween(20, 100); for (int i = 0; i < iters; i++) { @@ -95,12 
+88,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { TokenStream create = tokenFilter.create(tokenizer); NamedAnalyzer analyzer = analysisService.analyzer("my_porter2"); assertThat(create, instanceOf(SnowballFilter.class)); - - if (v.onOrAfter(Version.V_1_3_0)) { - assertAnalyzesTo(analyzer, "possibly", new String[]{"possibl"}); - } else { - assertAnalyzesTo(analyzer, "possibly", new String[]{"possibli"}); - } + assertAnalyzesTo(analyzer, "possibly", new String[]{"possibl"}); } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java index 2804f522afa..d319ab44319 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.core.Lucene43StopFilter; import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter; @@ -57,14 +56,8 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { public void testCorrectPositionIncrementSetting() throws IOException { Builder builder = Settings.settingsBuilder().put("index.analysis.filter.my_stop.type", "stop"); - int thingToDo = random().nextInt(3); - if (thingToDo == 0) { + if (random().nextBoolean()) { builder.put("index.analysis.filter.my_stop.version", Version.LATEST); - } else if (thingToDo == 1) { - builder.put("index.analysis.filter.my_stop.version", Version.LUCENE_4_0); - if (random().nextBoolean()) { - builder.put("index.analysis.filter.my_stop.enable_position_increments", true); - } } else { // don't specify } @@ -75,27 +68,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { Tokenizer tokenizer = new WhitespaceTokenizer(); tokenizer.setReader(new StringReader("foo bar")); TokenStream create = tokenFilter.create(tokenizer); - if (thingToDo == 1) { - assertThat(create, instanceOf(Lucene43StopFilter.class)); - } else { - assertThat(create, instanceOf(StopFilter.class)); - } - } - - public void testDeprecatedPositionIncrementSettingWithVersions() throws IOException { - Settings settings = Settings.settingsBuilder() - .put("index.analysis.filter.my_stop.type", "stop") - .put("index.analysis.filter.my_stop.enable_position_increments", false) - .put("index.analysis.filter.my_stop.version", "4.3") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); - AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); - TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop"); - assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class)); - Tokenizer tokenizer = new WhitespaceTokenizer(); - tokenizer.setReader(new StringReader("foo bar")); - TokenStream create = tokenFilter.create(tokenizer); - assertThat(create, instanceOf(Lucene43StopFilter.class)); + assertThat(create, instanceOf(StopFilter.class)); } public void testThatSuggestStopFilterWorks() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java index 
a041694dde6..c23875f8a9a 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java @@ -146,23 +146,4 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase tokenizer.setReader(new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); } - - /** Back compat: - * old offset order when doing both parts and concatenation: PowerShot is a synonym of Shot */ - public void testDeprecatedPartsAndCatenate() throws IOException { - AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") - .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") - .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") - .put("index.analysis.filter.my_word_delimiter.version", "4.7") - .build()); - TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); - String source = "PowerShot"; - String[] expected = new String[]{"Power", "Shot", "PowerShot" }; - Tokenizer tokenizer = new WhitespaceTokenizer(); - tokenizer.setReader(new StringReader(source)); - assertTokenStreamContents(tokenFilter.create(tokenizer), expected); - } - } diff --git a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 18714fe61ef..e82ed61fbed 100644 --- a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -87,7 +87,7 @@ public class BitSetFilterCacheTests extends ESTestCase { writer.addDocument(document); writer.commit(); - DirectoryReader reader = DirectoryReader.open(writer, false); + DirectoryReader reader = DirectoryReader.open(writer); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0)); IndexSearcher searcher = new IndexSearcher(reader); @@ -112,7 +112,7 @@ public class BitSetFilterCacheTests extends ESTestCase { writer.forceMerge(1); reader.close(); - reader = DirectoryReader.open(writer, false); + reader = DirectoryReader.open(writer); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0)); searcher = new IndexSearcher(reader); @@ -138,7 +138,7 @@ public class BitSetFilterCacheTests extends ESTestCase { document.add(new StringField("field", "value", Field.Store.NO)); writer.addDocument(document); writer.commit(); - final DirectoryReader writerReader = DirectoryReader.open(writer, false); + final DirectoryReader writerReader = DirectoryReader.open(writer); final IndexReader reader = ElasticsearchDirectoryReader.wrap(writerReader, new ShardId("test", "_na_", 0)); final AtomicLong stats = new AtomicLong(); @@ -211,7 +211,7 @@ public class BitSetFilterCacheTests extends ESTestCase { newIndexWriterConfig() ); writer.addDocument(new Document()); - DirectoryReader reader = DirectoryReader.open(writer, true); + DirectoryReader reader = DirectoryReader.open(writer); writer.close(); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test2", "_na_", 0)); diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java 
b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 3d912d41c38..4fb31bb4ea9 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -20,18 +20,12 @@ package org.elasticsearch.index.codec; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene40.Lucene40Codec; -import org.apache.lucene.codecs.lucene41.Lucene41Codec; -import org.apache.lucene.codecs.lucene410.Lucene410Codec; -import org.apache.lucene.codecs.lucene42.Lucene42Codec; -import org.apache.lucene.codecs.lucene45.Lucene45Codec; -import org.apache.lucene.codecs.lucene46.Lucene46Codec; -import org.apache.lucene.codecs.lucene49.Lucene49Codec; import org.apache.lucene.codecs.lucene50.Lucene50Codec; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; import org.apache.lucene.codecs.lucene53.Lucene53Codec; import org.apache.lucene.codecs.lucene54.Lucene54Codec; +import org.apache.lucene.codecs.lucene60.Lucene60Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -64,16 +58,10 @@ public class CodecTests extends ESTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene54Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Lucene60Codec.class)); + assertThat(codecService.codec("Lucene54"), instanceOf(Lucene54Codec.class)); assertThat(codecService.codec("Lucene53"), instanceOf(Lucene53Codec.class)); assertThat(codecService.codec("Lucene50"), instanceOf(Lucene50Codec.class)); - assertThat(codecService.codec("Lucene410"), instanceOf(Lucene410Codec.class)); - assertThat(codecService.codec("Lucene49"), instanceOf(Lucene49Codec.class)); - assertThat(codecService.codec("Lucene46"), instanceOf(Lucene46Codec.class)); - assertThat(codecService.codec("Lucene45"), instanceOf(Lucene45Codec.class)); - assertThat(codecService.codec("Lucene40"), instanceOf(Lucene40Codec.class)); - assertThat(codecService.codec("Lucene41"), instanceOf(Lucene41Codec.class)); - assertThat(codecService.codec("Lucene42"), instanceOf(Lucene42Codec.class)); } public void testDefault() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/engine/CommitStatsTests.java b/core/src/test/java/org/elasticsearch/index/engine/CommitStatsTests.java deleted file mode 100644 index 8d9c313a9a2..00000000000 --- a/core/src/test/java/org/elasticsearch/index/engine/CommitStatsTests.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -package org.elasticsearch.index.engine; - -import org.apache.lucene.index.SegmentInfos; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; -import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; -import org.elasticsearch.test.ESTestCase; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; - -import static org.elasticsearch.test.VersionUtils.randomVersion; - - -public class CommitStatsTests extends ESTestCase { - public void testStreamingWithNullId() throws IOException { - SegmentInfos segmentInfos = new SegmentInfos(); - CommitStats commitStats = new CommitStats(segmentInfos); - org.elasticsearch.Version targetNodeVersion = randomVersion(random()); - - ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); - OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); - out.setVersion(targetNodeVersion); - commitStats.writeTo(out); - - ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); - InputStreamStreamInput in = new InputStreamStreamInput(inBuffer); - in.setVersion(targetNodeVersion); - CommitStats readCommitStats = CommitStats.readCommitStatsFrom(in); - assertNull(readCommitStats.getId()); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index 37e530cc7f4..b6ae9948675 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -170,7 +170,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes assertValues(bytesValues, 1, one()); assertValues(bytesValues, 2, three()); - IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true)); + IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null)))); assertThat(topDocs.totalHits, equalTo(3)); assertThat(topDocs.scoreDocs.length, equalTo(3)); @@ -226,7 +226,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes fillExtendedMvSet(); IndexFieldData indexFieldData = getForField("value"); - IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true)); + IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null)))); assertThat(topDocs.totalHits, equalTo(8)); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 5c229545755..6f8b5a45df0 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -150,7 +150,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase { if (readerContext != null) { readerContext.reader().close(); } - topLevelReader = 
ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); + topLevelReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); LeafReader reader = SlowCompositeReaderWrapper.wrap(topLevelReader); readerContext = reader.getContext(); return readerContext; diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index 31a17a684ee..15e4790ca9d 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -265,7 +265,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI final IndexFieldData indexFieldData = getForField("value"); final String missingValue = values[1]; - IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true)); + IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); XFieldComparatorSource comparator = indexFieldData.comparatorSource(missingValue, MultiValueMode.MIN, null); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(new SortField("value", comparator, reverse))); assertEquals(numDocs, topDocs.totalHits); @@ -319,7 +319,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI } } final IndexFieldData indexFieldData = getForField("value"); - IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true)); + IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); XFieldComparatorSource comparator = indexFieldData.comparatorSource(first ? "_first" : "_last", MultiValueMode.MIN, null); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? 
numDocs : randomIntBetween(10, numDocs), new Sort(new SortField("value", comparator, reverse))); assertEquals(numDocs, topDocs.totalHits); @@ -387,7 +387,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI writer.commit(); } } - DirectoryReader directoryReader = DirectoryReader.open(writer, true); + DirectoryReader directoryReader = DirectoryReader.open(writer); directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0)); IndexSearcher searcher = new IndexSearcher(directoryReader); IndexFieldData fieldData = getForField("text"); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java index 26ea97dbf15..7ad8653260e 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/DuelFieldDataTests.java @@ -125,7 +125,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre); duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre); - DirectoryReader perSegment = DirectoryReader.open(writer, true); + DirectoryReader perSegment = DirectoryReader.open(writer); CompositeReaderContext composite = perSegment.getContext(); List leaves = composite.leaves(); for (LeafReaderContext atomicReaderContext : leaves) { @@ -203,7 +203,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { duelFieldDataLong(random, context, leftFieldData, rightFieldData); duelFieldDataLong(random, context, rightFieldData, leftFieldData); - DirectoryReader perSegment = DirectoryReader.open(writer, true); + DirectoryReader perSegment = DirectoryReader.open(writer); CompositeReaderContext composite = perSegment.getContext(); List leaves = composite.leaves(); for (LeafReaderContext atomicReaderContext : leaves) { @@ -283,7 +283,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { duelFieldDataDouble(random, context, leftFieldData, rightFieldData); duelFieldDataDouble(random, context, rightFieldData, leftFieldData); - DirectoryReader perSegment = DirectoryReader.open(writer, true); + DirectoryReader perSegment = DirectoryReader.open(writer); CompositeReaderContext composite = perSegment.getContext(); List leaves = composite.leaves(); for (LeafReaderContext atomicReaderContext : leaves) { @@ -341,7 +341,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre); duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre); - DirectoryReader perSegment = DirectoryReader.open(writer, true); + DirectoryReader perSegment = DirectoryReader.open(writer); CompositeReaderContext composite = perSegment.getContext(); List leaves = composite.leaves(); for (LeafReaderContext atomicReaderContext : leaves) { @@ -449,7 +449,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase { duelFieldDataGeoPoint(random, context, leftFieldData, rightFieldData, precision); duelFieldDataGeoPoint(random, context, rightFieldData, leftFieldData, precision); - DirectoryReader perSegment = DirectoryReader.open(writer, true); + DirectoryReader perSegment = DirectoryReader.open(writer); CompositeReaderContext composite = perSegment.getContext(); List leaves = composite.leaves(); for (LeafReaderContext atomicReaderContext : leaves) { diff --git 
a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index 101e7368353..2d204d1003a 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -114,7 +114,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase { Document doc = new Document(); doc.add(new StringField("s", "thisisastring", Store.NO)); writer.addDocument(doc); - DirectoryReader open = DirectoryReader.open(writer, true); + DirectoryReader open = DirectoryReader.open(writer); final boolean wrap = randomBoolean(); final IndexReader reader = wrap ? ElasticsearchDirectoryReader.wrap(open, new ShardId("test", "_na_", 1)) : open; final AtomicInteger onCacheCalled = new AtomicInteger(); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java index 1e0d8ecdf00..9e1b5d9d167 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java @@ -165,7 +165,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { public void testSorting() throws Exception { IndexFieldData indexFieldData = getForField(parentType); - IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true)); + IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); IndexFieldData.XFieldComparatorSource comparator = indexFieldData.comparatorSource("_last", MultiValueMode.MIN, null); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.joinField(parentType), comparator, false))); @@ -211,7 +211,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { public void testThreads() throws Exception { final ParentChildIndexFieldData indexFieldData = getForField(childType); - final DirectoryReader reader = DirectoryReader.open(writer, true); + final DirectoryReader reader = DirectoryReader.open(writer); final IndexParentChildFieldData global = indexFieldData.loadGlobal(reader); final AtomicReference error = new AtomicReference<>(); final int numThreads = scaledRandomIntBetween(3, 8); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 53d5e1744eb..191ce5d477e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -433,19 +433,6 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values")); } - - mapping = jsonBuilder().startObject().startObject("type") - .startObject("_all") - .startObject("fielddata") - .field("format", "doc_values") - .endObject().endObject().endObject().endObject().string(); - Settings legacySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - try { - createIndex("test_old", legacySettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - fail(); - } catch 
(MapperParsingException e) { - assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values")); - } } public void testAutoBoost() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java index 7bed3ce091f..74fc98fddbe 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java @@ -85,7 +85,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { try (Directory dir = new RAMDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(getRandom())))) { w.addDocuments(doc.docs()); - try (DirectoryReader reader = DirectoryReader.open(w, true)) { + try (DirectoryReader reader = DirectoryReader.open(w)) { final LeafReader leaf = reader.leaves().get(0).reader(); // boolean fields are indexed and have doc values by default assertEquals(new BytesRef("T"), leaf.terms("field").iterator().next()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java index bdb3f9762ef..8af92f266a5 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; import java.io.IOException; +import java.util.Arrays; import static org.hamcrest.Matchers.equalTo; @@ -200,4 +201,35 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals(1, fields.length); assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType()); } + + public void testIndexOptions() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "keyword") + .field("index_options", "freqs").endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions()); + + for (String indexOptions : Arrays.asList("positions", "offsets")) { + final String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "keyword") + .field("index_options", indexOptions).endObject().endObject() + .endObject().endObject().string(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping2))); + assertEquals("The [keyword] field does not support positions, got [index_options]=" + indexOptions, e.getMessage()); + } + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java index 
3056b63b4c0..4f4bbc65699 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java @@ -19,11 +19,11 @@ package org.elasticsearch.index.mapper.date; -import org.apache.lucene.analysis.NumericTokenStream.NumericTermAttribute; +import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; @@ -189,7 +189,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { TokenStream tokenStream = doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null); tokenStream.reset(); - NumericTermAttribute nta = tokenStream.addAttribute(NumericTermAttribute.class); + LegacyNumericTermAttribute nta = tokenStream.addAttribute(LegacyNumericTermAttribute.class); List values = new ArrayList<>(); while(tokenStream.incrementToken()) { values.add(nta.getRawValue()); @@ -197,7 +197,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { tokenStream = doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null); tokenStream.reset(); - nta = tokenStream.addAttribute(NumericTermAttribute.class); + nta = tokenStream.addAttribute(LegacyNumericTermAttribute.class); int pos = 0; while(tokenStream.incrementToken()) { assertThat(values.get(pos++), equalTo(nta.getRawValue())); @@ -256,10 +256,10 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { .bytes()); assertThat(((LongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis()))); - NumericRangeQuery rangeQuery; + LegacyNumericRangeQuery rangeQuery; try { SearchContext.setCurrent(new TestSearchContext(null)); - rangeQuery = (NumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true).rewrite(null); + rangeQuery = (LegacyNumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true).rewrite(null); } finally { SearchContext.removeCurrent(); } @@ -282,10 +282,10 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { .bytes()); assertThat(((LongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis()))); - NumericRangeQuery rangeQuery; + LegacyNumericRangeQuery rangeQuery; try { SearchContext.setCurrent(new TestSearchContext(null)); - rangeQuery = (NumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true).rewrite(null); + rangeQuery = (LegacyNumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true).rewrite(null); } finally { SearchContext.removeCurrent(); } diff --git 
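
The file that follows is part of the same mechanical migration as the Gradle dependency change: Spatial4j 0.6 moved from the com.spatial4j.core packages to org.locationtech.spatial4j under the LocationTech umbrella, so only the imports change. A small usage sketch, assuming the org.locationtech.spatial4j:spatial4j 0.6 artifact (the coordinates are made up):

import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.distance.DistanceUtils;
import org.locationtech.spatial4j.shape.Point;

public class Spatial4jRelocationExample {
    public static void main(String[] args) {
        // same API surface as com.spatial4j.core; only the package prefix changed
        SpatialContext ctx = SpatialContext.GEO;
        Point point = ctx.makePoint(-71.06, 42.36); // x = lon, y = lat
        double km = DistanceUtils.degrees2Dist(1.0, DistanceUtils.EARTH_MEAN_RADIUS_KM);
        System.out.println(point + "; one degree is about " + km + " km");
    }
}
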
a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index 03c14ee1a45..8c25713ce3d 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.mapper.externalvalues; -import com.spatial4j.core.shape.Point; +import org.locationtech.spatial4j.shape.Point; import org.apache.lucene.document.Field; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java index 558e3bc83fb..9d6236234af 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java @@ -56,7 +56,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { } public void testExternalValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); IndexService indexService = createIndex("test", settings); MapperRegistry mapperRegistry = new MapperRegistry( @@ -101,7 +101,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { } public void testExternalValuesWithMultifield() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); IndexService indexService = createIndex("test", settings); Map mapperParsers = new HashMap<>(); @@ -159,7 +159,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { } public void testExternalValuesWithMultifieldTwoLevels() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); IndexService indexService = createIndex("test", settings); Map mapperParsers = new HashMap<>(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index ed6c574a865..6b9282e2704 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -66,7 +66,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, 
Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -96,7 +96,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .field("geohash", true).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -116,7 +116,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -136,7 +136,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -156,7 +156,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -172,7 +172,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } public void testNormalizeLatLonValuesDefault() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); // default to normalize XContentBuilder mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); @@ -222,7 +222,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } public void testValidateLatLonValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true); if (version.before(Version.V_2_2_0)) { @@ -285,7 +285,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } public void testNoValidateLatLonValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true); if (version.before(Version.V_2_2_0)) { @@ -332,7 +332,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -359,7 +359,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -395,7 +395,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -419,7 +419,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { 
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", true).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -445,7 +445,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -481,7 +481,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -506,7 +506,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("point").field("match", "point*").startObject("mapping").field("type", "geo_point") .field("lat_lon", true).endObject().endObject().endObject().endArray().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -530,7 +530,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -556,7 +556,7 @@ public class 
GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -699,7 +699,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().string(); // create index and add a test point (dr5regy6rc6z) - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings) .addMapping("pin", mapping); @@ -724,7 +724,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().endObject().string(); // create index and add a test point (dr5regy6rc6z) - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings) .addMapping("pin", mapping); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java index 5de6c517ab2..bd23817ba50 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java @@ -57,7 +57,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false) .endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -81,7 +81,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = 
Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -105,7 +105,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) .endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -126,7 +126,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) .field("geohash_precision", 10).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); @@ -140,7 +140,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("geohash_precision", "5m").endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); @@ -154,7 +154,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java index d171430dfff..05677d0ed8f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java 
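
The next few test files track the Lucene 6 numerics rename: the trie-based classes (NumericTokenStream, NumericRangeQuery, FieldType.NumericType) survive only under a Legacy prefix, with dimensional points as their replacement. A hedged sketch of both sides, assuming lucene-core 6.x (the field name and bounds are illustrative):

import org.apache.lucene.document.IntPoint;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;

public class NumericRenameExample {
    public static void main(String[] args) {
        // querying an old trie-encoded field: same semantics, new Legacy name
        Query legacy = LegacyNumericRangeQuery.newIntRange("age", 23, 54, true, false);
        // the replacement encoding for newly written indices; point ranges are inclusive
        Query points = IntPoint.newRangeQuery("age", 23, 53);
        System.out.println(legacy + " vs " + points);
    }
}
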
+++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/DoubleIndexingDocTests.java @@ -64,7 +64,7 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase { writer.addDocument(doc.rootDoc()); writer.addDocument(doc.rootDoc()); - IndexReader reader = DirectoryReader.open(writer, true); + IndexReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field1").fieldType().termQuery("value1", null), 10); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java index 0cd6fa0e1c9..9923846da0e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java @@ -76,7 +76,7 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase { // Indexing a doc in the old way FieldType fieldType = new FieldType(); fieldType.setStored(true); - fieldType.setNumericType(FieldType.NumericType.INT); + fieldType.setNumericType(FieldType.LegacyNumericType.INT); Document doc2 = new Document(); doc2.add(new StoredField("field1", new BytesRef(Numbers.intToBytes(1)))); doc2.add(new StoredField("field2", new BytesRef(Numbers.floatToBytes(1.1f)))); @@ -85,7 +85,7 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase { doc2.add(new StoredField("field3", new BytesRef(Numbers.longToBytes(3L)))); writer.addDocument(doc2); - DirectoryReader reader = DirectoryReader.open(writer, true); + DirectoryReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); Set fields = new HashSet<>(Arrays.asList("field1", "field2", "field3")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index bf21f2fd6d3..09804f82919 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.mapper.numeric; -import org.apache.lucene.analysis.NumericTokenStream; +import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.DocValuesType; @@ -623,8 +623,8 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { // check the tokenstream actually used by the indexer TokenStream ts = field.tokenStream(null, null); - assertThat(ts, instanceOf(NumericTokenStream.class)); - assertEquals(expected, ((NumericTokenStream)ts).getPrecisionStep()); + assertThat(ts, instanceOf(LegacyNumericTokenStream.class)); + assertEquals(expected, ((LegacyNumericTokenStream)ts).getPrecisionStep()); } public void testTermVectorsBackCompat() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index ed58bb63b65..d5efd6dcfc3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -414,27 +414,11 @@ public class 
TimestampMappingTests extends ESSingleNodeTestCase { assertThat(request.timestamp(), is("1433239200000")); } - public void testThatIndicesBefore2xMustSupportUnixTimestampsInAnyDateFormat() throws Exception { + public void testThatIndicesAfter2_0DontSupportUnixTimestampsInAnyDateFormat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", true).field("format", "dateOptionalTime").endObject() .endObject().endObject().string(); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes(); - - // - // test with older versions - Settings oldSettings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersionBetween(random(), Version.V_0_90_0, Version.V_1_6_0)).build(); - DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - // both index request are successfully processed - IndexRequest oldIndexDateIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1970-01-01"); - oldIndexDateIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index"); - IndexRequest oldIndexTimestampIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1234567890"); - oldIndexTimestampIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index"); - - // // test with 2.x DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); MetaData newMetaData = client().admin().cluster().prepareState().get().getState().getMetaData(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index ab0182aa0ef..2e2f5f2446f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -256,7 +256,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { } public void testTimestampParsing() throws IOException { - IndexService indexService = createIndex("test", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build()); + IndexService indexService = createIndex("test"); XContentBuilder indexMapping = XContentFactory.jsonBuilder(); boolean enabled = randomBoolean(); indexMapping.startObject() diff --git a/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java index 7ccad1ffd2a..b14d5f50776 100644 --- a/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java @@ -22,7 +22,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.index.Term; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.Fuzziness; @@ -60,7 +60,7 @@ public class 
FuzzyQueryBuilderTests extends AbstractQueryTestCase numericRangeQuery = (NumericRangeQuery) query; + LegacyNumericRangeQuery numericRangeQuery = (LegacyNumericRangeQuery) query; assertTrue(numericRangeQuery.includesMin()); assertTrue(numericRangeQuery.includesMax()); diff --git a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index a4af84a8f79..238a186394d 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -25,9 +25,9 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -134,7 +134,7 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); Query query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME).toQuery(createShardContext()); - NumericRangeQuery fuzzyQuery = (NumericRangeQuery) query; + LegacyNumericRangeQuery fuzzyQuery = (LegacyNumericRangeQuery) query; assertThat(fuzzyQuery.getMin().longValue(), equalTo(12L)); assertThat(fuzzyQuery.getMax().longValue(), equalTo(12L)); } diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index fbb708a5d97..9f99b85a294 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; import org.elasticsearch.ElasticsearchParseException; @@ -118,8 +118,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase 0); Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext()); // since age is automatically registered in data, we encode it as numeric - assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); - NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery; + assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class)); + LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery; assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME)); assertThat(rangeQuery.getMin().intValue(), equalTo(23)); assertThat(rangeQuery.getMax().intValue(), equalTo(54)); @@ -220,15 +220,15 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase 0 || shardContext.indexVersionCreated().before(Version.V_1_4_0_Beta1)) { + if (getCurrentTypes().length > 0) { Query luceneQuery = queryBuilder.toQuery(shardContext); assertThat(luceneQuery, instanceOf(TermQuery.class)); TermQuery termQuery = (TermQuery) luceneQuery; diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java 
b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java index 4ed78e3f5ff..4b6788d463f 100644 --- a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java @@ -276,7 +276,7 @@ public class FunctionScoreTests extends ESTestCase { d.add(new TextField("_uid", "1", Field.Store.YES)); w.addDocument(d); w.commit(); - reader = DirectoryReader.open(w, true); + reader = DirectoryReader.open(w); searcher = newSearcher(reader); } @@ -634,13 +634,11 @@ public class FunctionScoreTests extends ESTestCase { ScoreFunction otherFunciton = function == null ? new DummyScoreFunction(combineFunction) : null; FunctionScoreQuery diffFunction = new FunctionScoreQuery(q.getSubQuery(), otherFunciton, minScore, combineFunction, maxBoost); FunctionScoreQuery diffMaxBoost = new FunctionScoreQuery(new TermQuery(new Term("foo", "bar")), function, minScore, combineFunction, maxBoost == 1.0f ? 0.9f : 1.0f); - q1.setBoost(3.0f); FunctionScoreQuery[] queries = new FunctionScoreQuery[] { diffFunction, diffMinScore, diffQuery, q, - q1, diffMaxBoost }; final int numIters = randomIntBetween(20, 100); @@ -678,7 +676,6 @@ public class FunctionScoreTests extends ESTestCase { FiltersFunctionScoreQuery diffMinScore = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, new FilterFunction[] {function}, maxBoost, minScore == null ? 0.9f : null, combineFunction); FilterFunction otherFunc = new FilterFunction(new TermQuery(new Term("filter", "other_query")), scoreFunction); FiltersFunctionScoreQuery diffFunc = new FiltersFunctionScoreQuery(new TermQuery(new Term("foo", "bar")), mode, randomBoolean() ? new FilterFunction[] {function, otherFunc} : new FilterFunction[] {otherFunc}, maxBoost, minScore, combineFunction); - q1.setBoost(3.0f); FiltersFunctionScoreQuery[] queries = new FiltersFunctionScoreQuery[] { diffQuery, @@ -687,7 +684,6 @@ public class FunctionScoreTests extends ESTestCase { diffMode, diffFunc, q, - q1, diffCombineFunc }; final int numIters = randomIntBetween(20, 100); diff --git a/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java b/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java index 44b91679623..d3c9975cf58 100644 --- a/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.search.geo; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; import org.apache.lucene.spatial.prefix.tree.Cell; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; diff --git a/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java b/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java index 4ef84d118fd..5b5b24bbe4b 100644 --- a/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java @@ -218,7 +218,7 @@ public abstract class AbstractNumberNestedSortingTestCase extends AbstractFieldD writer.addDocument(document); 
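
The two q1.setBoost(3.0f) deletions in FunctionScoreTests above follow from Lucene 6 making queries immutable: Query.setBoost and Query.getBoost are gone, and a boost is now applied by wrapping. A minimal sketch, assuming lucene-core 6.x:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class BoostMigrationExample {
    public static void main(String[] args) {
        Query base = new TermQuery(new Term("foo", "bar"));
        // Lucene 5.x: base.setBoost(3.0f);
        Query boosted = new BoostQuery(base, 3.0f); // Lucene 6.x equivalent
        System.out.println(boosted); // prints (foo:bar)^3.0
    }
}
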
MultiValueMode sortMode = MultiValueMode.SUM; - DirectoryReader directoryReader = DirectoryReader.open(writer, false); + DirectoryReader directoryReader = DirectoryReader.open(writer); directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0)); IndexSearcher searcher = new IndexSearcher(directoryReader); Query parentFilter = new TermQuery(new Term("__type", "parent")); diff --git a/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java index ff82b7c43ac..d5e9ff85ba3 100644 --- a/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java @@ -87,13 +87,13 @@ public class NestedSortingTests extends AbstractFieldDataTestCase { docs.add(parent); writer.addDocuments(docs); if (rarely()) { // we need to have a bit more segments than what RandomIndexWriter would do by default - DirectoryReader.open(writer, false).close(); + DirectoryReader.open(writer).close(); } } writer.commit(); MultiValueMode sortMode = randomFrom(Arrays.asList(MultiValueMode.MIN, MultiValueMode.MAX)); - DirectoryReader reader = DirectoryReader.open(writer, false); + DirectoryReader reader = DirectoryReader.open(writer); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0)); IndexSearcher searcher = new IndexSearcher(reader); PagedBytesIndexFieldData indexFieldData1 = getForField("f"); @@ -278,7 +278,7 @@ public class NestedSortingTests extends AbstractFieldDataTestCase { writer.addDocument(document); MultiValueMode sortMode = MultiValueMode.MIN; - DirectoryReader reader = DirectoryReader.open(writer, false); + DirectoryReader reader = DirectoryReader.open(writer); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0)); IndexSearcher searcher = new IndexSearcher(reader); PagedBytesIndexFieldData indexFieldData = getForField("field2"); diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java index cf95f22ae3b..cfadab6efb8 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java @@ -56,7 +56,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); writer.addDocument(doc); - DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); + DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = new IndexSearcher(open); assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits); final AtomicInteger closeCalls = new AtomicInteger(0); @@ -106,7 +106,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); doc.add(new TextField("field", "doc", random().nextBoolean() ? 
Field.Store.YES : Field.Store.NO)); writer.addDocument(doc); - DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); + DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = new IndexSearcher(open); assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits); searcher.setSimilarity(iwc.getSimilarity()); @@ -148,7 +148,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); writer.addDocument(doc); - DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); + DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = new IndexSearcher(open); assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits); searcher.setSimilarity(iwc.getSimilarity()); @@ -168,7 +168,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); writer.addDocument(doc); - DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); + DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = new IndexSearcher(open); assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits); searcher.setSimilarity(iwc.getSimilarity()); diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java index 105179a1f53..e960622d1c1 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java @@ -39,7 +39,7 @@ public class ShardUtilsTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.commit(); ShardId id = new ShardId("foo", "_na_", random().nextInt()); - try (DirectoryReader reader = DirectoryReader.open(writer, random().nextBoolean())) { + try (DirectoryReader reader = DirectoryReader.open(writer)) { ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id); assertEquals(id, ShardUtils.extractShardId(wrap)); } @@ -53,7 +53,7 @@ public class ShardUtilsTests extends ESTestCase { } } - try (DirectoryReader reader = DirectoryReader.open(writer, random().nextBoolean())) { + try (DirectoryReader reader = DirectoryReader.open(writer)) { ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id); assertEquals(id, ShardUtils.extractShardId(wrap)); CompositeReaderContext context = wrap.getContext(); diff --git a/core/src/test/java/org/elasticsearch/index/shard/VersionFieldUpgraderTests.java b/core/src/test/java/org/elasticsearch/index/shard/VersionFieldUpgraderTests.java deleted file mode 100644 index 2fc02fb0503..00000000000 --- a/core/src/test/java/org/elasticsearch/index/shard/VersionFieldUpgraderTests.java +++ /dev/null @@ -1,144 
+0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.shard; - -import org.apache.lucene.analysis.CannedTokenStream; -import org.apache.lucene.analysis.Token; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.NumericDocValuesField; -import org.apache.lucene.document.TextField; -import org.apache.lucene.index.CodecReader; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.FieldInfo; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.store.Directory; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.TestUtil; -import org.elasticsearch.common.Numbers; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.mapper.internal.VersionFieldMapper; -import org.elasticsearch.test.ESTestCase; - -/** Tests upgrading old document versions from _uid payloads to _version docvalues */ -public class VersionFieldUpgraderTests extends ESTestCase { - - /** Simple test: one doc in the old format, check that it looks correct */ - public void testUpgradeOneDocument() throws Exception { - Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null)); - - // add a document with a _uid having a payload of 3 - Document doc = new Document(); - Token token = new Token("1", 0, 1); - token.setPayload(new BytesRef(Numbers.longToBytes(3))); - doc.add(new TextField(UidFieldMapper.NAME, new CannedTokenStream(token))); - iw.addDocument(doc); - iw.commit(); - - CodecReader reader = getOnlySegmentReader(DirectoryReader.open(iw, true)); - CodecReader upgraded = VersionFieldUpgrader.wrap(reader); - // we need to be upgraded, should be a different instance - assertNotSame(reader, upgraded); - - // make sure we can see our numericdocvalues in fieldinfos - FieldInfo versionField = upgraded.getFieldInfos().fieldInfo(VersionFieldMapper.NAME); - assertNotNull(versionField); - assertEquals(DocValuesType.NUMERIC, versionField.getDocValuesType()); - // should have a value of 3, and be visible in docsWithField - assertEquals(3, upgraded.getNumericDocValues(VersionFieldMapper.NAME).get(0)); - assertTrue(upgraded.getDocsWithField(VersionFieldMapper.NAME).get(0)); - - // verify filterreader with checkindex - TestUtil.checkReader(upgraded); - - reader.close(); - iw.close(); - dir.close(); - } - - /** test that we are a non-op if the segment already has the version field */ - public void testAlreadyUpgraded() throws Exception { - Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null)); - - // add a document with a _uid having a payload of 3 - 
Document doc = new Document(); - Token token = new Token("1", 0, 1); - token.setPayload(new BytesRef(Numbers.longToBytes(3))); - doc.add(new TextField(UidFieldMapper.NAME, new CannedTokenStream(token))); - doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 3)); - iw.addDocument(doc); - iw.commit(); - - CodecReader reader = getOnlySegmentReader(DirectoryReader.open(iw, true)); - CodecReader upgraded = VersionFieldUpgrader.wrap(reader); - // we already upgraded: should be same instance - assertSame(reader, upgraded); - - reader.close(); - iw.close(); - dir.close(); - } - - /** Test upgrading two documents */ - public void testUpgradeTwoDocuments() throws Exception { - Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null)); - - // add a document with a _uid having a payload of 3 - Document doc = new Document(); - Token token = new Token("1", 0, 1); - token.setPayload(new BytesRef(Numbers.longToBytes(3))); - doc.add(new TextField(UidFieldMapper.NAME, new CannedTokenStream(token))); - iw.addDocument(doc); - - doc = new Document(); - token = new Token("2", 0, 1); - token.setPayload(new BytesRef(Numbers.longToBytes(4))); - doc.add(new TextField(UidFieldMapper.NAME, new CannedTokenStream(token))); - iw.addDocument(doc); - - iw.commit(); - - CodecReader reader = getOnlySegmentReader(DirectoryReader.open(iw, true)); - CodecReader upgraded = VersionFieldUpgrader.wrap(reader); - // we need to be upgraded, should be a different instance - assertNotSame(reader, upgraded); - - // make sure we can see our numericdocvalues in fieldinfos - FieldInfo versionField = upgraded.getFieldInfos().fieldInfo(VersionFieldMapper.NAME); - assertNotNull(versionField); - assertEquals(DocValuesType.NUMERIC, versionField.getDocValuesType()); - // should have a values of 3 and 4, and be visible in docsWithField - assertEquals(3, upgraded.getNumericDocValues(VersionFieldMapper.NAME).get(0)); - assertEquals(4, upgraded.getNumericDocValues(VersionFieldMapper.NAME).get(1)); - assertTrue(upgraded.getDocsWithField(VersionFieldMapper.NAME).get(0)); - assertTrue(upgraded.getDocsWithField(VersionFieldMapper.NAME).get(1)); - - // verify filterreader with checkindex - TestUtil.checkReader(upgraded); - - reader.close(); - iw.close(); - dir.close(); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java new file mode 100644 index 00000000000..edb337fd4e6 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.similarity; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; + +import java.util.Collections; + +public class SimilarityServiceTests extends ESTestCase { + + // Tests #16594 + public void testOverrideBuiltInSimilarity() { + Settings settings = Settings.builder().put("index.similarity.BM25.type", "classic").build(); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings); + try { + new SimilarityService(indexSettings, Collections.emptyMap()); + fail("can't override bm25"); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Cannot redefine built-in Similarity [BM25]"); + } + } + + // Pre v3 indices could override built-in similarities + public void testOverrideBuiltInSimilarityPreV3() { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0) + .put("index.similarity.BM25.type", "classic") + .build(); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings); + SimilarityService service = new SimilarityService(indexSettings, Collections.emptyMap()); + assertTrue(service.getSimilarity("BM25") instanceof ClassicSimilarityProvider); + } + + // Tests #16594 + public void testDefaultSimilarity() { + Settings settings = Settings.builder().put("index.similarity.default.type", "BM25").build(); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings); + SimilarityService service = new SimilarityService(indexSettings, Collections.emptyMap()); + assertTrue(service.getDefaultSimilarity() instanceof BM25SimilarityProvider); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index 36fc5cf0717..eff41dd3ffe 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -294,67 +294,6 @@ public class StoreTests extends ESTestCase { IOUtils.close(verifyingOutput, dir); } - // TODO: remove this, its too fragile. just use a static old index instead. - private static final class OldSIMockingCodec extends FilterCodec { - - protected OldSIMockingCodec() { - super(new Lucene54Codec().getName(), new Lucene54Codec()); - } - - @Override - public SegmentInfoFormat segmentInfoFormat() { - final SegmentInfoFormat segmentInfoFormat = super.segmentInfoFormat(); - return new SegmentInfoFormat() { - @Override - public SegmentInfo read(Directory directory, String segmentName, byte[] segmentID, IOContext context) throws IOException { - return segmentInfoFormat.read(directory, segmentName, segmentID, context); - } - - // this sucks it's a full copy of Lucene50SegmentInfoFormat but hey I couldn't find a way to make it write 4_5_0 versions - // somebody was too paranoid when implementing this. ey rmuir, was that you? 
- go fix it :P - @Override - public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException { - final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene50SegmentInfoFormat.SI_EXTENSION); - si.addFile(fileName); - - boolean success = false; - try (IndexOutput output = dir.createOutput(fileName, ioContext)) { - CodecUtil.writeIndexHeader(output, - "Lucene50SegmentInfo", - 0, - si.getId(), - ""); - Version version = Version.LUCENE_4_5_0; // FOOOOOO!! - // Write the Lucene version that created this segment, since 3.1 - output.writeInt(version.major); - output.writeInt(version.minor); - output.writeInt(version.bugfix); - assert version.prerelease == 0; - output.writeInt(si.maxDoc()); - - output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO)); - output.writeStringStringMap(si.getDiagnostics()); - Set files = si.files(); - for (String file : files) { - if (!IndexFileNames.parseSegmentName(file).equals(si.name)) { - throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files); - } - } - output.writeStringSet(files); - output.writeStringStringMap(si.getAttributes()); - CodecUtil.writeFooter(output); - success = true; - } finally { - if (!success) { - // TODO: are we doing this outside of the tracking wrapper? why must SIWriter cleanup like this? - IOUtils.deleteFilesIgnoringExceptions(si.dir, fileName); - } - } - } - }; - } - } - public void testNewChecksums() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); @@ -381,7 +320,7 @@ public class StoreTests extends ESTestCase { } } if (random().nextBoolean()) { - DirectoryReader.open(writer, random().nextBoolean()).close(); // flush + DirectoryReader.open(writer).close(); // flush } Store.MetadataSnapshot metadata; // check before we committed @@ -472,32 +411,12 @@ public class StoreTests extends ESTestCase { } - final Adler32 adler32 = new Adler32(); final long luceneChecksum; try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) { assertEquals(luceneFileLength, indexInput.length()); luceneChecksum = CodecUtil.retrieveChecksum(indexInput); } - { // positive check - StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0); - assertTrue(Store.checkIntegrityNoException(lucene, dir)); - } - - { // negative check - wrong checksum - StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength, Store.digestToString(luceneChecksum + 1), Version.LUCENE_4_8_0); - assertFalse(Store.checkIntegrityNoException(lucene, dir)); - } - - { // negative check - wrong length - StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength + 1, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0); - assertFalse(Store.checkIntegrityNoException(lucene, dir)); - } - - { // negative check - wrong file - StoreFileMetaData lucene = new StoreFileMetaData("legacy.bin", luceneFileLength, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0); - assertFalse(Store.checkIntegrityNoException(lucene, dir)); - } dir.close(); } @@ -600,8 +519,6 @@ public class StoreTests extends ESTestCase { dir = StoreTests.newDirectory(random); if (dir instanceof MockDirectoryWrapper) { ((MockDirectoryWrapper) dir).setPreventDoubleWrite(preventDoubleWrite); - // TODO: fix this test to handle virus 
checker - ((MockDirectoryWrapper) dir).setEnableVirusScanner(false); } this.random = random; } @@ -859,28 +776,6 @@ public class StoreTests extends ESTestCase { IOUtils.close(store); } - public void testCleanUpWithLegacyChecksums() throws IOException { - Map metaDataMap = new HashMap<>(); - metaDataMap.put("segments_1", new StoreFileMetaData("segments_1", 50, "foobar", Version.LUCENE_4_8_0, new BytesRef(new byte[]{1}))); - metaDataMap.put("_0_1.del", new StoreFileMetaData("_0_1.del", 42, "foobarbaz", Version.LUCENE_4_8_0, new BytesRef())); - Store.MetadataSnapshot snapshot = new Store.MetadataSnapshot(unmodifiableMap(metaDataMap), emptyMap(), 0); - - final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); - for (String file : metaDataMap.keySet()) { - try (IndexOutput output = store.directory().createOutput(file, IOContext.DEFAULT)) { - BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024)); - output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length); - CodecUtil.writeFooter(output); - } - } - - store.verifyAfterCleanup(snapshot, snapshot); - deleteContent(store.directory()); - IOUtils.close(store); - } - public void testOnCloseCallback() throws IOException { final ShardId shardId = new ShardId(new Index(randomRealisticUnicodeOfCodepointLengthBetween(1, 10), "_na_"), randomIntBetween(0, 100)); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index d6e248f1c94..5a4aa2e6b24 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -84,7 +84,7 @@ public class IndicesQueryCacheTests extends ESTestCase { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()); w.addDocument(new Document()); - DirectoryReader r = DirectoryReader.open(w, false); + DirectoryReader r = DirectoryReader.open(w); w.close(); ShardId shard = new ShardId("index", "_na_", 0); r = ElasticsearchDirectoryReader.wrap(r, shard); @@ -154,7 +154,7 @@ public class IndicesQueryCacheTests extends ESTestCase { Directory dir1 = newDirectory(); IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig()); w1.addDocument(new Document()); - DirectoryReader r1 = DirectoryReader.open(w1, false); + DirectoryReader r1 = DirectoryReader.open(w1); w1.close(); ShardId shard1 = new ShardId("index", "_na_", 0); r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); @@ -164,7 +164,7 @@ public class IndicesQueryCacheTests extends ESTestCase { Directory dir2 = newDirectory(); IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig()); w2.addDocument(new Document()); - DirectoryReader r2 = DirectoryReader.open(w2, false); + DirectoryReader r2 = DirectoryReader.open(w2); w2.close(); ShardId shard2 = new ShardId("index", "_na_", 1); r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); @@ -279,7 +279,7 @@ public class IndicesQueryCacheTests extends ESTestCase { Directory dir1 = newDirectory(); IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig()); w1.addDocument(new Document()); - DirectoryReader r1 = DirectoryReader.open(w1, false); + DirectoryReader r1 = DirectoryReader.open(w1); 
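// Background for the recurring change in these hunks: Lucene 6 dropped the
// DirectoryReader.open(IndexWriter, boolean applyAllDeletes) overload, and the
// single-argument DirectoryReader.open(IndexWriter) always applies deletes, so the
// migration is mechanical. A minimal sketch of the pattern (assumed Lucene 6.x API,
// hypothetical writer/reader names):
//
//     Directory dir = newDirectory();
//     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
//     w.addDocument(new Document());
//     DirectoryReader r = DirectoryReader.open(w); // was: DirectoryReader.open(w, true) or open(w, false)
//     r.close();
//     w.close();
//     dir.close();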
w1.close(); ShardId shard1 = new ShardId("index", "_na_", 0); r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); @@ -289,7 +289,7 @@ public class IndicesQueryCacheTests extends ESTestCase { Directory dir2 = newDirectory(); IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig()); w2.addDocument(new Document()); - DirectoryReader r2 = DirectoryReader.open(w2, false); + DirectoryReader r2 = DirectoryReader.open(w2); w2.close(); ShardId shard2 = new ShardId("index", "_na_", 1); r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index bd48a388f34..e36f1bca49b 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -54,7 +54,7 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); AtomicBoolean indexShard = new AtomicBoolean(true); @@ -107,7 +107,7 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); @@ -144,12 +144,12 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1)); + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); - DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1)); + DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); // initial cache @@ -237,13 +237,13 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); writer.updateDocument(new Term("id", 
"0"), newDoc(0, "bar")); - DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); @@ -263,18 +263,18 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); - DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "baz")); - DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, indexShard, 0); @@ -299,18 +299,18 @@ public class IndicesRequestCacheTests extends ESTestCase { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); - DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); - DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "baz")); - DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), + DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); AtomicBoolean differentIdentity = new AtomicBoolean(true); TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, differentIdentity, 0); diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java index 09371c38dab..4597765c11c 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java @@ -43,7 +43,6 @@ import static org.hamcrest.Matchers.is; import static 
org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) -@ESBackcompatTestCase.CompatibilityVersion(version = Version.V_1_2_0_ID) // we throw an exception if we create an index with _field_names that is 1.3 public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index 92b96d8e47d..23a197dbab6 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -19,15 +19,8 @@ package org.elasticsearch.indices.analyze; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; -import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.rest.action.admin.indices.analyze.RestAnalyzeAction; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.core.IsNull; @@ -196,53 +189,6 @@ public class AnalyzeActionIT extends ESIntegTestCase { return randomBoolean() ? "test" : "alias"; } - public void testParseXContentForAnalyzeReuqest() throws Exception { - BytesReference content = XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("filters", "lowercase") - .endObject().bytes(); - - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - - RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - - assertThat(analyzeRequest.text().length, equalTo(1)); - assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); - assertThat(analyzeRequest.tokenizer(), equalTo("keyword")); - assertThat(analyzeRequest.tokenFilters(), equalTo(new String[]{"lowercase"})); - } - - public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - - try { - RestAnalyzeAction.buildFromContent(new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - fail("shouldn't get here"); - } catch (Exception e) { - assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), equalTo("Failed to parse request body")); - } - } - - public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - BytesReference invalidContent =XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("unknown", "keyword") - .endObject().bytes(); - - try { - RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - fail("shouldn't get here"); - } catch (Exception e) { - assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); - } - } - public void testAnalyzerWithMultiValues() throws 
Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index b5f744ddc23..b69d1218546 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -94,7 +94,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { @Override public void close() throws IOException { super.close(); - store.directory().sync(Collections.singleton(md.name())); // sync otherwise MDW will mess with it + targetStore.directory().sync(Collections.singleton(md.name())); // sync otherwise MDW will mess with it } }; } catch (IOException e) { diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index e29ad3e081a..467aa4d3309 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.indices.stats; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.apache.lucene.util.Version; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; @@ -542,7 +541,6 @@ public class IndexStatsIT extends ESIntegTestCase { assertThat(stats.getTotal().getSegments(), notNullValue()); assertThat(stats.getTotal().getSegments().getCount(), equalTo((long) test1.totalNumShards)); - assumeTrue("test doesn't work with 4.6.0", org.elasticsearch.Version.CURRENT.luceneVersion != Version.LUCENE_4_6_0); assertThat(stats.getTotal().getSegments().getMemoryInBytes(), greaterThan(0L)); } diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java new file mode 100644 index 00000000000..abfe18f8c58 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ingest.WritePipelineResponse; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.node.service.NodeService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +@ESIntegTestCase.ClusterScope(numDataNodes = 0, numClientNodes = 0, scope = ESIntegTestCase.Scope.TEST) +public class IngestProcessorNotInstalledOnAllNodesIT extends ESIntegTestCase { + + private final BytesReference pipelineSource; + private volatile boolean installPlugin; + + public IngestProcessorNotInstalledOnAllNodesIT() throws IOException { + pipelineSource = jsonBuilder().startObject() + .startArray("processors") + .startObject() + .startObject("test") + .endObject() + .endObject() + .endArray() + .endObject().bytes(); + } + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return installPlugin ? pluginList(IngestClientIT.IngestPlugin.class) : Collections.emptyList(); + } + + @Override + protected Collection<Class<? extends Plugin>> getMockPlugins() { + return Collections.singletonList(TestSeedPlugin.class); + } + + public void testFailPipelineCreation() throws Exception { + installPlugin = true; + internalCluster().startNode(); + installPlugin = false; + internalCluster().startNode(); + + try { + client().admin().cluster().preparePutPipeline("_id", pipelineSource).get(); + fail("exception expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Processor type [test] is not installed on node")); + } + } + + public void testFailPipelineCreationProcessorNotInstalledOnMasterNode() throws Exception { + internalCluster().startNode(); + installPlugin = true; + internalCluster().startNode(); + + try { + client().admin().cluster().preparePutPipeline("_id", pipelineSource).get(); + fail("exception expected"); + } catch (ElasticsearchParseException e) { + assertThat(e.getMessage(), equalTo("No processor type exists with name [test]")); + } + } + + // If a pipeline is defined and a node that doesn't have the processor installed joins, then + // that pipeline can't be used on that node.
+ public void testFailStartNode() throws Exception { + installPlugin = true; + String node1 = internalCluster().startNode(); + + WritePipelineResponse response = client().admin().cluster().preparePutPipeline("_id", pipelineSource).get(); + assertThat(response.isAcknowledged(), is(true)); + Pipeline pipeline = internalCluster().getInstance(NodeService.class, node1).getIngestService().getPipelineStore().get("_id"); + assertThat(pipeline, notNullValue()); + + installPlugin = false; + String node2 = internalCluster().startNode(); + pipeline = internalCluster().getInstance(NodeService.class, node2).getIngestService().getPipelineStore().get("_id"); + assertThat(pipeline, nullValue()); + } + +} diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index fb0605f90b5..4009e4877b9 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -21,24 +21,32 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.Version; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.LocalTransportAddress; +import org.elasticsearch.ingest.core.IngestInfo; import org.elasticsearch.ingest.core.Pipeline; +import org.elasticsearch.ingest.core.ProcessorInfo; +import org.elasticsearch.ingest.processor.RemoveProcessor; import org.elasticsearch.ingest.processor.SetProcessor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -52,6 +60,7 @@ public class PipelineStoreTests extends ESTestCase { store = new PipelineStore(Settings.EMPTY); ProcessorsRegistry.Builder registryBuilder = new ProcessorsRegistry.Builder(); registryBuilder.registerProcessor("set", (templateService, registry) -> new SetProcessor.Factory(TestTemplateService.instance())); + registryBuilder.registerProcessor("remove", (templateService, registry) -> new RemoveProcessor.Factory(TestTemplateService.instance())); store.buildProcessorFactoryRegistry(registryBuilder, null); } @@ -197,4 +206,38 @@ public class PipelineStoreTests extends ESTestCase { assertThat(pipeline, nullValue()); } + public void testValidate() throws Exception { + PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}},{\"remove\" : {\"field\": \"_field\"}}]}")); + + DiscoveryNode node1 = new DiscoveryNode("_node_id1", new LocalTransportAddress("_id"), Version.CURRENT); + DiscoveryNode node2 = new DiscoveryNode("_node_id2", new LocalTransportAddress("_id"), Version.CURRENT); + Map ingestInfos = new HashMap<>(); + 
ingestInfos.put(node1, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove")))); + ingestInfos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set")))); + + try { + store.validatePipeline(ingestInfos, putRequest); + fail("exception expected"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Processor type [remove] is not installed on node [{_node_id2}{local}{local[_id]}]")); + } + + ingestInfos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove")))); + store.validatePipeline(ingestInfos, putRequest); + } + + public void testValidateNoIngestInfo() throws Exception { + PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}")); + try { + store.validatePipeline(Collections.emptyMap(), putRequest); + fail("exception expected"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo("Ingest info is empty")); + } + + DiscoveryNode discoveryNode = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT); + IngestInfo ingestInfo = new IngestInfo(Collections.singletonList(new ProcessorInfo("set"))); + store.validatePipeline(Collections.singletonMap(discoveryNode, ingestInfo), putRequest); + } + } diff --git a/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java index fdf48ff4281..537d8f020e6 100644 --- a/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java @@ -23,11 +23,14 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.ingest.TestProcessor; import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.processor.FailProcessor; +import org.elasticsearch.ingest.processor.SetProcessor; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.prefs.PreferencesFactory; @@ -115,6 +118,15 @@ public class PipelineFactoryTests extends ESTestCase { assertThat(pipeline.getProcessors().get(0).getType(), equalTo("compound")); } + public void testFlattenProcessors() throws Exception { + TestProcessor testProcessor = new TestProcessor(ingestDocument -> {}); + CompoundProcessor processor1 = new CompoundProcessor(testProcessor, testProcessor); + CompoundProcessor processor2 = new CompoundProcessor(Collections.singletonList(testProcessor), Collections.singletonList(testProcessor)); + Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor1, processor2)); + List flattened = pipeline.flattenAllProcessors(); + assertThat(flattened.size(), equalTo(4)); + } + private ProcessorsRegistry createProcessorRegistry(Map processorRegistry) { ProcessorsRegistry.Builder builder = new ProcessorsRegistry.Builder(); for (Map.Entry entry : processorRegistry.entrySet()) { diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index 693ba4a2eba..2a845303675 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ 
b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.ingest.core.IngestInfo; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.os.DummyOsInfo; import org.elasticsearch.monitor.os.OsInfo; @@ -90,6 +91,7 @@ public class NodeInfoStreamingTests extends ESTestCase { compareJsonOutput(nodeInfo.getNode(), readNodeInfo.getNode()); compareJsonOutput(nodeInfo.getOs(), readNodeInfo.getOs()); comparePluginsAndModules(nodeInfo, readNodeInfo); + compareJsonOutput(nodeInfo.getIngest(), readNodeInfo.getIngest()); } private void comparePluginsAndModules(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException { @@ -135,6 +137,7 @@ public class NodeInfoStreamingTests extends ESTestCase { PluginsAndModules plugins = new PluginsAndModules(); plugins.addModule(DummyPluginInfo.INSTANCE); plugins.addPlugin(DummyPluginInfo.INSTANCE); - return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, htttpInfo, plugins); + IngestInfo ingestInfo = new IngestInfo(); + return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, htttpInfo, plugins, ingestInfo); } } diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java index 170b0be30df..8bef9138567 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorQueryTests.java @@ -147,7 +147,9 @@ public class PercolatorQueryTests extends ESTestCase { indexWriter.close(); directoryReader = DirectoryReader.open(directory); - IndexSearcher shardSearcher = newSearcher(directoryReader); + // don't use newSearcher, which randomizes similarity. if it gets classic sim, the test eats it, + // as the score becomes 1 due to querynorm. 
+ IndexSearcher shardSearcher = new IndexSearcher(directoryReader); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java index 37a0f4e358e..04bff31057d 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java @@ -176,12 +176,12 @@ public class PluginInfoTests extends ESTestCase { "description", "fake desc", "name", "my_plugin", "version", "1.0", - "elasticsearch.version", Version.V_1_7_0.toString()); + "elasticsearch.version", Version.V_2_0_0.toString()); try { PluginInfo.readFromProperties(pluginDir); fail("expected old elasticsearch version exception"); } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Was designed for version [1.7.0]")); + assertTrue(e.getMessage().contains("Was designed for version [2.0.0]")); } } diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java new file mode 100644 index 00000000000..34e8315372b --- /dev/null +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.rest.action.admin.indices.analyze; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.startsWith; + +public class RestAnalyzeActionTests extends ESTestCase { + + public void testParseXContentForAnalyzeRequest() throws Exception { + BytesReference content = XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("filters", "lowercase") + .endObject().bytes(); + + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + + RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); + + assertThat(analyzeRequest.text().length, equalTo(1)); + assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); + assertThat(analyzeRequest.tokenizer(), equalTo("keyword")); + assertThat(analyzeRequest.tokenFilters(), equalTo(new String[]{"lowercase"})); + } + + public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + + try { + RestAnalyzeAction.buildFromContent(new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); + fail("shouldn't get here"); + } catch (Exception e) { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), equalTo("Failed to parse request body")); + } + } + + public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception { + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + BytesReference invalidContent = XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("unknown", "keyword") + .endObject().bytes(); + + try { + RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); + fail("shouldn't get here"); + } catch (Exception e) { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); + } + } + + public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception { + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + BytesReference invalidExplain = XContentFactory.jsonBuilder() + .startObject() + .field("explain", "fals") + .endObject().bytes(); + try { + RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); + fail("shouldn't get here"); + } catch (Exception e) { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); + } + } + + +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 2e4a974b778..c2ac2078c06 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.search.aggregations.bucket; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Scorer; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.joda.DateMathParser; @@ -28,22 +26,14 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.CompiledScript; -import org.elasticsearch.script.ExecutableScript; -import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.SearchScript; +import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsMockPlugin; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.lookup.LeafSearchLookup; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; import org.joda.time.DateTime; @@ -55,7 +45,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -142,8 +131,7 @@ public class DateHistogramIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { return Arrays.asList( - ExtractFieldScriptPlugin.class, - FieldValueScriptPlugin.class); + DateScriptsMockPlugin.class); } @After @@ -466,10 +454,12 @@ public class DateHistogramIT extends ESIntegTestCase { } public void testSingleValuedFieldWithValueScript() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "date"); SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)) + .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params)) .dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet(); assertSearchResponse(response); @@ -600,10 +590,12 @@ public class DateHistogramIT extends ESIntegTestCase { * doc 6: [ Apr 23, May 24] */ public void testMultiValuedFieldWithValueScript() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "dates"); SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("dates") - .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)) + .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params)) .dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet(); assertSearchResponse(response); @@ -652,8 +644,11 @@ public 
class DateHistogramIT extends ESIntegTestCase { * Mar 23 */ public void testScriptSingleValue() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "date"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script(new Script("date", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").script(new Script(DateScriptMocks.ExtractFieldScript.NAME, + ScriptType.INLINE, "native", params)).dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -687,8 +682,11 @@ public class DateHistogramIT extends ESIntegTestCase { } public void testScriptMultiValued() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "dates"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script(new Script("dates", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").script(new Script(DateScriptMocks.ExtractFieldScript.NAME, + ScriptType.INLINE, "native", params)).dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -1148,256 +1146,4 @@ public class DateHistogramIT extends ESIntegTestCase { Histogram histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), greaterThan(0)); } - - /** - * Mock plugin for the {@link ExtractFieldScriptEngine} - */ - public static class ExtractFieldScriptPlugin extends Plugin { - - @Override - public String name() { - return ExtractFieldScriptEngine.NAME; - } - - @Override - public String description() { - return "Mock script engine for " + DateHistogramIT.class; - } - - public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.TYPES)); - } - - } - - /** - * This mock script returns the field that is specified by name in the script body - */ - public static class ExtractFieldScriptEngine implements ScriptEngineService { - - public static final String NAME = "extract_field"; - - public static final List TYPES = Collections.singletonList(NAME); - - @Override - public void close() throws IOException { - } - - @Override - public List getTypes() { - return TYPES; - } - - @Override - public List getExtensions() { - return TYPES; - } - - @Override - public boolean isSandboxed() { - return true; - } - - @Override - public Object compile(String script, Map params) { - return script; - } - - @Override - public ExecutableScript executable(CompiledScript compiledScript, Map params) { - throw new UnsupportedOperationException(); - } - @Override - public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { - return new SearchScript() { - - @Override - public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { - - final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); - - return new LeafSearchScript() { - @Override - public void setNextVar(String name, Object value) { - } - - @Override - public Object run() { - String fieldName = (String) compiledScript.compiled(); - return leafLookup.doc().get(fieldName); - } - - @Override - public void setScorer(Scorer scorer) { - } - - @Override - public void setSource(Map source) { - } - - @Override - 
public void setDocument(int doc) { - if (leafLookup != null) { - leafLookup.setDocument(doc); - } - } - - @Override - public long runAsLong() { - throw new UnsupportedOperationException(); - } - - @Override - public float runAsFloat() { - throw new UnsupportedOperationException(); - } - - @Override - public double runAsDouble() { - throw new UnsupportedOperationException(); - } - }; - } - - @Override - public boolean needsScores() { - return false; - } - }; - } - - @Override - public void scriptRemoved(CompiledScript script) { - } - } - - /** - * Mock plugin for the {@link FieldValueScriptEngine} - */ - public static class FieldValueScriptPlugin extends Plugin { - - @Override - public String name() { - return FieldValueScriptEngine.NAME; - } - - @Override - public String description() { - return "Mock script engine for " + DateHistogramIT.class; - } - - public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.TYPES)); - } - - } - - /** - * This mock script returns the field value and adds one month to the returned date - */ - public static class FieldValueScriptEngine implements ScriptEngineService { - - public static final String NAME = "field_value"; - - public static final List TYPES = Collections.singletonList(NAME); - - @Override - public void close() throws IOException { - } - - @Override - public List getTypes() { - return TYPES; - } - - @Override - public List getExtensions() { - return TYPES; - } - - @Override - public boolean isSandboxed() { - return true; - } - - @Override - public Object compile(String script, Map params) { - return script; - } - - @Override - public ExecutableScript executable(CompiledScript compiledScript, Map params) { - throw new UnsupportedOperationException(); - } - @Override - public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { - return new SearchScript() { - - private Map vars = new HashMap<>(2); - - @Override - public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { - - final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); - - return new LeafSearchScript() { - - @Override - public Object unwrap(Object value) { - throw new UnsupportedOperationException(); - } - - @Override - public void setNextVar(String name, Object value) { - vars.put(name, value); - } - - @Override - public Object run() { - throw new UnsupportedOperationException(); - } - - @Override - public void setScorer(Scorer scorer) { - } - - @Override - public void setSource(Map source) { - } - - @Override - public void setDocument(int doc) { - if (leafLookup != null) { - leafLookup.setDocument(doc); - } - } - - @Override - public long runAsLong() { - return new DateTime((long) vars.get("_value"), DateTimeZone.UTC).plusMonths(1).getMillis(); - } - - @Override - public float runAsFloat() { - throw new UnsupportedOperationException(); - } - - @Override - public double runAsDouble() { - return new DateTime(new Double((double) vars.get("_value")).longValue(), DateTimeZone.UTC).plusMonths(1).getMillis(); - } - }; - } - - @Override - public boolean needsScores() { - return false; - } - }; - } - - @Override - public void scriptRemoved(CompiledScript script) { - } - } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index cc96555c372..2200e0e30ca 
100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -65,13 +65,6 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { return Collections.singleton(AssertingLocalTransport.TestPlugin.class); } - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY.getKey(), Version.V_1_4_0_Beta1).build(); - } - @Before public void beforeEachTest() throws IOException { prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java similarity index 92% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index 44f7a93ade1..b1dc61a9b9e 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -16,13 +16,14 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.script.ScriptService.ScriptType; +import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsMockPlugin; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket; @@ -36,8 +37,9 @@ import org.joda.time.DateTimeZone; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -55,12 +57,7 @@ import static org.hamcrest.core.IsNull.nullValue; * */ @ESIntegTestCase.SuiteScopeTestCase -public class DateRangeTests extends ESIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); - } +public class DateRangeIT extends ESIntegTestCase { private static IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { return client().prepareIndex("idx", "type").setSource(jsonBuilder() @@ -72,7 +69,11 @@ public class DateRangeTests extends ESIntegTestCase { } private static DateTime date(int month, int day) { - return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC); + return date(month, day, DateTimeZone.UTC); + } + + private static DateTime date(int month, int day, DateTimeZone timezone) { + return new DateTime(2012, month, day, 0, 0, timezone); } private static int numDocs; @@ -107,18 +108,26 @@ public class DateRangeTests extends ESIntegTestCase { ensureSearchable(); } + @Override + 
protected Collection> nodePlugins() { + return Arrays.asList( + DateScriptsMockPlugin.class); + } + public void testDateMath() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "date"); DateRangeAggregatorBuilder rangeBuilder = dateRange("range"); if (randomBoolean()) { rangeBuilder.field("date"); } else { - rangeBuilder.script(new Script("doc['date'].value")); + rangeBuilder.script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params)); } SearchResponse response = client() .prepareSearch("idx") .addAggregation( rangeBuilder.addUnboundedTo("a long time ago", "now-50y").addRange("recently", "now-50y", "now-1y") - .addUnboundedFrom("last year", "now-1y")).execute().actionGet(); + .addUnboundedFrom("last year", "now-1y").timeZone(DateTimeZone.forID("EST"))).execute().actionGet(); assertSearchResponse(response); @@ -286,17 +295,25 @@ public class DateRangeTests extends ESIntegTestCase { } public void testSingleValueFieldWithDateMath() throws Exception { + String[] ids = DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]); + DateTimeZone timezone = DateTimeZone.forID(randomFrom(ids)); + int timeZoneOffset = timezone.getOffset(date(2, 15)); + // if time zone is UTC (or equivalent), time zone suffix is "Z", else something like "+03:00", which we get with the "ZZ" format + String feb15Suffix = timeZoneOffset == 0 ? "Z" : date(2,15, timezone).toString("ZZ"); + String mar15Suffix = timeZoneOffset == 0 ? "Z" : date(3,15, timezone).toString("ZZ"); + long expectedFirstBucketCount = timeZoneOffset < 0 ? 3L : 2L; + SearchResponse response = client().prepareSearch("idx") .addAggregation(dateRange("range") .field("date") .addUnboundedTo("2012-02-15") .addRange("2012-02-15", "2012-02-15||+1M") - .addUnboundedFrom("2012-02-15||+1M")) + .addUnboundedFrom("2012-02-15||+1M") + .timeZone(timezone)) .execute().actionGet(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -305,30 +322,31 @@ public class DateRangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z")); + assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + feb15Suffix)); assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC))); assertThat(bucket.getFromAsString(), nullValue()); - assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); - assertThat(bucket.getDocCount(), equalTo(2L)); + assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix)); + assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); - assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); - assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); + assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix + + "-2012-03-15T00:00:00.000" + 
mar15Suffix)); + assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC))); + assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC))); + assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix)); + assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix)); assertThat(bucket.getDocCount(), equalTo(2L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix + "-*")); + assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC))); assertThat(((DateTime) bucket.getTo()), nullValue()); - assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); + assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix)); assertThat(bucket.getToAsString(), nullValue()); - assertThat(bucket.getDocCount(), equalTo(numDocs - 4L)); + assertThat(bucket.getDocCount(), equalTo(numDocs - 2L - expectedFirstBucketCount)); } public void testSingleValueFieldWithCustomKey() throws Exception { @@ -520,10 +538,12 @@ public class DateRangeTests extends ESIntegTestCase { public void testMultiValuedFieldWithValueScript() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "dates"); SearchResponse response = client().prepareSearch("idx") .addAggregation(dateRange("range") .field("dates") - .script(new Script("new DateTime(_value.longValue(), DateTimeZone.UTC).plusMonths(1).getMillis()")) + .script(new Script(DateScriptMocks.PlusOneMonthScript.NAME, ScriptType.INLINE, "native", params)) .addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))).execute() .actionGet(); @@ -575,9 +595,11 @@ public class DateRangeTests extends ESIntegTestCase { */ public void testScriptSingleValue() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "date"); SearchResponse response = client().prepareSearch("idx") .addAggregation(dateRange("range") - .script(new Script("doc['date'].value")) + .script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params)) .addUnboundedTo(date(2, 15)) .addRange(date(2, 15), date(3, 15)) .addUnboundedFrom(date(3, 15))) @@ -634,11 +656,14 @@ public class DateRangeTests extends ESIntegTestCase { */ public void testScriptMultiValued() throws Exception { + Map params = new HashMap<>(); + params.put("fieldname", "dates"); SearchResponse response = client() .prepareSearch("idx") .addAggregation( - dateRange("range").script(new Script("doc['dates'].values")).addUnboundedTo(date(2, 15)) - .addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))).execute().actionGet(); + dateRange("range").script(new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params)) + .addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)) + .addUnboundedFrom(date(3, 15))).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java index 94156fc3a5d..71b61c0e6e6 100644 --- 
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java index 94156fc3a5d..71b61c0e6e6 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java @@ -22,9 +22,12 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range; import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder; +import org.joda.time.DateTimeZone; public class DateRangeTests extends BaseAggregationTestCase<DateRangeAggregatorBuilder> { + private final static String[] timeZoneIds = DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]); + @Override protected DateRangeAggregatorBuilder createTestAggregatorBuilder() { int numRanges = randomIntBetween(1, 10); @@ -56,6 +59,9 @@ public class DateRangeTests extends BaseAggregationTestCase<DateRangeAggregatorBuilder> { + if (randomBoolean()) { + factory.timeZone(DateTimeZone.forID(randomFrom(timeZoneIds))); + } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocks.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocks.java new file mode 100644 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocks.java +package org.elasticsearch.search.aggregations.bucket; + +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.AbstractSearchScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.NativeScriptFactory; +import org.elasticsearch.script.ScriptModule; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import java.util.HashMap; +import java.util.Map; + +/** + * Mock native scripts used by the date aggregation tests. + */ +public class DateScriptMocks { + + public static class DateScriptsMockPlugin extends Plugin { + @Override + public String name() { + return "DateScriptMocks"; + } + + @Override + public String description() { + return "A mock script plugin"; + } + + public void onModule(ScriptModule module) { + module.registerScript(ExtractFieldScript.NAME, ExtractFieldScriptFactory.class); + module.registerScript(PlusOneMonthScript.NAME, PlusOneMonthScriptFactory.class); + } + } + + public static class ExtractFieldScriptFactory implements NativeScriptFactory { + @Override + public ExecutableScript newScript(Map<String, Object> params) { + return new ExtractFieldScript((String) params.get("fieldname")); + } + @Override + public boolean needsScores() { + return false; + } + } + + public static class ExtractFieldScript extends AbstractSearchScript { + + public static final String NAME = "extract_field"; + private String fieldname; + + public ExtractFieldScript(String fieldname) { + this.fieldname = fieldname; + } + + @Override + public Object run() { + return doc().get(fieldname); + } + } + + public static class PlusOneMonthScriptFactory implements NativeScriptFactory { + + @Override + public ExecutableScript newScript(Map<String, Object> params) { + return new PlusOneMonthScript((String) params.get("fieldname")); + } + + @Override + public boolean needsScores() { + return false; + } + } + + /** + * This mock script takes the value of a date field and adds one month to it + */ + public static class PlusOneMonthScript extends AbstractSearchScript { + + public static final String NAME = "date_plus_1_month"; + private String fieldname; + + private Map<String, Object> vars = new HashMap<>(); + + public PlusOneMonthScript(String fieldname) { + this.fieldname = fieldname; + } + + @Override + public void setNextVar(String name, Object value) { + vars.put(name, value); + } + + @Override + public long runAsLong() { + return new DateTime((long) vars.get("_value"), DateTimeZone.UTC).plusMonths(1).getMillis(); + } + + @Override + public double runAsDouble() { + return new DateTime(new Double((double) vars.get("_value")).longValue(), DateTimeZone.UTC).plusMonths(1).getMillis(); + } + + @Override + public Object run() { + throw new UnsupportedOperationException(); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index 8312f4aca04..6d2d11e2799 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -67,7 +67,7 @@ public class GeoDistanceIT extends ESIntegTestCase { return pluginList(InternalSettingsPlugin.class); // uses index.version.created } - private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + private Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); private IndexRequestBuilder indexCity(String idx, String name, String... 
latLons) throws Exception { XContentBuilder source = jsonBuilder().startObject().field("city", name); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java index 22413a7b319..5aa7ba44466 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java @@ -63,7 +63,7 @@ public class GeoHashGridIT extends ESIntegTestCase { return pluginList(InternalSettingsPlugin.class); // uses index.version.created } - private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); + private Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); static ObjectIntMap expectedDocCountsForGeoHash = null; static ObjectIntMap multiValuedExpectedDocCountsForGeoHash = null; diff --git a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java index 60810ee4df6..7587866b144 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.search.fetch.innerhits; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.IntField; +import org.apache.lucene.document.LegacyIntField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; @@ -66,7 +66,7 @@ public class NestedChildrenFilterTests extends ESTestCase { Document parenDoc = new Document(); parenDoc.add(new StringField("type", "parent", Field.Store.NO)); - parenDoc.add(new IntField("num_child_docs", numChildDocs, Field.Store.YES)); + parenDoc.add(new LegacyIntField("num_child_docs", numChildDocs, Field.Store.YES)); docs.add(parenDoc); writer.addDocuments(docs); } diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index e96b4d69b00..175adc27892 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -31,8 +31,10 @@ import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.SearchHits; @@ -50,6 +52,7 @@ import java.util.Locale; import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.client.Requests.searchRequest; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static 
org.elasticsearch.index.query.QueryBuilders.boostingQuery; import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; @@ -74,6 +77,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { return pluginList(InternalSettingsPlugin.class); // uses index.version.created } + private final QueryBuilder baseQuery = constantScoreQuery(termQuery("test", "value")); + public void testDistanceScoreGeoLinGaussExp() throws Exception { assertAcked(prepareCreate("test").addMapping( "type1", @@ -117,7 +122,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( - searchSource().query(constantScoreQuery(termQuery("test", "value"))))); + searchSource().query(baseQuery))); SearchResponse sr = response.actionGet(); SearchHits sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -125,7 +130,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km"))))); + functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km"))))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -136,7 +141,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( - searchSource().query(constantScoreQuery(termQuery("test", "value"))))); + searchSource().query(baseQuery))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -144,7 +149,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("loc", lonlat, "1000km"))))); + functionScoreQuery(baseQuery, linearDecayFunction("loc", lonlat, "1000km"))))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -155,7 +160,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( - searchSource().query(constantScoreQuery(termQuery("test", "value"))))); + searchSource().query(baseQuery))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -163,7 +168,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), exponentialDecayFunction("loc", lonlat, "1000km"))))); + functionScoreQuery(baseQuery, exponentialDecayFunction("loc", lonlat, "1000km"))))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -314,30 +319,30 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { .setSource( jsonBuilder().startObject().field("test", 
"value").startObject("loc").field("lat", 20).field("lon", 11).endObject() .endObject()).setRefresh(true).get(); - + FunctionScoreQueryBuilder baseQuery = functionScoreQuery(constantScoreQuery(termQuery("test", "value")), ScoreFunctionBuilders.weightFactorFunction(randomIntBetween(1, 10))); GeoPoint point = new GeoPoint(20, 11); ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", point, "1000km")).boostMode( - CombineFunction.MULTIPLY)))); + functionScoreQuery(baseQuery, gaussDecayFunction("loc", point, "1000km")).boostMode( + CombineFunction.REPLACE)))); SearchResponse sr = response.actionGet(); SearchHits sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(1.0, 1.e-5)); + // this is equivalent to new GeoPoint(20, 11); just flipped so scores must be same float[] coords = { 11, 20 }; - response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", coords, "1000km")).boostMode( - CombineFunction.MULTIPLY)))); + functionScoreQuery(baseQuery, gaussDecayFunction("loc", coords, "1000km")).boostMode( + CombineFunction.REPLACE)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(1.0f, 1.e-5)); } public void testCombineModes() throws Exception { @@ -348,26 +353,25 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ensureYellow(); client().prepareIndex().setType("type1").setId("1").setIndex("test") - .setSource(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject()).setRefresh(true).get(); - - // function score should return 0.5 for this function - + .setSource(jsonBuilder().startObject().field("test", "value value").field("num", 1.0).endObject()).setRefresh(true).get(); + FunctionScoreQueryBuilder baseQuery = functionScoreQuery(constantScoreQuery(termQuery("test", "value")), ScoreFunctionBuilders.weightFactorFunction(2)); + // decay score should return 0.5 for this function and baseQuery should return 2.0f as it's score ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.MULTIPLY)))); + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.MULTIPLY)))); SearchResponse sr = response.actionGet(); SearchHits sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(0.153426408, 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(1.0, 1.e-5)); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.REPLACE)))); + 
functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.REPLACE)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); @@ -377,48 +381,48 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.SUM)))); + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.SUM)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(0.30685282 + 0.5, 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo(2.0 + 0.5, 1.e-5)); logger.info("--> Hit[0] {} Explanation:\n {}", sr.getHits().getAt(0).id(), sr.getHits().getAt(0).explanation()); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.AVG)))); + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.AVG)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo((0.30685282 + 0.5) / 2, 1.e-5)); + assertThat((double) sh.getAt(0).score(), closeTo((2.0 + 0.5) / 2, 1.e-5)); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.MIN)))); - sr = response.actionGet(); - sh = sr.getHits(); - assertThat(sh.getTotalHits(), equalTo((long) (1))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).score(), closeTo(0.30685282, 1.e-5)); - - response = client().search( - searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( - searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( - 2.0f).boostMode(CombineFunction.MAX)))); + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.MIN)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).score(), closeTo(0.5, 1.e-5)); + response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( + searchSource().query( + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)) + .boostMode(CombineFunction.MAX)))); + sr = response.actionGet(); + sh = sr.getHits(); + assertThat(sh.getTotalHits(), equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat((double) sh.getAt(0).score(), closeTo(2.0, 1.e-5)); + }
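With baseQuery contributing a score of 2.0 (weight factor 2) and the gauss function decaying to 0.5 at num = 1.0, the boost_mode assertions in the hunk above are plain arithmetic. A standalone sketch, not part of the patch (the class name is illustrative only):

[source,java]
--------------------------------------------------
public class CombineModesExample {
    public static void main(String[] args) {
        double base = 2.0;  // weightFactorFunction(2) on the base query
        double decay = 0.5; // gauss at num = 1.0 with origin 0.0, scale 1.0, decay 0.5

        System.out.println(base * decay);          // MULTIPLY -> 1.0
        System.out.println(decay);                 // REPLACE  -> 0.5
        System.out.println(base + decay);          // SUM      -> 2.5
        System.out.println((base + decay) / 2);    // AVG      -> 1.25
        System.out.println(Math.min(base, decay)); // MIN      -> 0.5
        System.out.println(Math.max(base, decay)); // MAX      -> 2.0
    }
}
--------------------------------------------------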
public void testExceptionThrownIfScaleLE0() throws Exception { @@ -509,7 +513,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture<SearchResponse> response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + functionScoreQuery(baseQuery, new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num1", "2013-05-28", "+3d")), new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num2", "0.0", "1")) }).scoreMode(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY)))); @@ -733,7 +737,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture<SearchResponse> response = client().search( searchRequest().source( - searchSource().query(constantScoreQuery(termQuery("test", "value"))))); + searchSource().query(baseQuery))); SearchResponse sr = response.actionGet(); assertSearchHits(sr, "1", "2"); SearchHits sh = sr.getHits(); @@ -745,7 +749,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MIN))))); + functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MIN))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); @@ -755,7 +759,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MAX))))); + functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MAX))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); @@ -784,7 +788,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.SUM))))); + functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.SUM))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); @@ -795,7 +799,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.AVG))))); + functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.AVG))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 8a060af2ab0..8f04bd72756 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -19,10 +19,10 @@ package org.elasticsearch.search.geo; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.exception.InvalidShapeException; -import com.spatial4j.core.shape.Shape; +import 
org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index 7afbeaa9abf..e41e3c178c5 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.geo; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.action.get.GetResponse; diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index 9f898a47c06..f34d5b33c9d 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -268,78 +268,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { equalTo("Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com")); } - public void testNgramHighlightingPreLucene42() throws IOException { - assertAcked(prepareCreate("test") - .addMapping("test", - "name", "type=text,analyzer=name_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets", - "name2", "type=text,analyzer=name2_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets") - .setSettings(settingsBuilder() - .put(indexSettings()) - .put("analysis.filter.my_ngram.max_gram", 20) - .put("analysis.filter.my_ngram.version", "4.1") - .put("analysis.filter.my_ngram.min_gram", 1) - .put("analysis.filter.my_ngram.type", "ngram") - .put("analysis.tokenizer.my_ngramt.max_gram", 20) - .put("analysis.tokenizer.my_ngramt.version", "4.1") - .put("analysis.tokenizer.my_ngramt.min_gram", 1) - .put("analysis.tokenizer.my_ngramt.type", "ngram") - .put("analysis.analyzer.name_index_analyzer.tokenizer", "my_ngramt") - .put("analysis.analyzer.name2_index_analyzer.tokenizer", "whitespace") - .putArray("analysis.analyzer.name2_index_analyzer.filter", "lowercase", "my_ngram") - .put("analysis.analyzer.name_search_analyzer.tokenizer", "whitespace") - .put("analysis.analyzer.name_search_analyzer.filter", "lowercase"))); - ensureYellow(); - client().prepareIndex("test", "test", "1") - .setSource("name", "logicacmg ehemals avinci - the know how company", - "name2", "logicacmg ehemals avinci - the know how company").get(); - client().prepareIndex("test", "test", "2") - .setSource("name", "avinci, unilog avinci, logicacmg, logica", - "name2", "avinci, unilog avinci, logicacmg, logica").get(); - refresh(); - - SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica m"))) - .highlighter(new HighlightBuilder().field("name")).get(); - assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - 
assertHighlight(search, 1, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - - search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica ma"))) - .highlighter(new HighlightBuilder().field("name")).get(); - assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - assertHighlight(search, 1, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - - search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica"))) - .highlighter(new HighlightBuilder().field("name")).get(); - assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - assertHighlight(search, 0, "name", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - - search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica m"))) - .highlighter(new HighlightBuilder().field("name2")).get(); - assertHighlight(search, 0, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - assertHighlight(search, 1, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - - search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica ma"))) - .highlighter(new HighlightBuilder().field("name2")).get(); - assertHighlight(search, 0, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - assertHighlight(search, 1, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - - search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica"))) - .highlighter(new HighlightBuilder().field("name2")).get(); - assertHighlight(search, 0, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - assertHighlight(search, 1, "name2", 0, anyOf(equalTo("logicacmg ehemals avinci - the know how company"), - equalTo("avinci, unilog avinci, logicacmg, logica"))); - } - public void testNgramHighlighting() throws IOException { assertAcked(prepareCreate("test") .addMapping("test", diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index fad1cc3a0ef..084e07e0389 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -20,15 +20,12 @@ package org.elasticsearch.search.innerhits; import org.apache.lucene.util.ArrayUtil; -import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.HasChildQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.support.QueryInnerHits; import org.elasticsearch.plugins.Plugin; @@ -75,7 +72,7 @@ import static org.hamcrest.Matchers.nullValue; public class InnerHitsIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(MockScriptEngine.TestPlugin.class, InternalSettingsPlugin.class); + return pluginList(MockScriptEngine.TestPlugin.class); } public void testSimpleNested() throws Exception { @@ -753,160 +750,6 @@ public class InnerHitsIT extends ESIntegTestCase { assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); } - public void testNestedInnerHitsWithStoredFieldsAndNoSourceBackcompat() throws Exception { - assertAcked(prepareCreate("articles") - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id) - .addMapping("article", jsonBuilder().startObject() - .startObject("_source").field("enabled", false).endObject() - .startObject("properties") - .startObject("comments") - .field("type", "nested") - .startObject("properties") - .startObject("message").field("type", "text").field("store", true).endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ) - ); - - List requests = new ArrayList<>(); - requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject() - .field("title", "quick brown fox") - .startObject("comments").field("message", "fox eat quick").endObject() - .endObject())); - indexRandom(true, requests); - - SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, new InnerHitsBuilder.InnerHit().field("comments.message")))) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).fields().get("comments.message").getValue(), equalTo("fox eat quick")); - } - - public void testNestedInnerHitsWithHighlightOnStoredFieldBackcompat() throws Exception { - assertAcked(prepareCreate("articles") - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id) - .addMapping("article", jsonBuilder().startObject() - .startObject("_source").field("enabled", false).endObject() - .startObject("properties") - .startObject("comments") - .field("type", "nested") - .startObject("properties") - .startObject("message").field("type", "text").field("store", true).endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ) - ); - - List requests = new ArrayList<>(); - requests.add(client().prepareIndex("articles", "article", 
"1").setSource(jsonBuilder().startObject() - .field("title", "quick brown fox") - .startObject("comments").field("message", "fox eat quick").endObject() - .endObject())); - indexRandom(true, requests); - InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit(); - builder.highlighter(new HighlightBuilder().field("comments.message")); - SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder))) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); - assertThat(String.valueOf(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).highlightFields().get("comments.message").getFragments()[0]), equalTo("fox eat quick")); - } - - public void testNestedInnerHitsWithExcludeSourceBackcompat() throws Exception { - assertAcked(prepareCreate("articles").setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id) - .addMapping("article", jsonBuilder().startObject() - .startObject("_source").field("excludes", new String[]{"comments"}).endObject() - .startObject("properties") - .startObject("comments") - .field("type", "nested") - .startObject("properties") - .startObject("message").field("type", "text").field("store", true).endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ) - ); - - List requests = new ArrayList<>(); - requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject() - .field("title", "quick brown fox") - .startObject("comments").field("message", "fox eat quick").endObject() - .endObject())); - indexRandom(true, requests); - InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit(); - builder.field("comments.message"); - builder.setFetchSource(true); - SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder))) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).fields().get("comments.message").getValue(), equalTo("fox eat 
quick")); - } - - public void testNestedInnerHitsHiglightWithExcludeSourceBackcompat() throws Exception { - assertAcked(prepareCreate("articles").setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id) - .addMapping("article", jsonBuilder().startObject() - .startObject("_source").field("excludes", new String[]{"comments"}).endObject() - .startObject("properties") - .startObject("comments") - .field("type", "nested") - .startObject("properties") - .startObject("message").field("type", "text").field("store", true).endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ) - ); - - List requests = new ArrayList<>(); - requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject() - .field("title", "quick brown fox") - .startObject("comments").field("message", "fox eat quick").endObject() - .endObject())); - indexRandom(true, requests); - InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit(); - builder.highlighter(new HighlightBuilder().field("comments.message")); - SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder))) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); - assertThat(String.valueOf(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).highlightFields().get("comments.message").getFragments()[0]), equalTo("fox eat quick")); - } - public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { assertAcked(prepareCreate("articles") .addMapping("article", jsonBuilder().startObject() diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index f65b17288ae..e0bc26c9296 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -180,7 +180,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { // the doc id is the tie-breaker } assertThat(topNIds, empty()); - assertThat(searchResponse.getHits().hits()[0].getScore(), equalTo(searchResponse.getHits().hits()[1].getScore())); + assertThat(searchResponse.getHits().hits()[0].getScore(), greaterThan(searchResponse.getHits().hits()[1].getScore())); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 079363719f1..44b8636d51a 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -375,9 +375,9 @@ public class SearchQueryIT extends ESIntegTestCase { // try the same with multi match query searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the quick brown", "field1", "field2").cutoffFrequency(3).operator(Operator.AND)).get(); assertHitCount(searchResponse, 3L); - assertFirstHit(searchResponse, hasId("3")); // better score due to different query stats - assertSecondHit(searchResponse, hasId("1")); - assertThirdHit(searchResponse, hasId("2")); + assertFirstHit(searchResponse, hasId("1")); + assertSecondHit(searchResponse, hasId("2")); + assertThirdHit(searchResponse, hasId("3")); } public void testCommonTermsQueryStackedTokens() throws Exception { @@ -467,9 +467,9 @@ public class SearchQueryIT extends ESIntegTestCase { // try the same with multi match query searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the fast brown", "field1", "field2").cutoffFrequency(3).operator(Operator.AND)).get(); assertHitCount(searchResponse, 3L); - assertFirstHit(searchResponse, hasId("3")); // better score due to different query stats - assertSecondHit(searchResponse, hasId("1")); - assertThirdHit(searchResponse, hasId("2")); + assertFirstHit(searchResponse, hasId("1")); + assertSecondHit(searchResponse, hasId("2")); + assertThirdHit(searchResponse, hasId("3")); } public void testQueryStringAnalyzedWildcard() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index a39c618fe9d..f0bb35cc9d1 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -39,7 +39,6 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java index 812928dee28..2143c7be9e0 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java @@ -96,7 +96,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase { writer.addDocument(doc); } - DirectoryReader ir = DirectoryReader.open(writer, false); + DirectoryReader ir = DirectoryReader.open(writer); WordScorer wordScorer = new LaplaceScorer(ir, MultiFields.getTerms(ir, "body_ngram"), "body_ngram", 0.95d, new BytesRef(" "), 0.5f); NoisyChannelSpellChecker suggester = new NoisyChannelSpellChecker(); @@ -238,7 +238,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase { writer.addDocument(doc); } - DirectoryReader ir = DirectoryReader.open(writer, false); + DirectoryReader ir = DirectoryReader.open(writer); LaplaceScorer wordScorer = new LaplaceScorer(ir, MultiFields.getTerms(ir, "body_ngram"), "body_ngram", 0.95d, new BytesRef(" "), 0.5f); NoisyChannelSpellChecker 
suggester = new NoisyChannelSpellChecker(); DirectSpellChecker spellchecker = new DirectSpellChecker(); @@ -321,7 +321,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase { writer.addDocument(doc); } - DirectoryReader ir = DirectoryReader.open(writer, false); + DirectoryReader ir = DirectoryReader.open(writer); WordScorer wordScorer = new LinearInterpoatingScorer(ir, MultiFields.getTerms(ir, "body_ngram"), "body_ngram", 0.85d, new BytesRef(" "), 0.5, 0.4, 0.1); NoisyChannelSpellChecker suggester = new NoisyChannelSpellChecker(); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java index e4a8ae72b91..51152733bf8 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -132,7 +132,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase { Document doc = new Document(); doc.add(new Field("field", "someText", TextField.TYPE_NOT_STORED)); writer.addDocument(doc); - DirectoryReader ir = DirectoryReader.open(writer, false); + DirectoryReader ir = DirectoryReader.open(writer); WordScorer wordScorer = testModel.buildWordScorerFactory().newScorer(ir, MultiFields.getTerms(ir , "field"), "field", 0.9d, BytesRefs.toBytesRef(" ")); assertWordScorer(wordScorer, testModel); diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 7e9bd14f9f3..dc803a46412 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -137,6 +137,32 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase { return null; } + public static void blockAllDataNodes(String repository) { + for(RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { + ((MockRepository)repositoriesService.repository(repository)).blockOnDataFiles(true); + } + } + + public static void unblockAllDataNodes(String repository) { + for(RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { + ((MockRepository)repositoriesService.repository(repository)).unblock(); + } + } + + public void waitForBlockOnAnyDataNode(String repository, TimeValue timeout) throws InterruptedException { + if (false == awaitBusy(() -> { + for(RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { + MockRepository mockRepository = (MockRepository) repositoriesService.repository(repository); + if (mockRepository.blocked()) { + return true; + } + } + return false; + }, timeout.millis(), TimeUnit.MILLISECONDS)) { + fail("Timeout waiting for repository block on any data node!!!"); + } + } + public static void unblockNode(String node) { ((MockRepository)internalCluster().getInstance(RepositoriesService.class, node).repository("test-repo")).unblock(); } diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 65337d4b632..9fb2b0f9989 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ 
b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1865,6 +1865,66 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas } } + public void testCloseIndexDuringRestore() throws Exception { + Client client = client(); + + logger.info("--> creating repository"); + assertAcked(client.admin().cluster().preparePutRepository("test-repo") + .setType("mock").setSettings(Settings.settingsBuilder() + .put("location", randomRepoPath()) + .put("compress", randomBoolean()) + .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES) + )); + + createIndex("test-idx-1", "test-idx-2"); + ensureGreen(); + + logger.info("--> indexing some data"); + for (int i = 0; i < 100; i++) { + index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i); + index("test-idx-2", "doc", Integer.toString(i), "foo", "baz" + i); + } + refresh(); + assertThat(client.prepareSearch("test-idx-1").setSize(0).get().getHits().totalHits(), equalTo(100L)); + assertThat(client.prepareSearch("test-idx-2").setSize(0).get().getHits().totalHits(), equalTo(100L)); + + logger.info("--> snapshot"); + assertThat(client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap") + .setIndices("test-idx-*").setWaitForCompletion(true).get().getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); + + logger.info("--> deleting indices before restoring"); + assertAcked(client.admin().indices().prepareDelete("test-idx-*").get()); + + blockAllDataNodes("test-repo"); + logger.info("--> execution will be blocked on all data nodes"); + + logger.info("--> start restore"); + ListenableActionFuture restoreFut = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap") + .setWaitForCompletion(true) + .execute(); + + logger.info("--> waiting for block to kick in"); + waitForBlockOnAnyDataNode("test-repo", TimeValue.timeValueSeconds(60)); + + logger.info("--> close index while restore is running"); + try { + client.admin().indices().prepareClose("test-idx-1").get(); + fail("Expected closing index to fail during restore"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Cannot close indices that are being restored: [test-idx-1]")); + } + + logger.info("--> unblocking all data nodes"); + unblockAllDataNodes("test-repo"); + + logger.info("--> wait for restore to finish"); + RestoreSnapshotResponse restoreSnapshotResponse = restoreFut.get(); + logger.info("--> check that all shards were recovered"); + assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); + assertThat(restoreSnapshotResponse.getRestoreInfo().successfulShards(), greaterThan(0)); + assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0)); + } + public void testDeleteOrphanSnapshot() throws Exception { Client client = client(); diff --git a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java index 427dce714e8..3193aaf458e 100644 --- a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java +++ b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java @@ -21,13 +21,11 @@ package org.elasticsearch.test; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingService; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import 
org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.DiscoveryStats; -import org.elasticsearch.node.service.NodeService; public class NoopDiscovery implements Discovery { @@ -42,11 +40,6 @@ public class NoopDiscovery implements Discovery { return null; } - @Override - public void setNodeService(@Nullable NodeService nodeService) { - - } - @Override public void setRoutingService(RoutingService routingService) { diff --git a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java index 4cc7f8f8487..95984da55f6 100644 --- a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java +++ b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java @@ -20,12 +20,12 @@ package org.elasticsearch.test.geo; import com.carrotsearch.randomizedtesting.generators.RandomInts; -import com.spatial4j.core.context.jts.JtsSpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.exception.InvalidShapeException; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.impl.Range; +import org.locationtech.spatial4j.context.jts.JtsSpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.impl.Range; import com.vividsolutions.jts.algorithm.ConvexHull; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; @@ -45,7 +45,7 @@ import org.junit.Assert; import java.util.Random; -import static com.spatial4j.core.shape.SpatialRelation.CONTAINS; +import static org.locationtech.spatial4j.shape.SpatialRelation.CONTAINS; /** * Random geoshape generation utilities for randomized {@code geo_shape} type testing diff --git a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java index 3400f9637ff..5fff4a61f86 100644 --- a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java +++ b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java @@ -19,12 +19,12 @@ package org.elasticsearch.test.hamcrest; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.ShapeCollection; -import com.spatial4j.core.shape.impl.GeoCircle; -import com.spatial4j.core.shape.impl.RectangleImpl; -import com.spatial4j.core.shape.jts.JtsGeometry; -import com.spatial4j.core.shape.jts.JtsPoint; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeCollection; +import org.locationtech.spatial4j.shape.impl.GeoCircle; +import org.locationtech.spatial4j.shape.impl.RectangleImpl; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; +import org.locationtech.spatial4j.shape.jts.JtsPoint; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.LineString; diff --git a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index 5e9bc80b9a9..5fc24094bd3 100644 --- a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ 
b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -243,9 +243,9 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { // fuzzy queries assertExplanation(QueryBuilders.fuzzyQuery("field", "the").fuzziness(Fuzziness.fromEdits(2)), - containsString("field:the field:tree^0.3333333"), true); + containsString("field:the (field:tree)^0.3333333"), true); assertExplanation(QueryBuilders.fuzzyQuery("field", "jump"), - containsString("field:jumps^0.75"), true); + containsString("(field:jumps)^0.75"), true); // more like this queries assertExplanation(QueryBuilders.moreLikeThisQuery(new String[] { "field" }, null, MoreLikeThisQueryBuilder.ids("1")) diff --git a/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json b/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json index 233d6f3e3d7..0ed95e16332 100644 --- a/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json +++ b/core/src/test/resources/org/elasticsearch/index/analysis/keep_analysis.json @@ -9,9 +9,7 @@ }, "my_case_sensitive_keep_filter":{ "type":"keep", - "keep_words" : ["Hello", "worlD"], - "enable_position_increments" : false, - "version" : "4.2" + "keep_words" : ["Hello", "worlD"] } } } diff --git a/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1 deleted file mode 100644 index dcdeb2cb477..00000000000 --- a/distribution/licenses/lucene-analyzers-common-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1e0e8243a4410be20c34683034fafa7bb52e55cc \ No newline at end of file diff --git a/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..74d21bae946 --- /dev/null +++ b/distribution/licenses/lucene-analyzers-common-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +3510af19947deadd929123aaf14d69b4bdec759a \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1 deleted file mode 100644 index dd5c846363a..00000000000 --- a/distribution/licenses/lucene-backward-codecs-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -68480974b2f54f519763632a7c1c5d51cbff3805 \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..ee6143bec14 --- /dev/null +++ b/distribution/licenses/lucene-backward-codecs-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +247ad7c17cb7c742d7a9abd5d9980e4fab815178 \ No newline at end of file diff --git a/distribution/licenses/lucene-core-5.5.0.jar.sha1 b/distribution/licenses/lucene-core-5.5.0.jar.sha1 deleted file mode 100644 index 70bd0b63bba..00000000000 --- a/distribution/licenses/lucene-core-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a74fd869bb5ad7fe6b4cd29df9543a34aea81164 \ No newline at end of file diff --git a/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..2d39f84d21e --- /dev/null +++ b/distribution/licenses/lucene-core-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +c0712dbec58abad545646edab67d58f7373f5329 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-5.5.0.jar.sha1 
b/distribution/licenses/lucene-grouping-5.5.0.jar.sha1 deleted file mode 100644 index f905a2081b6..00000000000 --- a/distribution/licenses/lucene-grouping-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -437cacec0cfa349b1dee049a7c0e32df3b8ecc07 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..a3ce82c8a04 --- /dev/null +++ b/distribution/licenses/lucene-grouping-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +7573e3efb12dd16fdc991edaf408877dab20c030 \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1 b/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1 deleted file mode 100644 index 6ea3c5a0c13..00000000000 --- a/distribution/licenses/lucene-highlighter-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ecdd913cb7c61a5435591f0a7268b01ab3fc782a \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..9259a2c66c1 --- /dev/null +++ b/distribution/licenses/lucene-highlighter-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +96ef0a9a43a5fc99d27bb7e7d61517ee4c7e54a4 \ No newline at end of file diff --git a/distribution/licenses/lucene-join-5.5.0.jar.sha1 b/distribution/licenses/lucene-join-5.5.0.jar.sha1 deleted file mode 100644 index 3cc19b170ed..00000000000 --- a/distribution/licenses/lucene-join-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -af4f55e36e3a7d1f4e9ed9efdccf7e22b767d6e8 \ No newline at end of file diff --git a/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..4959f5f163c --- /dev/null +++ b/distribution/licenses/lucene-join-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +d93de34947d37e31a337cdfed400333588c378d8 \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-5.5.0.jar.sha1 b/distribution/licenses/lucene-memory-5.5.0.jar.sha1 deleted file mode 100644 index 1f4ebc783ee..00000000000 --- a/distribution/licenses/lucene-memory-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -09a327fe9f20fc7e3912ed213bdd5cb4b6d2a65a \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..5218d0a019e --- /dev/null +++ b/distribution/licenses/lucene-memory-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +9c292930b1828e68f06509944a5346c141d56fd4 \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-5.5.0.jar.sha1 b/distribution/licenses/lucene-misc-5.5.0.jar.sha1 deleted file mode 100644 index 76131ae81c5..00000000000 --- a/distribution/licenses/lucene-misc-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -504d855a1a38190622fdf990b2298c067e7d60ca \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..947722edfd3 --- /dev/null +++ b/distribution/licenses/lucene-misc-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +866ed93f48683e877ffa4d9baa1323dcffbc65d7 \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-5.5.0.jar.sha1 
b/distribution/licenses/lucene-queries-5.5.0.jar.sha1 deleted file mode 100644 index 5790b2e4776..00000000000 --- a/distribution/licenses/lucene-queries-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -60ca161c1dd5f127907423b6f039b846fb713de0 \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..6caf86a6b96 --- /dev/null +++ b/distribution/licenses/lucene-queries-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +967d9c2647bdd4d88961747f7436a5a92aa0385b \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1 b/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1 deleted file mode 100644 index 8e4a1e66138..00000000000 --- a/distribution/licenses/lucene-queryparser-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0fddc49725b562fd48dff0cff004336ad2a090a4 \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..b3e92d3f168 --- /dev/null +++ b/distribution/licenses/lucene-queryparser-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +981030d83a7504267f3141d7365fad9b46d51465 \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1 b/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1 deleted file mode 100644 index 20c2a1c9527..00000000000 --- a/distribution/licenses/lucene-sandbox-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b7da8e187acd6e4d7781ba41fac8b9082dd27409 \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..7b5176c4963 --- /dev/null +++ b/distribution/licenses/lucene-sandbox-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +707691b1baf22c29020569f5b875d200a4955411 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-5.5.0.jar.sha1 b/distribution/licenses/lucene-spatial-5.5.0.jar.sha1 deleted file mode 100644 index dd645be87e3..00000000000 --- a/distribution/licenses/lucene-spatial-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c14965bf67179bee93cc8efc58d09a75d230c891 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..9df2a16b886 --- /dev/null +++ b/distribution/licenses/lucene-spatial-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +be9e78130a069983f611f484d5b7b87bda0d6370 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..6badc36d361 --- /dev/null +++ b/distribution/licenses/lucene-spatial-extras-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +edeef6ce8a58d5e6a074bebf545918d04e8579e1 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1 deleted file mode 100644 index c0b9d4ba838..00000000000 --- a/distribution/licenses/lucene-spatial3d-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3e5ab4ea3e2052166100482f7a56b75bfa4ab0ad \ No newline at end of file diff --git 
a/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..480ae590aed --- /dev/null +++ b/distribution/licenses/lucene-spatial3d-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +d86a7ba859576bdcee1dacd8f407ccf71f982c60 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-5.5.0.jar.sha1 b/distribution/licenses/lucene-suggest-5.5.0.jar.sha1 deleted file mode 100644 index adce0756ecf..00000000000 --- a/distribution/licenses/lucene-suggest-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -51f9d52332f556976a5099817e35d37c69a24597 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 b/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..7835298c4a2 --- /dev/null +++ b/distribution/licenses/lucene-suggest-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +a3860de6502576f142dc948eb2005fa4dc0c27c5 \ No newline at end of file diff --git a/distribution/licenses/spatial4j-0.5.jar.sha1 b/distribution/licenses/spatial4j-0.5.jar.sha1 deleted file mode 100644 index 4bcf7a33b15..00000000000 --- a/distribution/licenses/spatial4j-0.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6e16edaf6b1ba76db7f08c2f3723fce3b358ecc3 \ No newline at end of file diff --git a/distribution/licenses/spatial4j-0.6.jar.sha1 b/distribution/licenses/spatial4j-0.6.jar.sha1 new file mode 100644 index 00000000000..740a25b1c90 --- /dev/null +++ b/distribution/licenses/spatial4j-0.6.jar.sha1 @@ -0,0 +1 @@ +21b15310bddcfd8c72611c180f20cf23279809a3 \ No newline at end of file diff --git a/docs/java-api/query-dsl/geo-shape-query.asciidoc b/docs/java-api/query-dsl/geo-shape-query.asciidoc index c753cd72c1a..e08410acbdb 100644 --- a/docs/java-api/query-dsl/geo-shape-query.asciidoc +++ b/docs/java-api/query-dsl/geo-shape-query.asciidoc @@ -10,9 +10,9 @@ to your classpath in order to use this type: [source,xml] ----------------------------------------------- - com.spatial4j + org.locationtech.spatial4j spatial4j - 0.4.1 <1> + 0.6 <1> @@ -27,7 +27,7 @@ to your classpath in order to use this type: ----------------------------------------------- -<1> check for updates in http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.spatial4j%22%20AND%20a%3A%22spatial4j%22[Maven Central] +<1> check for updates in http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.locationtech.spatial4j%22%20AND%20a%3A%22spatial4j%22[Maven Central] <2> check for updates in http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.vividsolutions%22%20AND%20a%3A%22jts%22[Maven Central] [source,java] diff --git a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc index 8b1f58f7ff0..e649928810b 100644 --- a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc @@ -111,3 +111,35 @@ Zone:: 'Z' outputs offset without a colon, 'ZZ' outputs the offset with a colon, Zone names:: Time zone names ('z') cannot be parsed. Any characters in the pattern that are not in the ranges of ['a'..'z'] and ['A'..'Z'] will be treated as quoted text. For instance, characters like ':', '.', ' ', '#' and '?' will appear in the resulting time text even they are not embraced within single quotes. 
+ +[[time-zones]] +==== Time zone in date range aggregations + +Dates can be converted from another time zone to UTC by specifying the `time_zone` parameter. + +Time zones may either be specified as an ISO 8601 UTC offset (e.g. +01:00 or -08:00) or as one of +the http://joda-time.sourceforge.net/timezones.html[time zone ids] from the TZ database. + +The `time_zone` parameter is also applied to rounding in date math expressions. As an example, +to round to the beginning of the day in the CET time zone, you can do the following: + +[source,js] +-------------------------------------------------- +{ + "aggs": { + "range": { + "date_range": { + "field": "date", + "time_zone": "CET", + "ranges": [ + { "to": "2016-02-15/d" }, <1> + { "from": "2016-02-15/d", "to" : "now/d" }, <2> + { "from": "now/d" } + ] + } + } + } + } +-------------------------------------------------- +<1> This date will be converted to `2016-02-15T00:00:00.000+01:00`. +<2> `now/d` will be rounded to the beginning of the day in the CET time zone. diff --git a/docs/reference/cluster/nodes-info.asciidoc b/docs/reference/cluster/nodes-info.asciidoc index a3072768ca6..5ed979abd0d 100644 --- a/docs/reference/cluster/nodes-info.asciidoc +++ b/docs/reference/cluster/nodes-info.asciidoc @@ -17,7 +17,7 @@ The second command selectively retrieves nodes information of only By default, it just returns all attributes and core settings for a node. It also allows to get only information on `settings`, `os`, `process`, `jvm`, -`thread_pool`, `transport`, `http` and `plugins`: +`thread_pool`, `transport`, `http`, `plugins` and `ingest`: [source,js] -------------------------------------------------- @@ -122,3 +122,71 @@ The result will look similar to: } } -------------------------------------------------- + +[float] +[[ingest-info]] +==== Ingest information + +`ingest` - if set, the result will contain details about the available +processors per node: + +* `type`: the processor type + +The result will look similar to: + +[source,js] +-------------------------------------------------- +{ + "cluster_name": "elasticsearch", + "nodes": { + "O70_wBv6S9aPPcAKdSUBtw": { + "ingest": { + "processors": [ + { + "type": "date" + }, + { + "type": "uppercase" + }, + { + "type": "set" + }, + { + "type": "lowercase" + }, + { + "type": "gsub" + }, + { + "type": "convert" + }, + { + "type": "remove" + }, + { + "type": "fail" + }, + { + "type": "foreach" + }, + { + "type": "split" + }, + { + "type": "trim" + }, + { + "type": "rename" + }, + { + "type": "join" + }, + { + "type": "append" + } + ] + } + } + } +} +-------------------------------------------------- \ No newline at end of file diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 7d6614342b5..f7e1f68dec5 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -148,6 +148,10 @@ Other index settings are available in index modules: Enable or disable dynamic mapping for an index. +<<index-modules-merge,Merging>>:: + + Control over how shards are merged by the background merge process.
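To make the `time_zone` rounding described in the date range section above concrete, here is a minimal, self-contained Java sketch; the class name and the use of `java.time` are illustrative assumptions for this note, not part of the patch. It shows which UTC instants `2016-02-15/d` and `now/d` resolve to when rounded in CET:

[source,java]
--------------------------------------------------
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;

public class TimeZoneRoundingSketch {
    public static void main(String[] args) {
        ZoneId cet = ZoneId.of("CET");

        // "2016-02-15/d" with time_zone=CET: midnight local time in CET ...
        ZonedDateTime to = LocalDate.parse("2016-02-15").atStartOfDay(cet);
        System.out.println(to);             // 2016-02-15T00:00+01:00[CET]
        // ... which is 23:00 UTC of the previous day, the instant the range actually uses.
        System.out.println(to.toInstant()); // 2016-02-14T23:00:00Z

        // "now/d" with time_zone=CET: the current time, truncated to the start of the CET day.
        ZonedDateTime nowRounded = ZonedDateTime.now(cet).truncatedTo(ChronoUnit.DAYS);
        System.out.println(nowRounded.toInstant());
    }
}
--------------------------------------------------

The point of the callouts above is the same as in this sketch: rounding happens in the local zone first and is only then converted to UTC, which is why the resulting instants differ from plain UTC midnight.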
+ <>:: Configure custom similarity settings to customize how search results are @@ -173,6 +177,8 @@ include::index-modules/allocation.asciidoc[] include::index-modules/mapper.asciidoc[] +include::index-modules/merge.asciidoc[] + include::index-modules/similarity.asciidoc[] include::index-modules/slowlog.asciidoc[] diff --git a/docs/reference/index-modules/merge.asciidoc b/docs/reference/index-modules/merge.asciidoc new file mode 100644 index 00000000000..7e5260f95d4 --- /dev/null +++ b/docs/reference/index-modules/merge.asciidoc @@ -0,0 +1,30 @@ +[[index-modules-merge]] +== Merge + +A shard in Elasticsearch is a Lucene index, and a Lucene index is broken down +into segments. Segments are internal storage elements in the index where the +index data is stored, and are immutable. Smaller segments are periodically +merged into larger segments to keep the index size at bay and to expunge +deletes. + +The merge process uses auto-throttling to balance the use of hardware +resources between merging and other activities like search. + +[float] +[[merge-scheduling]] +=== Merge scheduling + +The merge scheduler (ConcurrentMergeScheduler) controls the execution of merge +operations when they are needed. Merges run in separate threads, and when the +maximum number of threads is reached, further merges will wait until a merge +thread becomes available. + +The merge scheduler supports the following _dynamic_ setting: + +`index.merge.scheduler.max_thread_count`:: + + The maximum number of threads that may be merging at once. Defaults to + `Math.max(1, Math.min(4, Runtime.getRuntime().availableProcessors() / 2))`, + which works well for a good solid-state disk (SSD). If your index is on + spinning platter drives instead, decrease this to 1. + diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 95d7005ee34..0827baa6ea1 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -620,6 +620,20 @@ but is very useful for bookkeeping and tracing errors to specific processors. See <> to learn more about the `on_failure` field and error handling in pipelines. +The <<ingest-info,node info API>> can be used to figure out what processors are available in a cluster: +it provides a per-node list of the available processors. + +Custom processors must be installed on all nodes. The put pipeline API will fail if a processor specified in a pipeline +doesn't exist on all nodes. If you rely on custom processor plugins make sure to mark these plugins as mandatory by adding +the `plugin.mandatory` setting to the `config/elasticsearch.yml` file, for example: + +[source,yaml] +-------------------------------------------------- +plugin.mandatory: ingest-attachment,ingest-geoip +-------------------------------------------------- + +A node will not start if either of these plugins is not available. + [[append-procesesor]] === Append Processor Appends one or more values to an existing array if the field already exists and it is an array. diff --git a/docs/reference/query-dsl/has-child-query.asciidoc b/docs/reference/query-dsl/has-child-query.asciidoc index 24951bbe930..01c3c35db54 100644 --- a/docs/reference/query-dsl/has-child-query.asciidoc +++ b/docs/reference/query-dsl/has-child-query.asciidoc @@ -23,7 +23,7 @@ an example: ==== Scoring capabilities The `has_child` also has scoring support. The -supported score modes are `min`, `max`, `total`, `avg` or `none`. The default is +supported score modes are `min`, `max`, `sum`, `avg` or `none`.
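As a worked example of the `index.merge.scheduler.max_thread_count` default documented in the merge settings above, this small Java sketch (the class name is hypothetical) evaluates the documented formula for a few processor counts:

[source,java]
--------------------------------------------------
public class MaxMergeThreadsSketch {
    // The documented default: max(1, min(4, availableProcessors / 2)).
    static int defaultMaxThreadCount(int availableProcessors) {
        return Math.max(1, Math.min(4, availableProcessors / 2));
    }

    public static void main(String[] args) {
        // Prints: 1 -> 1, 2 -> 1, 4 -> 2, 8 -> 4, 16 -> 4 (the cap at 4 kicks in)
        for (int procs : new int[] {1, 2, 4, 8, 16}) {
            System.out.println(procs + " processors -> " + defaultMaxThreadCount(procs) + " merge threads");
        }
    }
}
--------------------------------------------------

The cap at four threads reflects that merging tends to be I/O-bound rather than CPU-bound, and on spinning disks the documentation above recommends dropping to a single thread.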
The default is `none` and yields the same behaviour as in previous versions. If the score mode is set to another value than `none`, the scores of all the matching child documents are aggregated into the associated parent diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index 03037207fb0..bef563cd965 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -43,6 +43,13 @@ using the <> API, with: curl localhost:9200/_nodes/stats/process?pretty -------------------------------------------------- +[float] +[[max-number-of-threads]] +==== Number of threads + +Make sure that the number of threads that the Elasticsearch user can +create is at least 2048. + [float] [[vm-max-map-count]] ==== Virtual memory diff --git a/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1 deleted file mode 100644 index 15c992bf460..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4766406a2933ac9df62c49d6619caabb9943aba2 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..d9a29f17c50 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +8d11bf581b0afc25f87a57c06834cd85930d2ffa \ No newline at end of file diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java index 7d018adc07f..e717ea6d6fb 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java @@ -26,6 +26,8 @@ import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; +import org.elasticsearch.search.aggregations.bucket.missing.Missing; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; @@ -38,6 +40,8 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; +import static org.elasticsearch.search.aggregations.AggregationBuilders.missing; +import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -498,6 +502,42 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { checkUpperLowerBounds(stats, sigma); } + public void testEmptySubAggregation() { + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + 
.addAggregation(terms("value").field("value") + .subAggregation(missing("values").field("values") + .subAggregation(extendedStats("stats").field("value")))) + .execute().actionGet(); + + assertHitCount(searchResponse, 10); + + Terms terms = searchResponse.getAggregations().get("value"); + assertThat(terms, notNullValue()); + assertThat(terms.getBuckets().size(), equalTo(10)); + + for (Terms.Bucket bucket : terms.getBuckets()) { + assertThat(bucket.getDocCount(), equalTo(1L)); + + Missing missing = bucket.getAggregations().get("values"); + assertThat(missing, notNullValue()); + assertThat(missing.getDocCount(), equalTo(0L)); + + ExtendedStats stats = missing.getAggregations().get("stats"); + assertThat(stats, notNullValue()); + assertThat(stats.getName(), equalTo("stats")); + assertThat(stats.getSumOfSquares(), equalTo(0.0)); + assertThat(stats.getCount(), equalTo(0L)); + assertThat(stats.getSum(), equalTo(0.0)); + assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY)); + assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY)); + assertThat(Double.isNaN(stats.getStdDeviation()), is(true)); + assertThat(Double.isNaN(stats.getAvg()), is(true)); + assertThat(Double.isNaN(stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER)), is(true)); + assertThat(Double.isNaN(stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER)), is(true)); + } + } + private void assertShardExecutionState(SearchResponse response, int expectedFailures) throws Exception { ShardSearchFailure[] failures = response.getShardFailures(); @@ -515,4 +555,4 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { assertThat(stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER), equalTo(stats.getAvg() - (stats.getStdDeviation() * sigma))); } -} \ No newline at end of file +} diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java index fffeabcb807..4689d5fba03 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java @@ -169,14 +169,21 @@ public class FunctionScoreTests extends ESIntegTestCase { } } + /** make sure min_score works if functions is empty, see https://github.com/elastic/elasticsearch/issues/10253 */ public void testWithEmptyFunctions() throws IOException, ExecutionException, InterruptedException { assertAcked(prepareCreate("test")); ensureYellow(); index("test", "testtype", "1", jsonBuilder().startObject().field("text", "test text").endObject()); refresh(); - // make sure that min_score works if functions is empty, see https://github.com/elastic/elasticsearch/issues/10253 - float termQueryScore = 0.19178301f; + SearchResponse termQuery = client().search( + searchRequest().source( + searchSource().explain(true).query( + termQuery("text", "text")))).get(); + assertSearchResponse(termQuery); + assertThat(termQuery.getHits().totalHits(), equalTo(1L)); + float termQueryScore = termQuery.getHits().getAt(0).getScore(); + for (CombineFunction combineFunction : CombineFunction.values()) { testMinScoreApplied(combineFunction, termQueryScore); } diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml index c23e5da95a1..7f84c1aac8b 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml +++ 
b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml @@ -58,9 +58,6 @@ --- "wait_for_completion=false": - - skip: - version: "0.0.0 - " - reason: breaks other tests by leaving a running reindex behind - do: index: index: source @@ -79,6 +76,7 @@ dest: index: dest - match: {task: '/.+:\d+/'} + - set: {task: task} - is_false: updated - is_false: version_conflicts - is_false: batches @@ -87,6 +85,11 @@ - is_false: took - is_false: created + - do: + tasks.list: + wait_for_completion: true + task_id: $task + --- "Response format for version conflict": - do: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml index 383e945bbf2..94ffa2349a9 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/10_basic.yaml @@ -37,6 +37,7 @@ wait_for_completion: false index: test - match: {task: '/.+:\d+/'} + - set: {task: task} - is_false: updated - is_false: version_conflicts - is_false: batches @@ -45,6 +46,11 @@ - is_false: took - is_false: created + - do: + tasks.list: + wait_for_completion: true + task_id: $task + --- "Response for version conflict": - do: diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1 deleted file mode 100644 index 18440dcdc04..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -69a6e72d322b6643f1b419e6c9cc46623a2404e9 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..538d2ad8216 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +38fda9b86e4f68eb6c9d31fb636a2540da219927 \ No newline at end of file diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java index 24890fed5a9..5f3e1644481 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IndexableBinaryStringToolsTests.java @@ -23,7 +23,6 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.TimeUnits; import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter; import org.junit.BeforeClass; @@ -110,14 +109,14 @@ public class IndexableBinaryStringToolsTests extends LuceneTestCase { int encodedLen1 = IndexableBinaryStringTools.getEncodedLength( originalArray1, 0, numBytes1); if (encodedLen1 > encoded1.length) - encoded1 = new char[ArrayUtil.oversize(encodedLen1, RamUsageEstimator.NUM_BYTES_CHAR)]; + encoded1 = new char[ArrayUtil.oversize(encodedLen1, Character.BYTES)]; IndexableBinaryStringTools.encode(originalArray1, 0, numBytes1, encoded1, 0, encodedLen1); int encodedLen2 = 
IndexableBinaryStringTools.getEncodedLength(original2, 0, numBytes2); if (encodedLen2 > encoded2.length) - encoded2 = new char[ArrayUtil.oversize(encodedLen2, RamUsageEstimator.NUM_BYTES_CHAR)]; + encoded2 = new char[ArrayUtil.oversize(encodedLen2, Character.BYTES)]; IndexableBinaryStringTools.encode(original2, 0, numBytes2, encoded2, 0, encodedLen2); @@ -196,7 +195,7 @@ public class IndexableBinaryStringToolsTests extends LuceneTestCase { int encodedLen = IndexableBinaryStringTools.getEncodedLength(binary, 0, numBytes); if (encoded.length < encodedLen) - encoded = new char[ArrayUtil.oversize(encodedLen, RamUsageEstimator.NUM_BYTES_CHAR)]; + encoded = new char[ArrayUtil.oversize(encodedLen, Character.BYTES)]; IndexableBinaryStringTools.encode(binary, 0, numBytes, encoded, 0, encodedLen); diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1 deleted file mode 100644 index 832db46564e..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e9d68dd5d9fae3349b81de5952d0ee8115c696a4 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..b90115da4ab --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +352fea7a169ada6a7ae18e4ec34559496e09b465 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1 deleted file mode 100644 index 3436526863d..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c4735c43440ebcb20f2b6f49f508fedc12f5366c \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..7cbe648e0bd --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +445f5ea7822d0dd6b91364ec119cd6cb4635d285 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1 deleted file mode 100644 index 95b85f7edbd..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a31a4d1476d45738a460374d9801dc5ed9b49c1a \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..03c96786de2 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +0b216b7b9ff583bc1382edc8adfee4d4acd02859 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1 deleted file mode 100644 index 
d5a28231e65..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1a7505d011aca54c004d0fc86a490d5f054bb903 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 new file mode 100644 index 00000000000..f27a98f63ba --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.0-snapshot-bea235f.jar.sha1 @@ -0,0 +1 @@ +8d161a8c7e5b5b82f64dc5df2ca46197a3716672 \ No newline at end of file diff --git a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml index ed752971fcb..67bb7340ce3 100644 --- a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml +++ b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml @@ -1,5 +1,11 @@ "Ingest attachment plugin installed": - do: - cluster.stats: {} + cluster.state: {} - - match: { nodes.plugins.0.name: ingest-attachment } + - set: {master_node: master} + + - do: + nodes.info: {} + + - match: { nodes.$master.plugins.0.name: ingest-attachment } + - match: { nodes.$master.ingest.processors.11.type: attachment } diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml index b522cb77780..b924484aa7d 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml @@ -1,5 +1,11 @@ "Ingest plugin installed": - do: - cluster.stats: {} + cluster.state: {} - - match: { nodes.plugins.0.name: ingest-geoip } + - set: {master_node: master} + + - do: + nodes.info: {} + + - match: { nodes.$master.plugins.0.name: ingest-geoip } + - match: { nodes.$master.ingest.processors.3.type: geoip } diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index ce78c75d783..802ca1d7653 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -26,7 +26,6 @@ import java.util.Map; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.common.settings.Settings; @@ -72,12 +71,10 @@ public class Murmur3FieldMapper extends LongFieldMapper { @Override protected void setupFieldType(BuilderContext context) { super.setupFieldType(context); - if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) { - fieldType.setIndexOptions(IndexOptions.NONE); - defaultFieldType.setIndexOptions(IndexOptions.NONE); - fieldType.setHasDocValues(true); - defaultFieldType.setHasDocValues(true); - } + fieldType.setIndexOptions(IndexOptions.NONE); + defaultFieldType.setIndexOptions(IndexOptions.NONE); + fieldType.setHasDocValues(true); + 
defaultFieldType.setHasDocValues(true); } @Override @@ -97,17 +94,11 @@ public class Murmur3FieldMapper extends LongFieldMapper { Builder builder = new Builder(name); // tweaking these settings is no longer allowed, the entire purpose of murmur3 fields is to store a hash - if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) { - if (node.get("doc_values") != null) { - throw new MapperParsingException("Setting [doc_values] cannot be modified for field [" + name + "]"); - } - if (node.get("index") != null) { - throw new MapperParsingException("Setting [index] cannot be modified for field [" + name + "]"); - } + if (node.get("doc_values") != null) { + throw new MapperParsingException("Setting [doc_values] cannot be modified for field [" + name + "]"); } - - if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.indexOptions(IndexOptions.DOCS); + if (node.get("index") != null) { + throw new MapperParsingException("Setting [index] cannot be modified for field [" + name + "]"); } parseNumberField(builder, name, node, parserContext); diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java index 072c0db3e59..16865eb98b6 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java @@ -22,10 +22,7 @@ package org.elasticsearch.index.mapper.murmur3; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; @@ -33,22 +30,14 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - MapperRegistry mapperRegistry; IndexService indexService; DocumentMapperParser parser; @@ -131,38 +120,4 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { assertTrue(e.getMessage().contains("Setting [index] cannot be modified")); } } - - public void testDocValuesSettingBackcompat() throws Exception { - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - indexService = createIndex("test_bwc", settings); - parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), - indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext); - String 
mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "murmur3") - .field("doc_values", false) - .endObject().endObject().endObject().endObject().string(); - - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); - Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field"); - assertFalse(mapper.fieldType().hasDocValues()); - } - - public void testIndexSettingBackcompat() throws Exception { - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - indexService = createIndex("test_bwc", settings); - parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), - indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "murmur3") - .field("index", "not_analyzed") - .endObject().endObject().endObject().endObject().string(); - - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); - Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field"); - assertEquals(IndexOptions.DOCS, mapper.fieldType().indexOptions()); - } - - // TODO: add more tests } diff --git a/plugins/mapper-size/build.gradle b/plugins/mapper-size/build.gradle index 7af65d19ef3..7d5aa1ee276 100644 --- a/plugins/mapper-size/build.gradle +++ b/plugins/mapper-size/build.gradle @@ -22,3 +22,6 @@ esplugin { classname 'org.elasticsearch.plugin.mapper.MapperSizePlugin' } +// TODO: migrate to points +compileJava.options.compilerArgs << "-Xlint:-deprecation" +compileTestJava.options.compilerArgs << "-Xlint:-deprecation" diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index 984e83a438e..cfc7e29486c 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.size; import org.apache.lucene.document.Field; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,7 +38,6 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseStore; public class SizeFieldMapper extends MetadataFieldMapper { @@ -94,9 +92,6 @@ public class SizeFieldMapper extends MetadataFieldMapper { if (fieldName.equals("enabled")) { builder.enabled(lenientNodeBooleanValue(fieldNode) ? 
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); iterator.remove(); - } else if (fieldName.equals("store") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - builder.store(parseStore(fieldName, fieldNode.toString(), parserContext)); - iterator.remove(); } } return builder; diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index d6b64df9e5d..174520cfada 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -19,30 +19,20 @@ package org.elasticsearch.index.mapper.size; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -55,15 +45,9 @@ public class SizeMappingTests extends ESSingleNodeTestCase { MapperService mapperService; DocumentMapperParser parser; - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); // uses index.version.created - } - @Before public void before() { indexService = createIndex("test"); - Map metadataMappers = new HashMap<>(); IndicesModule indices = new IndicesModule(); indices.registerMetadataMapper(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser()); mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), indices.getMapperRegistry(), indexService::newQueryShardContext); @@ -87,31 +71,6 @@ public class SizeMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); } - public void testSizeEnabledAndStoredBackcompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_size").field("enabled", true).field("store", "yes").endObject() - .endObject().endObject().string(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - - indexService = createIndex("test2", indexSettings); - MapperRegistry mapperRegistry = new MapperRegistry( - Collections.emptyMap(), - Collections.singletonMap(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser())); - parser = new 
DocumentMapperParser(indexService.getIndexSettings(), mapperService, - indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext); - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); - - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); - ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1")); - - assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(true)); - assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); - } - public void testSizeDisabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java index 88b9d187dcf..fe2c32723e2 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/SmbDirectoryWrapper.java @@ -60,7 +60,7 @@ public final class SmbDirectoryWrapper extends FilterDirectory { static final int CHUNK_SIZE = 8192; public SmbFSIndexOutput(String name) throws IOException { - super("SmbFSIndexOutput(path=\"" + fsDirectory.getDirectory().resolve(name) + "\")", new FilterOutputStream(Channels.newOutputStream(Files.newByteChannel(fsDirectory.getDirectory().resolve(name), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.READ, StandardOpenOption.WRITE))) { + super("SmbFSIndexOutput(path=\"" + fsDirectory.getDirectory().resolve(name) + "\")", name, new FilterOutputStream(Channels.newOutputStream(Files.newByteChannel(fsDirectory.getDirectory().resolve(name), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.READ, StandardOpenOption.WRITE))) { // This implementation ensures, that we never write more than CHUNK_SIZE bytes: @Override public void write(byte[] b, int offset, int length) throws IOException { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json index 9fe9bfe3cad..c3dc0a18b45 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json @@ -44,13 +44,13 @@ "type" : "string", "description" : "The name of the tokenizer to use for the analysis" }, - "detail": { + "explain": { "type" : "boolean", "description" : "With `true`, outputs more advanced details. 
(default: false)" }, "attributes": { "type" : "list", - "description" : "A comma-separated list of token attributes to output, this parameter works only with `detail=true`" + "description" : "A comma-separated list of token attributes to output, this parameter works only with `explain=true`" }, "format": { "type": "enum", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json index 43be35a5a86..12f0d11c5fc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json @@ -12,7 +12,7 @@ }, "metric": { "type": "list", - "options": ["settings", "os", "process", "jvm", "thread_pool", "transport", "http", "plugins"], + "options": ["settings", "os", "process", "jvm", "thread_pool", "transport", "http", "plugins", "ingest"], "description": "A comma-separated list of metrics you wish returned. Leave empty to return all." } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json index 7e8683b3475..5cdeed1b142 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json @@ -31,6 +31,10 @@ "parent_task": { "type" : "number", "description" : "Return tasks with specified parent task id. Set to -1 to return all." + }, + "wait_for_completion": { + "type": "boolean", + "description": "Wait for the matching tasks to complete (default: false)" } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml index 88160ef4f1e..93ffe0d5db1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml @@ -75,7 +75,7 @@ setup: "Detail response with Analyzer": - do: indices.analyze: - body: {"text": "This is troubled", "analyzer": standard, "explain": true} + body: {"text": "This is troubled", "analyzer": standard, "explain": "true"} - length: { detail.analyzer.tokens: 3 } - match: { detail.analyzer.name: standard } - match: { detail.analyzer.tokens.0.token: this } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml index b494161aff1..ced2e9e4850 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/10_crud.yaml @@ -1,3 +1,28 @@ +--- +"Check availability of default processors": + - do: + cluster.state: {} + + - set: {master_node: master} + + - do: + nodes.info: {} + + - match: { nodes.$master.ingest.processors.0.type: date } + - match: { nodes.$master.ingest.processors.1.type: uppercase } + - match: { nodes.$master.ingest.processors.2.type: set } + - match: { nodes.$master.ingest.processors.3.type: lowercase } + - match: { nodes.$master.ingest.processors.4.type: gsub } + - match: { nodes.$master.ingest.processors.5.type: convert } + - match: { nodes.$master.ingest.processors.6.type: remove } + - match: { nodes.$master.ingest.processors.7.type: fail } + - match: { nodes.$master.ingest.processors.8.type: foreach } + - match: { nodes.$master.ingest.processors.9.type: split } + - match: { 
nodes.$master.ingest.processors.10.type: trim } + - match: { nodes.$master.ingest.processors.11.type: rename } + - match: { nodes.$master.ingest.processors.12.type: join } + - match: { nodes.$master.ingest.processors.13.type: append } + --- "Test basic pipeline crud": - do: diff --git a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java index 330758223a5..576ecf2d1ee 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/cli/CliToolTestCase.java @@ -29,6 +29,10 @@ import org.elasticsearch.test.StreamsUtils; import org.junit.After; import org.junit.Before; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.isEmptyString; +import static org.hamcrest.Matchers.not; + public abstract class CliToolTestCase extends ESTestCase { @Before @@ -52,8 +56,10 @@ public abstract class CliToolTestCase extends ESTestCase { public static void assertTerminalOutputContainsHelpFile(MockTerminal terminal, String classPath) throws IOException { String output = terminal.getOutput(); - assertFalse(output, output.isEmpty()); + assertThat(output, not(isEmptyString())); String expectedDocs = StreamsUtils.copyToStringFromClasspath(classPath); - assertTrue(output, output.contains(expectedDocs)); + // convert to *nix newlines as MockTerminal used for tests also uses *nix newlines + expectedDocs = expectedDocs.replace("\r\n", "\n"); + assertThat(output, containsString(expectedDocs)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 4a20d3c3fd6..84d88733802 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -631,27 +631,6 @@ public abstract class ESTestCase extends LuceneTestCase { assertEquals(expected.isNativeMethod(), actual.isNativeMethod()); } - /** A runnable that can throw any checked exception. */ - @FunctionalInterface - public interface ThrowingRunnable { - void run() throws Throwable; - } - - /** Checks a specific exception class is thrown by the given runnable, and returns it. 
*/ - public static T expectThrows(Class expectedType, ThrowingRunnable runnable) { - try { - runnable.run(); - } catch (Throwable e) { - if (expectedType.isInstance(e)) { - return expectedType.cast(e); - } - AssertionFailedError assertion = new AssertionFailedError("Unexpected exception type, expected " + expectedType.getSimpleName()); - assertion.initCause(e); - throw assertion; - } - throw new AssertionFailedError("Expected exception " + expectedType.getSimpleName()); - } - protected static long spinForAtLeastOneMillisecond() { long nanosecondsInMillisecond = TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS); // force at least one millisecond to elapse, but ensure the diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 5684717342d..fbc518b136d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -19,14 +19,34 @@ package org.elasticsearch.test.rest; -import com.carrotsearch.randomizedtesting.RandomizedTest; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.FileSystem; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + import org.apache.lucene.util.IOUtils; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.client.RestException; +import org.elasticsearch.test.rest.client.RestResponse; import org.elasticsearch.test.rest.parser.RestTestParseException; import org.elasticsearch.test.rest.parser.RestTestSuiteParser; import org.elasticsearch.test.rest.section.DoSection; @@ -42,24 +62,11 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; -import java.io.IOException; -import java.io.InputStream; -import java.net.InetSocketAddress; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; +import com.carrotsearch.randomizedtesting.RandomizedTest; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.sort; /** * Runs the clients test suite against an elasticsearch cluster. 
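The `expectThrows` helper deleted from `ESTestCase` above is presumably redundant after this upgrade because `ESTestCase` extends `LuceneTestCase`, which provides an equivalent helper in the Lucene version being moved to. For reference, a minimal standalone sketch of the same pattern (hypothetical class, mirroring the deleted code but using plain `AssertionError` instead of JUnit's `AssertionFailedError`):

[source,java]
--------------------------------------------------
public class ExpectThrowsSketch {
    /** A runnable that can throw any checked exception. */
    @FunctionalInterface
    interface ThrowingRunnable {
        void run() throws Throwable;
    }

    /** Runs the code, asserts the expected exception type was thrown, and returns the exception. */
    static <T extends Throwable> T expectThrows(Class<T> expectedType, ThrowingRunnable runnable) {
        try {
            runnable.run();
        } catch (Throwable e) {
            if (expectedType.isInstance(e)) {
                return expectedType.cast(e);
            }
            throw new AssertionError("Unexpected exception type, expected " + expectedType.getSimpleName(), e);
        }
        throw new AssertionError("Expected exception " + expectedType.getSimpleName());
    }

    public static void main(String[] args) {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
            throw new IllegalArgumentException("boom");
        });
        System.out.println("caught: " + e.getMessage());
    }
}
--------------------------------------------------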
@@ -261,7 +268,6 @@ public abstract class ESRestTestCase extends ESTestCase { @After public void wipeCluster() throws Exception { - // wipe indices Map deleteIndicesArgs = new HashMap<>(); deleteIndicesArgs.put("index", "*"); @@ -285,6 +291,30 @@ public abstract class ESRestTestCase extends ESTestCase { adminExecutionContext.callApi("snapshot.delete_repository", deleteSnapshotsArgs, Collections.emptyList(), Collections.emptyMap()); } + /** + * Logs a message if there are still running tasks. The reasoning is that any tasks still running are state that is trying to bleed into + * other tests. + */ + @After + public void logIfThereAreRunningTasks() throws InterruptedException, IOException, RestException { + RestResponse tasks = adminExecutionContext.callApi("tasks.list", emptyMap(), emptyList(), emptyMap()); + Set runningTasks = runningTasks(tasks); + // Ignore the task list API - it doesn't count against us + runningTasks.remove(ListTasksAction.NAME); + runningTasks.remove(ListTasksAction.NAME + "[n]"); + if (runningTasks.isEmpty()) { + return; + } + List stillRunning = new ArrayList<>(runningTasks); + sort(stillRunning); + logger.info("There are still tasks running after this test that might break subsequent tests {}.", stillRunning); + /* + * This isn't a higher level log or outright failure because some of these tasks are run by the cluster in the background. If we + * could determine that some tasks are run by the user we'd fail the tests if those tasks were running and ignore any background + * tasks. + */ + } + @AfterClass public static void close() { if (restTestExecutionContext != null) { @@ -365,4 +395,19 @@ public abstract class ESRestTestCase extends ESTestCase { executableSection.execute(restTestExecutionContext); } } + + @SuppressWarnings("unchecked") + public Set runningTasks(RestResponse response) throws IOException { + Set runningTasks = new HashSet<>(); + Map nodes = (Map) response.evaluate("nodes"); + for (Map.Entry node : nodes.entrySet()) { + Map nodeInfo = (Map) node.getValue(); + Map nodeTasks = (Map) nodeInfo.get("tasks"); + for (Map.Entry taskAndName : nodeTasks.entrySet()) { + Map task = (Map) taskAndName.getValue(); + runningTasks.add(task.get("action").toString()); + } + } + return runningTasks; + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java index 6a484e9ae69..79f7502fb27 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java @@ -114,9 +114,10 @@ public class HttpRequestBuilder { for (String pathPart : path) { try { finalPath.append('/'); - URI uri = new URI(null, null, null, -1, pathPart, null, null); + // We append "/" to the path part to handle parts that start with - or other invalid characters + URI uri = new URI(null, null, null, -1, "/" + pathPart, null, null); //manually escape any slash that each part may contain - finalPath.append(uri.getRawPath().replaceAll("/", "%2F")); + finalPath.append(uri.getRawPath().substring(1).replaceAll("/", "%2F")); } catch(URISyntaxException e) { throw new RuntimeException("unable to build uri", e); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index ef3be122cdb..c945a308363 100644
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
index 6a484e9ae69..79f7502fb27 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
@@ -114,9 +114,10 @@ public class HttpRequestBuilder {
         for (String pathPart : path) {
             try {
                 finalPath.append('/');
-                URI uri = new URI(null, null, null, -1, pathPart, null, null);
+                // We append "/" to the path part to handle parts that start with - or other invalid characters
+                URI uri = new URI(null, null, null, -1, "/" + pathPart, null, null);
                 //manually escape any slash that each part may contain
-                finalPath.append(uri.getRawPath().replaceAll("/", "%2F"));
+                finalPath.append(uri.getRawPath().substring(1).replaceAll("/", "%2F"));
             } catch(URISyntaxException e) {
                 throw new RuntimeException("unable to build uri", e);
             }
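The HttpRequestBuilder change above is easy to sanity-check in isolation: prefixing the part with "/" gives the java.net.URI constructor a valid path to percent-encode, and substring(1) strips that sentinel back off before any slashes inside the part are escaped. A throwaway sketch of what this produces (class and method names are ours, not the patch's):

    import java.net.URI;
    import java.net.URISyntaxException;

    public class PathPartEscapeSketch {
        // Encode one path segment the way the patched HttpRequestBuilder does.
        static String encodeSegment(String pathPart) {
            try {
                // The leading "/" makes the part a valid path for the URI constructor.
                URI uri = new URI(null, null, null, -1, "/" + pathPart, null, null);
                // Strip the sentinel "/" and escape slashes inside the segment.
                return uri.getRawPath().substring(1).replaceAll("/", "%2F");
            } catch (URISyntaxException e) {
                throw new RuntimeException("unable to build uri", e);
            }
        }

        public static void main(String[] args) {
            System.out.println(encodeSegment("index name")); // index%20name
            System.out.println(encodeSegment("a/b"));        // a%2Fb
            System.out.println(encodeSegment("-some-part")); // -some-part, no exception
        }
    }
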
diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
index ef3be122cdb..c945a308363 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
@@ -173,8 +173,7 @@ public class MockFSDirectoryService extends FsDirectoryService {
         w.setCheckIndexOnClose(false); // we do this on the index level
         w.setPreventDoubleWrite(preventDoubleWrite);
         // TODO: make this test robust to virus scanner
-        w.setEnableVirusScanner(false);
-        w.setNoDeleteOpenFile(noDeleteOpenFile);
+        w.setAssertNoDeleteOpenFile(false);
         w.setUseSlowOpenClosers(false);
         LuceneTestCase.closeAfterSuite(new CloseableDirectory(w));
         return w;
diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java
index e15b62147cf..298f230d64a 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java
@@ -74,7 +74,7 @@ public class RestTestParserTests extends ESTestCase {
                         "\"Get type mapping - pre 1.0\":\n" +
                         "\n" +
                         "  - skip:\n" +
-                        "      version: \"0.90.9 - \"\n" +
+                        "      version: \"2.0.0 - \"\n" +
                         "      reason:  \"for newer versions the index name is always returned\"\n" +
                         "\n" +
                         "  - do:\n" +
@@ -121,7 +121,7 @@ public class RestTestParserTests extends ESTestCase {
         assertThat(restTestSuite.getTestSections().get(1).getName(), equalTo("Get type mapping - pre 1.0"));
         assertThat(restTestSuite.getTestSections().get(1).getSkipSection().isEmpty(), equalTo(false));
         assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getReason(), equalTo("for newer versions the index name is always returned"));
-        assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getLowerVersion(), equalTo(Version.V_0_90_9));
+        assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0));
         assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getUpperVersion(), equalTo(Version.CURRENT));
         assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().size(), equalTo(3));
         assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(0), instanceOf(DoSection.class));
diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java
index 9dd388056d5..b3fe1f0f23b 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java
@@ -57,7 +57,7 @@ public class SetupSectionParserTests extends AbstractParserTestCase {
     public void testParseSetupAndSkipSectionNoSkip() throws Exception {
         parser = YamlXContent.yamlXContent.createParser(
                 "  - skip:\n" +
-                "      version: \"0.90.0 - 0.90.7\"\n" +
+                "      version: \"2.0.0 - 2.3.0\"\n" +
                 "      reason:  \"Update doesn't return metadata fields, waiting for #3259\"\n" +
                 "  - do:\n" +
                 "      index1:\n" +
@@ -79,8 +79,8 @@ public class SetupSectionParserTests extends AbstractParserTestCase {
         assertThat(setupSection, notNullValue());
         assertThat(setupSection.getSkipSection().isEmpty(), equalTo(false));
         assertThat(setupSection.getSkipSection(), notNullValue());
-        assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.V_0_90_0));
-        assertThat(setupSection.getSkipSection().getUpperVersion(), equalTo(Version.V_0_90_7));
+        assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0));
+        assertThat(setupSection.getSkipSection().getUpperVersion(), equalTo(Version.V_2_3_0));
         assertThat(setupSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259"));
         assertThat(setupSection.getDoSections().size(), equalTo(2));
         assertThat(setupSection.getDoSections().get(0).getApiCallSection().getApi(), equalTo("index1"));
diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java
index 5864e78134d..39b0f284b5e 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java
@@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.nullValue;
 public class SkipSectionParserTests extends AbstractParserTestCase {
     public void testParseSkipSectionVersionNoFeature() throws Exception {
         parser = YamlXContent.yamlXContent.createParser(
-                "version: \" - 0.90.2\"\n" +
+                "version: \" - 2.1.0\"\n" +
                 "reason:  Delete ignores the parent param"
         );
@@ -44,7 +44,7 @@ public class SkipSectionParserTests extends AbstractParserTestCase {

         assertThat(skipSection, notNullValue());
         assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion()));
-        assertThat(skipSection.getUpperVersion(), equalTo(Version.V_0_90_2));
+        assertThat(skipSection.getUpperVersion(), equalTo(Version.V_2_1_0));
         assertThat(skipSection.getFeatures().size(), equalTo(0));
         assertThat(skipSection.getReason(), equalTo("Delete ignores the parent param"));
     }
@@ -144,4 +144,4 @@ public class SkipSectionParserTests extends AbstractParserTestCase {
         assertThat(e.getMessage(), is("version or features is mandatory within skip section"));
     }
 }
-}
\ No newline at end of file
+}
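The skip-section tests above all use the "lower - upper" range syntax, where either side may be left empty: the SkipSectionParserTests assertions show an empty lower bound resolving to VersionUtils.getFirstVersion(), and the RestTestParserTests assertions show an empty upper bound resolving to Version.CURRENT. The sketch below illustrates that convention only; it is not the parser's actual code, and it is deliberately naive about hyphens inside version strings such as 5.0.0-SNAPSHOT:

    public class SkipRangeSketch {
        // Split "2.0.0 - 2.3.0", " - 2.1.0" or "2.0.0 - " into bounds;
        // an empty side means unbounded (first known / current version).
        static String[] parseRange(String range) {
            int dash = range.indexOf('-');
            String lower = range.substring(0, dash).trim();
            String upper = range.substring(dash + 1).trim();
            return new String[] {
                    lower.isEmpty() ? "<first version>" : lower,
                    upper.isEmpty() ? "<current version>" : upper
            };
        }

        public static void main(String[] args) {
            System.out.println(java.util.Arrays.toString(parseRange("2.0.0 - 2.3.0"))); // [2.0.0, 2.3.0]
            System.out.println(java.util.Arrays.toString(parseRange(" - 2.1.0")));      // [<first version>, 2.1.0]
            System.out.println(java.util.Arrays.toString(parseRange("2.0.0 - ")));      // [2.0.0, <current version>]
        }
    }
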
diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java
index c157610b645..d034ae56a71 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java
@@ -70,7 +70,7 @@ public class TestSectionParserTests extends AbstractParserTestCase {
         String yaml =
                 "\"First test section\": \n" +
                 "  - skip:\n" +
-                "      version: \"0.90.0 - 0.90.7\"\n" +
+                "      version: \"2.0.0 - 2.2.0\"\n" +
                 "      reason:  \"Update doesn't return metadata fields, waiting for #3259\"\n" +
                 "  - do :\n" +
                 "      catch: missing\n" +
@@ -87,8 +87,8 @@ public class TestSectionParserTests extends AbstractParserTestCase {
         assertThat(testSection, notNullValue());
         assertThat(testSection.getName(), equalTo("First test section"));
         assertThat(testSection.getSkipSection(), notNullValue());
-        assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_0_90_0));
-        assertThat(testSection.getSkipSection().getUpperVersion(), equalTo(Version.V_0_90_7));
+        assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0));
+        assertThat(testSection.getSkipSection().getUpperVersion(), equalTo(Version.V_2_2_0));
         assertThat(testSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259"));
         assertThat(testSection.getExecutableSections().size(), equalTo(2));
         DoSection doSection = (DoSection)testSection.getExecutableSections().get(0);
diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java
index cc2f613eb27..ea1929a55b0 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/test/VersionUtilsTests.java
@@ -32,7 +32,7 @@ public class VersionUtilsTests extends ESTestCase {
                 assertTrue(allVersions.get(i).before(allVersions.get(j)));
             }
         }
-
+
     public void testRandomVersionBetween() {
         // full range
         Version got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), Version.CURRENT);
@@ -46,34 +46,34 @@ public class VersionUtilsTests extends ESTestCase {
         assertTrue(got.onOrBefore(Version.CURRENT));

         // sub range
-        got = VersionUtils.randomVersionBetween(random(), Version.V_0_90_12, Version.V_1_4_5);
-        assertTrue(got.onOrAfter(Version.V_0_90_12));
-        assertTrue(got.onOrBefore(Version.V_1_4_5));
+        got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0);
+        assertTrue(got.onOrAfter(Version.V_2_0_0));
+        assertTrue(got.onOrBefore(Version.V_5_0_0));

         // unbounded lower
-        got = VersionUtils.randomVersionBetween(random(), null, Version.V_1_4_5);
+        got = VersionUtils.randomVersionBetween(random(), null, Version.V_5_0_0);
         assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
-        assertTrue(got.onOrBefore(Version.V_1_4_5));
+        assertTrue(got.onOrBefore(Version.V_5_0_0));
         got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allVersions().get(0));
         assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
         assertTrue(got.onOrBefore(VersionUtils.allVersions().get(0)));

         // unbounded upper
-        got = VersionUtils.randomVersionBetween(random(), Version.V_0_90_12, null);
-        assertTrue(got.onOrAfter(Version.V_0_90_12));
+        got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, null);
+        assertTrue(got.onOrAfter(Version.V_2_0_0));
         assertTrue(got.onOrBefore(Version.CURRENT));
         got = VersionUtils.randomVersionBetween(random(), VersionUtils.getPreviousVersion(), null);
         assertTrue(got.onOrAfter(VersionUtils.getPreviousVersion()));
         assertTrue(got.onOrBefore(Version.CURRENT));
-
+
         // range of one
         got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getFirstVersion());
         assertEquals(got, VersionUtils.getFirstVersion());
         got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT);
         assertEquals(got, Version.CURRENT);
-        got = VersionUtils.randomVersionBetween(random(), Version.V_1_2_4, Version.V_1_2_4);
-        assertEquals(got, Version.V_1_2_4);
-
+        got = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_5_0_0);
+        assertEquals(got, Version.V_5_0_0);
+
         // implicit range of one
         got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getFirstVersion());
         assertEquals(got, VersionUtils.getFirstVersion());