diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index d507c54c45a..5775b2b6323 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -183,4 +183,12 @@ if (project != rootProject) { testClass = 'org.elasticsearch.gradle.test.GradleUnitTestCase' integTestClass = 'org.elasticsearch.gradle.test.GradleIntegrationTestCase' } + + /* + * We already configure publication and we don't need or want this one that + * comes from the java-gradle-plugin. + */ + afterEvaluate { + generatePomFileForPluginMavenPublication.enabled = false + } } diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 17e5cb5ff01..023d5d5b8dc 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 7.0.0-alpha1 -lucene = 7.4.0 +lucene = 7.5.0-snapshot-608f0277b0 # optional dependencies spatial4j = 0.7 diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index 948a3387f03..6e127a6ccfc 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,7 +1,7 @@ :version: 7.0.0-alpha1 :major-version: 7.x -:lucene_version: 7.4.0 -:lucene_version_path: 7_4_0 +:lucene_version: 7.5.0 +:lucene_version_path: 7_5_0 :branch: master :jdk: 1.8.0_131 :jdk_major: 8 diff --git a/docs/java-rest/high-level/licensing/put-license.asciidoc b/docs/java-rest/high-level/licensing/put-license.asciidoc index 7456fdd3c02..a270d658ddd 100644 --- a/docs/java-rest/high-level/licensing/put-license.asciidoc +++ b/docs/java-rest/high-level/licensing/put-license.asciidoc @@ -33,10 +33,9 @@ include-tagged::{doc-tests}/LicensingDocumentationIT.java[put-license-response] -------------------------------------------------- <1> The status of the license <2> Make sure that the license is valid. -<3> Check the acknowledge flag. -<4> It should be true if license is acknowledge. -<5> Otherwise we can see the acknowledge messages in `acknowledgeHeader()` and check -component-specific messages in `acknowledgeMessages()`. 
+<3> Check the acknowledge flag. It should be true if license is acknowledged. +<4> Otherwise we can see the acknowledge messages in `acknowledgeHeader()` +<5> and check component-specific messages in `acknowledgeMessages()`. [[java-rest-high-put-license-async]] ==== Asynchronous Execution diff --git a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc index dc3222a5f37..5eeb3a4605a 100644 --- a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc @@ -172,7 +172,7 @@ In the example below we search across crawled webpages. For each webpage we stor belong to. By defining a `terms` aggregator on the `domain` field we group the result set of webpages by domain. The `top_hits` aggregator is then defined as sub-aggregator, so that the top matching hits are collected per bucket. -Also a `max` aggregator is defined which is used by the `terms` aggregator's order feature the return the buckets by +Also a `max` aggregator is defined which is used by the `terms` aggregator's order feature to return the buckets by relevancy order of the most relevant document in a bucket. [source,js] diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc index 57be634e835..27d5cc316a6 100644 --- a/docs/reference/cluster/update-settings.asciidoc +++ b/docs/reference/cluster/update-settings.asciidoc @@ -1,9 +1,18 @@ [[cluster-update-settings]] == Cluster Update Settings -Allows to update cluster wide specific settings. Settings updated can -either be persistent (applied across restarts) or transient (will not -survive a full cluster restart). Here is an example: +Use this API to review and change cluster-wide settings. 
+ +To review cluster settings: + +[source,js] +-------------------------------------------------- +GET /_cluster/settings +-------------------------------------------------- +// CONSOLE + +Updates to settings can be persistent, meaning they apply across restarts, or transient, where they don't +survive a full cluster restart. Here is an example of a persistent update: [source,js] -------------------------------------------------- @@ -16,7 +25,7 @@ PUT /_cluster/settings -------------------------------------------------- // CONSOLE -Or: +This update is transient: [source,js] -------------------------------------------------- @@ -29,8 +38,7 @@ PUT /_cluster/settings?flat_settings=true -------------------------------------------------- // CONSOLE -The cluster responds with the settings updated. So the response for the -last example will be: +The response to an update returns the changed setting, as in this response to the transient example: [source,js] -------------------------------------------------- @@ -44,11 +52,14 @@ last example will be: -------------------------------------------------- // TESTRESPONSE[s/\.\.\./"acknowledged": true,/] -Resetting persistent or transient settings can be done by assigning a -`null` value. If a transient setting is reset, the persistent setting -is applied if available. Otherwise Elasticsearch will fallback to the setting -defined at the configuration file or, if not existent, to the default -value. Here is an example: +You can reset persistent or transient settings by assigning a +`null` value. If a transient setting is reset, the first one of these values that is defined is applied: + +* the persistent setting +* the setting in the configuration file +* the default value. + +This example resets a setting: [source,js] -------------------------------------------------- @@ -61,8 +72,7 @@ PUT /_cluster/settings -------------------------------------------------- // CONSOLE -Reset settings will not be included in the cluster response. 
So -the response for the last example will be: +The response does not include settings that have been reset: [source,js] -------------------------------------------------- @@ -74,8 +84,8 @@ the response for the last example will be: -------------------------------------------------- // TESTRESPONSE[s/\.\.\./"acknowledged": true,/] -Settings can also be reset using simple wildcards. For instance to reset -all dynamic `indices.recovery` setting a prefix can be used: +You can also reset settings using wildcards. For example, to reset +all dynamic `indices.recovery` settings: [source,js] -------------------------------------------------- @@ -88,25 +98,19 @@ PUT /_cluster/settings -------------------------------------------------- // CONSOLE -Cluster wide settings can be returned using: - -[source,js] --------------------------------------------------- -GET /_cluster/settings --------------------------------------------------- -// CONSOLE [float] -=== Precedence of settings +=== Order of Precedence -Transient cluster settings take precedence over persistent cluster settings, -which take precedence over settings configured in the `elasticsearch.yml` -config file. +The order of precedence for cluster settings is: -For this reason it is preferrable to use the `elasticsearch.yml` file only -for local configurations, and set all cluster-wider settings with the +1. transient cluster settings +2. persistent cluster settings +3. settings in the `elasticsearch.yml` configuration file. + +It's best to use the `elasticsearch.yml` file only +for local configurations, and set all cluster-wide settings with the `settings` API. -A list of dynamically updatable settings can be found in the -<> documentation. +You can find the list of settings that you can dynamically update in <>. 
diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceParser.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceParser.java index 1100884cf8a..acfbc04f872 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceParser.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceParser.java @@ -78,7 +78,6 @@ public abstract class ArrayValuesSourceParser implement throws IOException { List fields = null; - ValueType valueType = null; String format = null; Map missingMap = null; Map otherOptions = new HashMap<>(); @@ -145,9 +144,6 @@ public abstract class ArrayValuesSourceParser implement if (fields != null) { factory.fields(fields); } - if (valueType != null) { - factory.valueType(valueType); - } if (format != null) { factory.format(format); } diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 deleted file mode 100644 index 2b14a61f264..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9f0a326f7ec1671ffb07f95b27f1a5812b7dc1c3 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..908f70131b3 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +bd7d8078a2d0ad11a24f54156cc015630c96858a \ No newline at end of file diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/PainlessExtension.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/PainlessExtension.java 
index 9434e6986c0..eb971353437 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/PainlessExtension.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/PainlessExtension.java @@ -19,11 +19,11 @@ package org.elasticsearch.painless.spi; +import org.elasticsearch.script.ScriptContext; + import java.util.List; import java.util.Map; -import org.elasticsearch.script.ScriptContext; - public interface PainlessExtension { Map, List> getContextWhitelists(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index dad8da06e76..8a90f53b4fd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -421,7 +421,7 @@ public final class Def { PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(clazz); if (struct != null) { - MethodHandle handle = struct.getters.get(name); + MethodHandle handle = struct.getterMethodHandles.get(name); if (handle != null) { return handle; } @@ -431,7 +431,7 @@ public final class Def { struct = painlessLookup.getPainlessStructFromJavaClass(iface); if (struct != null) { - MethodHandle handle = struct.getters.get(name); + MethodHandle handle = struct.getterMethodHandles.get(name); if (handle != null) { return handle; } @@ -492,7 +492,7 @@ public final class Def { PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(clazz); if (struct != null) { - MethodHandle handle = struct.setters.get(name); + MethodHandle handle = struct.setterMethodHandles.get(name); if (handle != null) { return handle; } @@ -502,7 +502,7 @@ public final class Def { struct = painlessLookup.getPainlessStructFromJavaClass(iface); if (struct != null) { - MethodHandle handle = struct.setters.get(name); + MethodHandle handle = struct.setterMethodHandles.get(name); if (handle != 
null) { return handle; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java index 1236c4977e8..7bef028c7d1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java @@ -22,6 +22,7 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.api.Debug; import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.script.ScriptException; import java.util.List; @@ -58,7 +59,7 @@ public class PainlessExplainError extends Error { javaClassName = objectToExplain.getClass().getName(); PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(objectToExplain.getClass()); if (struct != null) { - painlessClassName = struct.name; + painlessClassName = PainlessLookupUtility.typeToCanonicalTypeName(objectToExplain.getClass()); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index 57b18bc60da..24dcf0ebdba 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -19,47 +19,38 @@ package org.elasticsearch.painless.lookup; -import org.objectweb.asm.Type; - import java.lang.invoke.MethodHandle; import java.util.Collections; import java.util.Map; public final class PainlessClass { - public final String name; - public final Class clazz; - public final Type type; - public final Map constructors; public final Map staticMethods; public final Map methods; - public 
final Map staticMembers; - public final Map members; + public final Map staticFields; + public final Map fields; - public final Map getters; - public final Map setters; + public final Map getterMethodHandles; + public final Map setterMethodHandles; public final PainlessMethod functionalMethod; - PainlessClass(String name, Class clazz, Type type, - Map constructors, Map staticMethods, Map methods, - Map staticMembers, Map members, - Map getters, Map setters, - PainlessMethod functionalMethod) { - this.name = name; - this.clazz = clazz; - this.type = type; + PainlessClass(Map constructors, + Map staticMethods, Map methods, + Map staticFields, Map fields, + Map getterMethodHandles, Map setterMethodHandles, + PainlessMethod functionalMethod) { this.constructors = Collections.unmodifiableMap(constructors); this.staticMethods = Collections.unmodifiableMap(staticMethods); this.methods = Collections.unmodifiableMap(methods); - this.staticMembers = Collections.unmodifiableMap(staticMembers); - this.members = Collections.unmodifiableMap(members); + this.staticFields = Collections.unmodifiableMap(staticFields); + this.fields = Collections.unmodifiableMap(fields); - this.getters = Collections.unmodifiableMap(getters); - this.setters = Collections.unmodifiableMap(setters); + this.getterMethodHandles = Collections.unmodifiableMap(getterMethodHandles); + this.setterMethodHandles = Collections.unmodifiableMap(setterMethodHandles); this.functionalMethod = functionalMethod; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java index 0eda3660f0b..2f41ed5dca8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java @@ -19,52 +19,39 @@ package org.elasticsearch.painless.lookup; 
-import org.objectweb.asm.Type; - import java.lang.invoke.MethodHandle; import java.util.HashMap; import java.util.Map; final class PainlessClassBuilder { - final String name; - final Class clazz; - final Type type; - final Map constructors; final Map staticMethods; final Map methods; - final Map staticMembers; - final Map members; + final Map staticFields; + final Map fields; - final Map getters; - final Map setters; + final Map getterMethodHandles; + final Map setterMethodHandles; PainlessMethod functionalMethod; - PainlessClassBuilder(String name, Class clazz, Type type) { - this.name = name; - this.clazz = clazz; - this.type = type; - + PainlessClassBuilder() { constructors = new HashMap<>(); staticMethods = new HashMap<>(); methods = new HashMap<>(); - staticMembers = new HashMap<>(); - members = new HashMap<>(); + staticFields = new HashMap<>(); + fields = new HashMap<>(); - getters = new HashMap<>(); - setters = new HashMap<>(); + getterMethodHandles = new HashMap<>(); + setterMethodHandles = new HashMap<>(); functionalMethod = null; } PainlessClass build() { - return new PainlessClass(name, clazz, type, - constructors, staticMethods, methods, - staticMembers, members, - getters, setters, - functionalMethod); + return new PainlessClass(constructors, staticMethods, methods, staticFields, fields, + getterMethodHandles, setterMethodHandles, functionalMethod); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index bcecd7bbdc7..67c04498a58 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -29,8 +29,8 @@ import java.util.Map; */ public final class PainlessLookup { - public Collection getStructs() { - return classesToPainlessClasses.values(); + public Collection> 
getStructs() { + return classesToPainlessClasses.keySet(); } private final Map> canonicalClassNamesToClasses; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 519227bb901..3675cc7cd0f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -179,8 +179,7 @@ public class PainlessLookupBuilder { classesToPainlessClassBuilders = new HashMap<>(); canonicalClassNamesToClasses.put(DEF_CLASS_NAME, def.class); - classesToPainlessClassBuilders.put(def.class, - new PainlessClassBuilder(DEF_CLASS_NAME, Object.class, org.objectweb.asm.Type.getType(Object.class))); + classesToPainlessClassBuilders.put(def.class, new PainlessClassBuilder()); } private Class canonicalTypeNameToType(String canonicalTypeName) { @@ -234,17 +233,21 @@ public class PainlessLookupBuilder { throw new IllegalArgumentException("invalid class name [" + canonicalClassName + "]"); } + + Class existingClass = canonicalClassNamesToClasses.get(typeToCanonicalTypeName(clazz)); + + if (existingClass != null && existingClass != clazz) { + throw new IllegalArgumentException("class [" + canonicalClassName + "] " + + "cannot represent multiple java classes with the same name from different class loaders"); + } + PainlessClassBuilder existingPainlessClassBuilder = classesToPainlessClassBuilders.get(clazz); if (existingPainlessClassBuilder == null) { - PainlessClassBuilder painlessClassBuilder = - new PainlessClassBuilder(canonicalClassName, clazz, org.objectweb.asm.Type.getType(clazz)); + PainlessClassBuilder painlessClassBuilder = new PainlessClassBuilder(); canonicalClassNamesToClasses.put(canonicalClassName, clazz); classesToPainlessClassBuilders.put(clazz, painlessClassBuilder); - } else if 
(existingPainlessClassBuilder.clazz.equals(clazz) == false) { - throw new IllegalArgumentException("class [" + canonicalClassName + "] " + - "cannot represent multiple java classes with the same name from different class loaders"); } String javaClassName = clazz.getName(); @@ -265,7 +268,7 @@ public class PainlessLookupBuilder { canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz); } - } else if (importedPainlessClass.equals(clazz) == false) { + } else if (importedPainlessClass != clazz) { throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " + "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedPainlessClass) + "]"); } else if (importClassName == false) { @@ -504,10 +507,10 @@ public class PainlessLookupBuilder { if (painlessMethod == null) { org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod); - MethodHandle javaMethodHandle; + MethodHandle methodHandle; try { - javaMethodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); + methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); } catch (IllegalAccessException iae) { throw new IllegalArgumentException("static method handle [[" + targetClass.getCanonicalName() + "], " + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); @@ -516,7 +519,7 @@ public class PainlessLookupBuilder { painlessMethod = painlessMethodCache.computeIfAbsent( new PainlessMethodCacheKey(targetClass, methodName, typeParameters), key -> new PainlessMethod(methodName, targetClass, null, returnType, - typeParameters, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); + typeParameters, asmMethod, javaMethod.getModifiers(), methodHandle)); painlessClassBuilder.staticMethods.put(painlessMethodKey, painlessMethod); } else if ((painlessMethod.name.equals(methodName) && painlessMethod.rtn == returnType && 
@@ -535,18 +538,18 @@ public class PainlessLookupBuilder { if (painlessMethod == null) { org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod); - MethodHandle javaMethodHandle; + MethodHandle methodHandle; if (augmentedClass == null) { try { - javaMethodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); + methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); } catch (IllegalAccessException iae) { throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); } } else { try { - javaMethodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod); + methodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod); } catch (IllegalAccessException iae) { throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " + @@ -557,7 +560,7 @@ public class PainlessLookupBuilder { painlessMethod = painlessMethodCache.computeIfAbsent( new PainlessMethodCacheKey(targetClass, methodName, typeParameters), key -> new PainlessMethod(methodName, targetClass, augmentedClass, returnType, - typeParameters, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); + typeParameters, asmMethod, javaMethod.getModifiers(), methodHandle)); painlessClassBuilder.methods.put(painlessMethodKey, painlessMethod); } else if ((painlessMethod.name.equals(methodName) && painlessMethod.rtn == returnType && @@ -650,7 +653,7 @@ public class PainlessLookupBuilder { throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "]. 
[" + fieldName + "]] must be final"); } - PainlessField painlessField = painlessClassBuilder.staticMembers.get(painlessFieldKey); + PainlessField painlessField = painlessClassBuilder.staticFields.get(painlessFieldKey); if (painlessField == null) { painlessField = painlessFieldCache.computeIfAbsent( @@ -658,7 +661,7 @@ public class PainlessLookupBuilder { key -> new PainlessField(fieldName, javaField.getName(), targetClass, typeParameter, javaField.getModifiers(), null, null)); - painlessClassBuilder.staticMembers.put(painlessFieldKey, painlessField); + painlessClassBuilder.staticFields.put(painlessFieldKey, painlessField); } else if (painlessField.clazz != typeParameter) { throw new IllegalArgumentException("cannot have static fields " + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + @@ -674,7 +677,7 @@ public class PainlessLookupBuilder { methodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField); } catch (IllegalAccessException iae) { throw new IllegalArgumentException( - "method handle getter not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); + "getter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); } MethodHandle methodHandleSetter; @@ -683,10 +686,10 @@ public class PainlessLookupBuilder { methodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField); } catch (IllegalAccessException iae) { throw new IllegalArgumentException( - "method handle setter not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); + "setter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); } - PainlessField painlessField = painlessClassBuilder.members.get(painlessFieldKey); + PainlessField painlessField = painlessClassBuilder.fields.get(painlessFieldKey); if (painlessField == null) { painlessField = painlessFieldCache.computeIfAbsent( @@ -694,7 +697,7 @@ public class PainlessLookupBuilder { key 
-> new PainlessField(fieldName, javaField.getName(), targetClass, typeParameter, javaField.getModifiers(), methodHandleGetter, methodHandleSetter)); - painlessClassBuilder.members.put(fieldName, painlessField); + painlessClassBuilder.fields.put(fieldName, painlessField); } else if (painlessField.clazz != typeParameter) { throw new IllegalArgumentException("cannot have fields " + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + @@ -771,14 +774,14 @@ public class PainlessLookupBuilder { } } - for (Map.Entry painlessFieldEntry : originalPainlessClassBuilder.members.entrySet()) { + for (Map.Entry painlessFieldEntry : originalPainlessClassBuilder.fields.entrySet()) { String painlessFieldKey = painlessFieldEntry.getKey(); PainlessField newPainlessField = painlessFieldEntry.getValue(); - PainlessField existingPainlessField = targetPainlessClassBuilder.members.get(painlessFieldKey); + PainlessField existingPainlessField = targetPainlessClassBuilder.fields.get(painlessFieldKey); if (existingPainlessField == null || existingPainlessField.target != newPainlessField.target && existingPainlessField.target.isAssignableFrom(newPainlessField.target)) { - targetPainlessClassBuilder.members.put(painlessFieldKey, newPainlessField); + targetPainlessClassBuilder.fields.put(painlessFieldKey, newPainlessField); } } } @@ -796,34 +799,32 @@ public class PainlessLookupBuilder { if (typeParametersSize == 0 && methodName.startsWith("get") && methodName.length() > 3 && Character.isUpperCase(methodName.charAt(3))) { - painlessClassBuilder.getters.putIfAbsent( + painlessClassBuilder.getterMethodHandles.putIfAbsent( Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), painlessMethod.handle); } else if (typeParametersSize == 0 && methodName.startsWith("is") && methodName.length() > 2 && Character.isUpperCase(methodName.charAt(2))) { - painlessClassBuilder.getters.putIfAbsent( + painlessClassBuilder.getterMethodHandles.putIfAbsent( 
Character.toLowerCase(methodName.charAt(2)) + methodName.substring(3), painlessMethod.handle); } else if (typeParametersSize == 1 && methodName.startsWith("set") && methodName.length() > 3 && Character.isUpperCase(methodName.charAt(3))) { - painlessClassBuilder.setters.putIfAbsent( + painlessClassBuilder.setterMethodHandles.putIfAbsent( Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), painlessMethod.handle); } } - for (PainlessField painlessField : painlessClassBuilder.members.values()) { - painlessClassBuilder.getters.put(painlessField.name, painlessField.getter); - painlessClassBuilder.setters.put(painlessField.name, painlessField.setter); + for (PainlessField painlessField : painlessClassBuilder.fields.values()) { + painlessClassBuilder.getterMethodHandles.put(painlessField.name, painlessField.getter); + painlessClassBuilder.setterMethodHandles.put(painlessField.name, painlessField.setter); } } private void setFunctionalInterfaceMethods() { for (Map.Entry, PainlessClassBuilder> painlessClassBuilderEntry : classesToPainlessClassBuilders.entrySet()) { - setFunctionalInterfaceMethod(painlessClassBuilderEntry.getValue()); + setFunctionalInterfaceMethod(painlessClassBuilderEntry.getKey(), painlessClassBuilderEntry.getValue()); } } - private void setFunctionalInterfaceMethod(PainlessClassBuilder painlessClassBuilder) { - Class targetClass = painlessClassBuilder.clazz; - + private void setFunctionalInterfaceMethod(Class targetClass, PainlessClassBuilder painlessClassBuilder) { if (targetClass.isInterface()) { List javaMethods = new ArrayList<>(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index c0d4433f7fb..f092a17c9fc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -72,8 +72,9 @@ 
public final class ENewObj extends AExpression { constructor.arguments.toArray(types); if (constructor.arguments.size() != arguments.size()) { - throw createError(new IllegalArgumentException("When calling constructor on type [" + struct.name + "]" + - " expected [" + constructor.arguments.size() + "] arguments, but found [" + arguments.size() + "].")); + throw createError(new IllegalArgumentException( + "When calling constructor on type [" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "] " + + "expected [" + constructor.arguments.size() + "] arguments, but found [" + arguments.size() + "].")); } for (int argument = 0; argument < arguments.size(); ++argument) { @@ -87,7 +88,8 @@ public final class ENewObj extends AExpression { statement = true; } else { - throw createError(new IllegalArgumentException("Unknown new call on type [" + struct.name + "].")); + throw createError(new IllegalArgumentException( + "Unknown new call on type [" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "].")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java index 7b55cb5a804..26471f67f65 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java @@ -63,9 +63,9 @@ public final class PBrace extends AStoreable { } else if (prefix.actual == def.class) { sub = new PSubDefArray(location, index); } else if (Map.class.isAssignableFrom(prefix.actual)) { - sub = new PSubMapShortcut(location, locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual), index); + sub = new PSubMapShortcut(location, prefix.actual, index); } else if (List.class.isAssignableFrom(prefix.actual)) { - sub = new PSubListShortcut(location, locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual), index); + sub = new 
PSubListShortcut(location, prefix.actual, index); } else { throw createError(new IllegalArgumentException("Illegal array access on type " + "[" + PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual) + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index 8fc8a612b84..56bc18eadbd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -84,8 +84,8 @@ public final class PCallInvoke extends AExpression { } else if (prefix.actual == def.class) { sub = new PSubDefCall(location, name, arguments); } else { - throw createError(new IllegalArgumentException( - "Unknown call [" + name + "] with [" + arguments.size() + "] arguments on type [" + struct.name + "].")); + throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments " + + "on type [" + PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual) + "].")); } if (nullSafe) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index abf398d0e67..b322d5b1f28 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -68,7 +68,7 @@ public final class PField extends AStoreable { sub = new PSubDefField(location, value); } else { PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual); - PainlessField field = prefix instanceof EStatic ? struct.staticMembers.get(value) : struct.members.get(value); + PainlessField field = prefix instanceof EStatic ? 
struct.staticFields.get(value) : struct.fields.get(value); if (field != null) { sub = new PSubField(location, field); @@ -92,11 +92,11 @@ public final class PField extends AStoreable { index.analyze(locals); if (Map.class.isAssignableFrom(prefix.actual)) { - sub = new PSubMapShortcut(location, struct, index); + sub = new PSubMapShortcut(location, prefix.actual, index); } if (List.class.isAssignableFrom(prefix.actual)) { - sub = new PSubListShortcut(location, struct, index); + sub = new PSubListShortcut(location, prefix.actual, index); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java index 3841b1fece1..509aad64153 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java @@ -36,16 +36,16 @@ import java.util.Set; */ final class PSubListShortcut extends AStoreable { - private final PainlessClass struct; + private final Class targetClass; private AExpression index; private PainlessMethod getter; private PainlessMethod setter; - PSubListShortcut(Location location, PainlessClass struct, AExpression index) { + PSubListShortcut(Location location, Class targetClass, AExpression index) { super(location); - this.struct = Objects.requireNonNull(struct); + this.targetClass = Objects.requireNonNull(targetClass); this.index = Objects.requireNonNull(index); } @@ -56,16 +56,19 @@ final class PSubListShortcut extends AStoreable { @Override void analyze(Locals locals) { + PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(targetClass); + String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass); + getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1)); setter = 
struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("set", 2)); if (getter != null && (getter.rtn == void.class || getter.arguments.size() != 1 || getter.arguments.get(0) != int.class)) { - throw createError(new IllegalArgumentException("Illegal list get shortcut for type [" + struct.name + "].")); + throw createError(new IllegalArgumentException("Illegal list get shortcut for type [" + canonicalClassName + "].")); } if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0) != int.class)) { - throw createError(new IllegalArgumentException("Illegal list set shortcut for type [" + struct.name + "].")); + throw createError(new IllegalArgumentException("Illegal list set shortcut for type [" + canonicalClassName + "].")); } if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) @@ -80,7 +83,7 @@ final class PSubListShortcut extends AStoreable { actual = setter != null ? setter.arguments.get(1) : getter.rtn; } else { - throw createError(new IllegalArgumentException("Illegal list shortcut for type [" + struct.name + "].")); + throw createError(new IllegalArgumentException("Illegal list shortcut for type [" + canonicalClassName + "].")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java index 13a3b9c9b94..2d7f2250c6c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java @@ -35,16 +35,16 @@ import java.util.Set; */ final class PSubMapShortcut extends AStoreable { - private final PainlessClass struct; + private final Class targetClass; private AExpression index; private PainlessMethod getter; private PainlessMethod setter; - PSubMapShortcut(Location location, PainlessClass struct, AExpression index) { + 
PSubMapShortcut(Location location, Class targetClass, AExpression index) { super(location); - this.struct = Objects.requireNonNull(struct); + this.targetClass = Objects.requireNonNull(targetClass); this.index = Objects.requireNonNull(index); } @@ -55,15 +55,18 @@ final class PSubMapShortcut extends AStoreable { @Override void analyze(Locals locals) { + PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(targetClass); + String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass); + getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1)); setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("put", 2)); if (getter != null && (getter.rtn == void.class || getter.arguments.size() != 1)) { - throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + struct.name + "].")); + throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + canonicalClassName + "].")); } if (setter != null && setter.arguments.size() != 2) { - throw createError(new IllegalArgumentException("Illegal map set shortcut for type [" + struct.name + "].")); + throw createError(new IllegalArgumentException("Illegal map set shortcut for type [" + canonicalClassName + "].")); } if (getter != null && setter != null && @@ -78,7 +81,7 @@ final class PSubMapShortcut extends AStoreable { actual = setter != null ? 
setter.arguments.get(1) : getter.rtn; } else { - throw createError(new IllegalArgumentException("Illegal map shortcut for type [" + struct.name + "].")); + throw createError(new IllegalArgumentException("Illegal map shortcut for type [" + canonicalClassName + "].")); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index b2cc5e48ad8..ff0d4231175 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -29,6 +29,7 @@ import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.spi.Whitelist; import java.io.IOException; @@ -71,52 +72,54 @@ public class PainlessDocGenerator { Files.newOutputStream(indexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(indexStream); - List structs = PAINLESS_LOOKUP.getStructs().stream().sorted(comparing(t -> t.name)).collect(toList()); - for (PainlessClass struct : structs) { - if (struct.clazz.isPrimitive()) { + List> classes = PAINLESS_LOOKUP.getStructs().stream().sorted(comparing(Class::getCanonicalName)).collect(toList()); + for (Class clazz : classes) { + PainlessClass struct = PAINLESS_LOOKUP.getPainlessStructFromJavaClass(clazz); + String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(clazz); + + if (clazz.isPrimitive()) { // Primitives don't have methods to reference continue; } - if ("def".equals(struct.name)) { + if (clazz == def.class) { // def is special but doesn't have any methods all 
of its own. continue; } indexStream.print("include::"); - indexStream.print(struct.name); + indexStream.print(canonicalClassName); indexStream.println(".asciidoc[]"); - Path typePath = apiRootPath.resolve(struct.name + ".asciidoc"); - logger.info("Writing [{}.asciidoc]", struct.name); + Path typePath = apiRootPath.resolve(canonicalClassName + ".asciidoc"); + logger.info("Writing [{}.asciidoc]", canonicalClassName); try (PrintStream typeStream = new PrintStream( Files.newOutputStream(typePath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(typeStream); typeStream.print("[["); - emitAnchor(typeStream, struct.clazz); + emitAnchor(typeStream, clazz); typeStream.print("]]++"); - typeStream.print(struct.name); + typeStream.print(canonicalClassName); typeStream.println("++::"); Consumer documentField = field -> PainlessDocGenerator.documentField(typeStream, field); Consumer documentMethod = method -> PainlessDocGenerator.documentMethod(typeStream, method); - struct.staticMembers.values().stream().sorted(FIELD_NAME).forEach(documentField); - struct.members.values().stream().sorted(FIELD_NAME).forEach(documentField); + struct.staticFields.values().stream().sorted(FIELD_NAME).forEach(documentField); + struct.fields.values().stream().sorted(FIELD_NAME).forEach(documentField); struct.staticMethods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(documentMethod); struct.constructors.values().stream().sorted(NUMBER_OF_ARGS).forEach(documentMethod); - Map inherited = new TreeMap<>(); + Map> inherited = new TreeMap<>(); struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(method -> { - if (method.target == struct.clazz) { + if (method.target == clazz) { documentMethod(typeStream, method); } else { - PainlessClass painlessClass = PAINLESS_LOOKUP.getPainlessStructFromJavaClass(method.target); - inherited.put(painlessClass.name, painlessClass); + 
inherited.put(canonicalClassName, method.target); } }); if (false == inherited.isEmpty()) { typeStream.print("* Inherits methods from "); boolean first = true; - for (PainlessClass inheritsFrom : inherited.values()) { + for (Class inheritsFrom : inherited.values()) { if (first) { first = false; } else { @@ -242,7 +245,7 @@ public class PainlessDocGenerator { an internal link with the text. */ private static void emitType(PrintStream stream, Class clazz) { - emitStruct(stream, PAINLESS_LOOKUP.getPainlessStructFromJavaClass(clazz)); + emitStruct(stream, clazz); while ((clazz = clazz.getComponentType()) != null) { stream.print("[]"); } @@ -252,15 +255,17 @@ public class PainlessDocGenerator { * Emit a {@link PainlessClass}. If the {@linkplain PainlessClass} is primitive or def this just emits the name of the struct. * Otherwise this emits an internal link with the name. */ - private static void emitStruct(PrintStream stream, PainlessClass struct) { - if (false == struct.clazz.isPrimitive() && false == struct.name.equals("def")) { + private static void emitStruct(PrintStream stream, Class clazz) { + String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(clazz); + + if (false == clazz.isPrimitive() && clazz != def.class) { stream.print("<<"); - emitAnchor(stream, struct.clazz); + emitAnchor(stream, clazz); stream.print(','); - stream.print(struct.name); + stream.print(canonicalClassName); stream.print(">>"); } else { - stream.print(struct.name); + stream.print(canonicalClassName); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java index 84452b4843d..c64014d81a5 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java @@ -460,7 +460,7 @@ public class 
NodeToStringTests extends ESTestCase { public void testPSubField() { Location l = new Location(getTestName(), 0); PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(Boolean.class); - PainlessField f = s.staticMembers.get("TRUE"); + PainlessField f = s.staticFields.get("TRUE"); PSubField node = new PSubField(l, f); node.prefix = new EStatic(l, "Boolean"); assertEquals("(PSubField (EStatic Boolean) TRUE)", node.toString()); @@ -469,32 +469,28 @@ public class NodeToStringTests extends ESTestCase { public void testPSubListShortcut() { Location l = new Location(getTestName(), 0); - PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(List.class); - PSubListShortcut node = new PSubListShortcut(l, s, new EConstant(l, 1)); + PSubListShortcut node = new PSubListShortcut(l, List.class, new EConstant(l, 1)); node.prefix = new EVariable(l, "a"); assertEquals("(PSubListShortcut (EVariable a) (EConstant Integer 1))", node.toString()); assertEquals("(PSubNullSafeCallInvoke (PSubListShortcut (EVariable a) (EConstant Integer 1)))", new PSubNullSafeCallInvoke(l, node).toString()); l = new Location(getTestName(), 0); - s = painlessLookup.getPainlessStructFromJavaClass(List.class); - node = new PSubListShortcut(l, s, new EBinary(l, Operation.ADD, new EConstant(l, 1), new EConstant(l, 4))); + node = new PSubListShortcut(l, List.class, new EBinary(l, Operation.ADD, new EConstant(l, 1), new EConstant(l, 4))); node.prefix = new EVariable(l, "a"); assertEquals("(PSubListShortcut (EVariable a) (EBinary (EConstant Integer 1) + (EConstant Integer 4)))", node.toString()); } public void testPSubMapShortcut() { Location l = new Location(getTestName(), 0); - PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(Map.class); - PSubMapShortcut node = new PSubMapShortcut(l, s, new EConstant(l, "cat")); + PSubMapShortcut node = new PSubMapShortcut(l, Map.class, new EConstant(l, "cat")); node.prefix = new EVariable(l, "a"); assertEquals("(PSubMapShortcut (EVariable a) 
(EConstant String 'cat'))", node.toString()); assertEquals("(PSubNullSafeCallInvoke (PSubMapShortcut (EVariable a) (EConstant String 'cat')))", new PSubNullSafeCallInvoke(l, node).toString()); l = new Location(getTestName(), 1); - s = painlessLookup.getPainlessStructFromJavaClass(Map.class); - node = new PSubMapShortcut(l, s, new EBinary(l, Operation.ADD, new EConstant(l, 1), new EConstant(l, 4))); + node = new PSubMapShortcut(l, Map.class, new EBinary(l, Operation.ADD, new EConstant(l, 1), new EConstant(l, 4))); node.prefix = new EVariable(l, "a"); assertEquals("(PSubMapShortcut (EVariable a) (EBinary (EConstant Integer 1) + (EConstant Integer 4)))", node.toString()); } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/FeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/FeatureQueryBuilderTests.java index dd7b42487fd..40da4b53227 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/FeatureQueryBuilderTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/FeatureQueryBuilderTests.java @@ -45,12 +45,10 @@ public class FeatureQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"feature\" : {\n" + " \"field\": \"my_feature_field\"\n" + @@ -110,7 +107,6 @@ public class FeatureQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"feature\" : {\n" + " \"field\": \"" + STRING_FIELD_NAME + "\"\n" + @@ -121,7 +117,6 @@ public class FeatureQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"feature\" : {\n" + " \"field\": \"my_negative_feature_field\",\n" + diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsHandler.java index 3dad3c8a437..78ea9decd1d 100644 --- 
a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsHandler.java @@ -24,6 +24,7 @@ import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; import io.netty.handler.codec.http.DefaultFullHttpResponse; +import io.netty.handler.codec.http.FullHttpRequest; import io.netty.handler.codec.http.HttpHeaderNames; import io.netty.handler.codec.http.HttpHeaders; import io.netty.handler.codec.http.HttpMethod; @@ -50,7 +51,7 @@ public class Netty4CorsHandler extends ChannelDuplexHandler { private static Pattern SCHEME_PATTERN = Pattern.compile("^https?://"); private final Netty4CorsConfig config; - private HttpRequest request; + private FullHttpRequest request; /** * Creates a new instance with the specified {@link Netty4CorsConfig}. @@ -64,15 +65,24 @@ public class Netty4CorsHandler extends ChannelDuplexHandler { @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { - if (config.isCorsSupportEnabled() && msg instanceof HttpRequest) { - request = (HttpRequest) msg; + assert msg instanceof FullHttpRequest : "Invalid message type: " + msg.getClass(); + if (config.isCorsSupportEnabled()) { + request = (FullHttpRequest) msg; if (isPreflightRequest(request)) { - handlePreflight(ctx, request); - return; + try { + handlePreflight(ctx, request); + return; + } finally { + releaseRequest(); + } } if (config.isShortCircuit() && !validateOrigin()) { - forbidden(ctx, request); - return; + try { + forbidden(ctx, request); + return; + } finally { + releaseRequest(); + } } } ctx.fireChannelRead(msg); @@ -123,6 +133,11 @@ public class Netty4CorsHandler extends ChannelDuplexHandler { } } + private void releaseRequest() { + request.release(); + request = null; + } + private static void forbidden(final ChannelHandlerContext ctx, final HttpRequest 
request) { ctx.writeAndFlush(new DefaultFullHttpResponse(request.protocolVersion(), HttpResponseStatus.FORBIDDEN)) .addListener(ChannelFutureListener.CLOSE); diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index 4d6f9310cdd..ad5a7b7c57b 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -30,7 +30,7 @@ forbiddenApis { dependencies { compile "org.apache.lucene:lucene-analyzers-icu:${versions.lucene}" - compile 'com.ibm.icu:icu4j:61.1' + compile 'com.ibm.icu:icu4j:62.1' } dependencyLicenses { diff --git a/plugins/analysis-icu/licenses/icu4j-61.1.jar.sha1 b/plugins/analysis-icu/licenses/icu4j-61.1.jar.sha1 deleted file mode 100644 index 7b7fcfe1c79..00000000000 --- a/plugins/analysis-icu/licenses/icu4j-61.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -28d33b5e44e72edcc66a5da7a34a42147f38d987 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/icu4j-62.1.jar.sha1 b/plugins/analysis-icu/licenses/icu4j-62.1.jar.sha1 new file mode 100644 index 00000000000..c24c69cf4b9 --- /dev/null +++ b/plugins/analysis-icu/licenses/icu4j-62.1.jar.sha1 @@ -0,0 +1 @@ +7a4d00d5ec5febd252a6182e8b6e87a0a9821f81 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 deleted file mode 100644 index b5291b30c7d..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -394e811e9d9bf0b9fba837f7ceca9e8f3e39d1c2 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..5b6947a9c75 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +7a37816def72a748416c4ae8b0f6817e30efb99f \ No 
newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 deleted file mode 100644 index 49f55bea5e6..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5cd56acfa16ba20e19b5d21d90b510eada841431 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..d39638c1884 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +ca7437178cdbf7b8bfe0d75c75e3c8eb93925724 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 deleted file mode 100644 index c4b61b763b4..00000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -db7b56f4cf533ad9022d2312c5ee48331edccca3 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..21c25d2bb24 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +3f5dec44f380d6d58bc1c8aec51964fcb5390b60 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 deleted file mode 100644 index 779cac97612..00000000000 --- 
a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e8dba4d28a595eab2e8fb6095d1ac5f2d3872144 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..f58c597eadd --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +453bf1d60df0415439095624e0b3e42492ad4716 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 deleted file mode 100644 index cf5c49a2759..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1243c771ee824c46a3d66ae3e4256d919fc06fbe \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..8ccec8dbf37 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +70095a45257bca9f46629b5fb6cedf9eff5e2b07 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 deleted file mode 100644 index 830b9ccf9cb..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c783794b0d20d8dc1285edc7701f386b1f0e2fb8 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 
b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..ec9c33119f5 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +7199d6962d268b7877f7b5160e98e4ff21cce5c7 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 deleted file mode 100644 index a96e05f5e3b..00000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9438efa504a89afb6cb4c66448c257f865164d23 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..ba9148ef1b3 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +12aff508d39d206a1aead5013ecd11882062eb06 \ No newline at end of file diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/plugin/store/smb/SMBStorePlugin.java b/plugins/store-smb/src/main/java/org/elasticsearch/plugin/store/smb/SMBStorePlugin.java index 241f3d77e8b..a2b0f463452 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/plugin/store/smb/SMBStorePlugin.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/plugin/store/smb/SMBStorePlugin.java @@ -19,16 +19,26 @@ package org.elasticsearch.plugin.store.smb; -import org.elasticsearch.index.IndexModule; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.smbmmapfs.SmbMmapFsIndexStore; import org.elasticsearch.index.store.smbsimplefs.SmbSimpleFsIndexStore; +import 
org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.Plugin; -public class SMBStorePlugin extends Plugin { +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Function; + +public class SMBStorePlugin extends Plugin implements IndexStorePlugin { @Override - public void onIndexModule(IndexModule indexModule) { - indexModule.addIndexStore("smb_mmap_fs", SmbMmapFsIndexStore::new); - indexModule.addIndexStore("smb_simple_fs", SmbSimpleFsIndexStore::new); + public Map> getIndexStoreFactories() { + final Map> indexStoreFactories = new HashMap<>(2); + indexStoreFactories.put("smb_mmap_fs", SmbMmapFsIndexStore::new); + indexStoreFactories.put("smb_simple_fs", SmbSimpleFsIndexStore::new); + return Collections.unmodifiableMap(indexStoreFactories); } + } diff --git a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SmbMMapDirectoryTests.java b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SmbMMapDirectoryTests.java index 3dad35adaa2..f4f6bed7f04 100644 --- a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SmbMMapDirectoryTests.java +++ b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SmbMMapDirectoryTests.java @@ -30,4 +30,12 @@ public class SmbMMapDirectoryTests extends EsBaseDirectoryTestCase { protected Directory getDirectory(Path file) throws IOException { return new SmbDirectoryWrapper(new MMapDirectory(file)); } + + @Override + public void testCreateOutputForExistingFile() throws IOException { + /** + * This test is disabled because {@link SmbDirectoryWrapper} opens existing file + * with an explicit StandardOpenOption.TRUNCATE_EXISTING option. 
+ */ + } } diff --git a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SmbSimpleFSDirectoryTests.java b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SmbSimpleFSDirectoryTests.java index 659f6eff7ac..c20b3bc2863 100644 --- a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SmbSimpleFSDirectoryTests.java +++ b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SmbSimpleFSDirectoryTests.java @@ -30,4 +30,12 @@ public class SmbSimpleFSDirectoryTests extends EsBaseDirectoryTestCase { protected Directory getDirectory(Path file) throws IOException { return new SmbDirectoryWrapper(new SimpleFSDirectory(file)); } + + @Override + public void testCreateOutputForExistingFile() throws IOException { + /** + * This test is disabled because {@link SmbDirectoryWrapper} opens existing file + * with an explicit StandardOpenOption.TRUNCATE_EXISTING option. + */ + } } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/cors/NioCorsHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/cors/NioCorsHandler.java index 98ae2d523ca..5a9d114d675 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/cors/NioCorsHandler.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/cors/NioCorsHandler.java @@ -24,6 +24,7 @@ import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; import io.netty.handler.codec.http.DefaultFullHttpResponse; +import io.netty.handler.codec.http.FullHttpRequest; import io.netty.handler.codec.http.HttpHeaderNames; import io.netty.handler.codec.http.HttpHeaders; import io.netty.handler.codec.http.HttpMethod; @@ -50,7 +51,7 @@ public class NioCorsHandler extends ChannelDuplexHandler { private static Pattern SCHEME_PATTERN = Pattern.compile("^https?://"); private final NioCorsConfig config; - private HttpRequest request; + private FullHttpRequest request; /** 
* Creates a new instance with the specified {@link NioCorsConfig}. @@ -64,15 +65,24 @@ public class NioCorsHandler extends ChannelDuplexHandler { @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { - if (config.isCorsSupportEnabled() && msg instanceof HttpRequest) { - request = (HttpRequest) msg; + assert msg instanceof FullHttpRequest : "Invalid message type: " + msg.getClass(); + if (config.isCorsSupportEnabled()) { + request = (FullHttpRequest) msg; if (isPreflightRequest(request)) { - handlePreflight(ctx, request); - return; + try { + handlePreflight(ctx, request); + return; + } finally { + releaseRequest(); + } } if (config.isShortCircuit() && !validateOrigin()) { - forbidden(ctx, request); - return; + try { + forbidden(ctx, request); + return; + } finally { + releaseRequest(); + } } } ctx.fireChannelRead(msg); @@ -109,6 +119,11 @@ public class NioCorsHandler extends ChannelDuplexHandler { } } + private void releaseRequest() { + request.release(); + request = null; + } + private void handlePreflight(final ChannelHandlerContext ctx, final HttpRequest request) { final HttpResponse response = new DefaultFullHttpResponse(request.protocolVersion(), HttpResponseStatus.OK, true, true); if (setOrigin(response)) { diff --git a/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 deleted file mode 100644 index 928cc6dea04..00000000000 --- a/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e1afb580df500626a1c695e0fc9a7e8a8f58bcac \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-analyzers-common-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..8b2a098a3a2 --- /dev/null +++ b/server/licenses/lucene-analyzers-common-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +d27958843ca118db2ffd2c242ae3761bd5a47328 \ No newline at end 
of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 deleted file mode 100644 index a94663119e7..00000000000 --- a/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a6ad941ef1fdad48673ed511631b7e48a9456bf7 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-backward-codecs-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..d8496a0a86a --- /dev/null +++ b/server/licenses/lucene-backward-codecs-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +7ea220ba8e4accb8b04e280463042ad470e23bc0 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0.jar.sha1 b/server/licenses/lucene-core-7.4.0.jar.sha1 deleted file mode 100644 index 80ba6c76aa3..00000000000 --- a/server/licenses/lucene-core-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -730d9ac80436c8cbc0b2a8a749259be536b97316 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..d38fb392c35 --- /dev/null +++ b/server/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +471096d6e92338b208aa91f3a85feb2f9cfc4afd \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0.jar.sha1 b/server/licenses/lucene-grouping-7.4.0.jar.sha1 deleted file mode 100644 index 5b781d26829..00000000000 --- a/server/licenses/lucene-grouping-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -56f99858a4421a517b52da36a222debcccab80c6 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-grouping-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..7f83082fa0c --- /dev/null +++ 
b/server/licenses/lucene-grouping-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +f0af947c60d24f779c22f774e81ebd7dd91cc932 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0.jar.sha1 deleted file mode 100644 index e1ebb95fe1b..00000000000 --- a/server/licenses/lucene-highlighter-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5266b45d7f049662817d739881765904621876d0 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-highlighter-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..6b9f2cb724d --- /dev/null +++ b/server/licenses/lucene-highlighter-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +fbc83ac5a0139ed7e7faf6c95a2718f46f28c641 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0.jar.sha1 b/server/licenses/lucene-join-7.4.0.jar.sha1 deleted file mode 100644 index ff81c33c3f8..00000000000 --- a/server/licenses/lucene-join-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c77154d18c4944ceb6ce0741060632f57d623fdc \ No newline at end of file diff --git a/server/licenses/lucene-join-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-join-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..a085943140e --- /dev/null +++ b/server/licenses/lucene-join-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +30adfe493982b0db059dc243e269eea38d850d46 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0.jar.sha1 b/server/licenses/lucene-memory-7.4.0.jar.sha1 deleted file mode 100644 index 7c0117dff6b..00000000000 --- a/server/licenses/lucene-memory-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -186ff981feec1bdbf1a6236e786ec171b5fbe3e0 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-memory-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 
index 00000000000..7acc70be151 --- /dev/null +++ b/server/licenses/lucene-memory-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +656f304261d9aad05070fb68593beffafe9147e3 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0.jar.sha1 b/server/licenses/lucene-misc-7.4.0.jar.sha1 deleted file mode 100644 index 5cdf6810fa5..00000000000 --- a/server/licenses/lucene-misc-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bf844bb6f6d84da19e8c79ce5fbb4cf6d00f2611 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-misc-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..e46b138ba7b --- /dev/null +++ b/server/licenses/lucene-misc-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +8bf22ad81a7480c255b55bada401eb131bfdb4df \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.4.0.jar.sha1 b/server/licenses/lucene-queries-7.4.0.jar.sha1 deleted file mode 100644 index 19889037937..00000000000 --- a/server/licenses/lucene-queries-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -229a50e6d9d4db076f671c230d493000c6e2972c \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-queries-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..a7114feef62 --- /dev/null +++ b/server/licenses/lucene-queries-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +edb3de4d68a34c1e1ca08f79fe4d103b10e98ad1 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0.jar.sha1 deleted file mode 100644 index afdc275afe2..00000000000 --- a/server/licenses/lucene-queryparser-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e58add0d0c39df97d07c8e343041989bf4b3a3f \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.5.0-snapshot-608f0277b0.jar.sha1 
b/server/licenses/lucene-queryparser-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..cf3011c9a45 --- /dev/null +++ b/server/licenses/lucene-queryparser-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +7ece30d5f1e18d96f61644451c858c3d9960558f \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0.jar.sha1 deleted file mode 100644 index 81ae3bddd07..00000000000 --- a/server/licenses/lucene-sandbox-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1692604fa06a945d1ee19939022ef1a912235db3 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-sandbox-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..30513e58bf6 --- /dev/null +++ b/server/licenses/lucene-sandbox-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +ad3bd0c2ed96556193c7215bef328e689d0b157f \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0.jar.sha1 b/server/licenses/lucene-spatial-7.4.0.jar.sha1 deleted file mode 100644 index cc3f31340b9..00000000000 --- a/server/licenses/lucene-spatial-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -847d2f897961124e2fc7d5e55d8309635bb026bc \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-spatial-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..6146b055c13 --- /dev/null +++ b/server/licenses/lucene-spatial-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +8a6bd97e39ee5af60126adbe8c8375dc41b1ea8e \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 deleted file mode 100644 index 3f05790e430..00000000000 --- a/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -586892eefc0546643d7f5d7f83659c7db0d534ff \ No newline at end of file 
diff --git a/server/licenses/lucene-spatial-extras-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-spatial-extras-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..c812f044927 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +07e748d2d80000a7a213f3405b82b6e26b452948 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 deleted file mode 100644 index 8c767b16c53..00000000000 --- a/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -32cd2854f39ff453a5d128ce40e11eea4168abbf \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-spatial3d-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..b5ad83ac9fe --- /dev/null +++ b/server/licenses/lucene-spatial3d-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +fd737bd5562f3943618ee7e73a0aaffb6319fdb2 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0.jar.sha1 b/server/licenses/lucene-suggest-7.4.0.jar.sha1 deleted file mode 100644 index 59d59cf7941..00000000000 --- a/server/licenses/lucene-suggest-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0cdc1a512032f8b23dd4b1add0f5cd06325addc3 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-suggest-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..452b96420f8 --- /dev/null +++ b/server/licenses/lucene-suggest-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +ff3f260d1dc8c18bc67f3c33aa84a0ad290daac5 \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 1fefbad0d74..a815a9711d0 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ 
b/server/src/main/java/org/elasticsearch/Version.java @@ -179,10 +179,10 @@ public class Version implements Comparable, ToXContentFragment { public static final int V_6_4_0_ID = 6040099; public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); public static final int V_6_5_0_ID = 6050099; - public static final Version V_6_5_0 = new Version(V_6_5_0_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); + public static final Version V_6_5_0 = new Version(V_6_5_0_ID, org.apache.lucene.util.Version.LUCENE_7_5_0); public static final int V_7_0_0_alpha1_ID = 7000001; public static final Version V_7_0_0_alpha1 = - new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); + new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_5_0); public static final Version CURRENT = V_7_0_0_alpha1; static { diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index b232d849223..2be4e5bf053 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.action.support.nodes; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.NoSuchNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; @@ -179,37 +178,33 @@ public abstract class TransportNodesAction() { - @Override - public NodeResponse newInstance() { - return newNodeResponse(); - } - - @Override - public void handleResponse(NodeResponse response) { - onOperation(idx, response); - } - - 
@Override - public void handleException(TransportException exp) { - onFailure(idx, node.getId(), exp); - } - - @Override - public String executor() { - return ThreadPool.Names.SAME; - } - }); + TransportRequest nodeRequest = newNodeRequest(nodeId, request); + if (task != null) { + nodeRequest.setParentTask(clusterService.localNode().getId(), task.getId()); } + + transportService.sendRequest(node, transportNodeAction, nodeRequest, builder.build(), + new TransportResponseHandler() { + @Override + public NodeResponse newInstance() { + return newNodeResponse(); + } + + @Override + public void handleResponse(NodeResponse response) { + onOperation(idx, response); + } + + @Override + public void handleException(TransportException exp) { + onFailure(idx, node.getId(), exp); + } + + @Override + public String executor() { + return ThreadPool.Names.SAME; + } + }); } catch (Exception e) { onFailure(idx, nodeId, e); } diff --git a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java index af0e0248471..45ce2b610ca 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java +++ b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.common.geo.parsers; -import org.locationtech.jts.geom.Coordinate; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.geo.GeoPoint; @@ -29,6 +28,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; +import org.locationtech.jts.geom.Coordinate; import java.io.IOException; import java.util.ArrayList; @@ -130,10 +130,6 @@ abstract class GeoJsonParser { CircleBuilder.TYPE); } - if (shapeType == null) { - 
throw new ElasticsearchParseException("shape type [{}] not included", shapeType); - } - if (shapeType.equals(GeoShapeType.GEOMETRYCOLLECTION)) { return geometryCollections; } diff --git a/server/src/main/java/org/elasticsearch/index/EsTieredMergePolicy.java b/server/src/main/java/org/elasticsearch/index/EsTieredMergePolicy.java index 27a83961903..68edf3a9b18 100644 --- a/server/src/main/java/org/elasticsearch/index/EsTieredMergePolicy.java +++ b/server/src/main/java/org/elasticsearch/index/EsTieredMergePolicy.java @@ -52,7 +52,7 @@ final class EsTieredMergePolicy extends FilterMergePolicy { @Override public MergeSpecification findForcedDeletesMerges(SegmentInfos infos, MergeContext mergeContext) throws IOException { - return forcedMergePolicy.findForcedDeletesMerges(infos, mergeContext); + return forcedMergePolicy.findForcedDeletesMerges(infos, mergeContext); } public void setForceMergeDeletesPctAllowed(double forceMergeDeletesPctAllowed) { @@ -108,13 +108,4 @@ final class EsTieredMergePolicy extends FilterMergePolicy { public double getSegmentsPerTier() { return regularMergePolicy.getSegmentsPerTier(); } - - public void setReclaimDeletesWeight(double reclaimDeletesWeight) { - regularMergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight); - forcedMergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight); - } - - public double getReclaimDeletesWeight() { - return regularMergePolicy.getReclaimDeletesWeight(); - } } diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index 9e859a16956..715b78b14ff 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -49,6 +49,7 @@ import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import 
org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; @@ -74,7 +75,7 @@ import java.util.function.Function; * {@link #addSimilarity(String, TriFunction)} while existing Providers can be referenced through Settings under the * {@link IndexModule#SIMILARITY_SETTINGS_PREFIX} prefix along with the "type" value. For example, to reference the * {@link BM25Similarity}, the configuration {@code "index.similarity.my_similarity.type : "BM25"} can be used. - *
  • {@link IndexStore} - Custom {@link IndexStore} instances can be registered via {@link #addIndexStore(String, Function)}
  • + *
  • {@link IndexStore} - Custom {@link IndexStore} instances can be registered via {@link IndexStorePlugin}
  • *
  • {@link IndexEventListener} - Custom {@link IndexEventListener} instances can be registered via * {@link #addIndexEventListener(IndexEventListener)}
  • *
  • Settings update listener - Custom settings update listener can be registered via @@ -109,7 +110,7 @@ public final class IndexModule { private SetOnce indexSearcherWrapper = new SetOnce<>(); private final Set indexEventListeners = new HashSet<>(); private final Map> similarities = new HashMap<>(); - private final Map> storeTypes = new HashMap<>(); + private final Map> indexStoreFactories; private final SetOnce> forceQueryCacheProvider = new SetOnce<>(); private final List searchOperationListeners = new ArrayList<>(); private final List indexOperationListeners = new ArrayList<>(); @@ -119,16 +120,22 @@ public final class IndexModule { * Construct the index module for the index with the specified index settings. The index module contains extension points for plugins * via {@link org.elasticsearch.plugins.PluginsService#onIndexModule(IndexModule)}. * - * @param indexSettings the index settings - * @param analysisRegistry the analysis registry - * @param engineFactory the engine factory + * @param indexSettings the index settings + * @param analysisRegistry the analysis registry + * @param engineFactory the engine factory + * @param indexStoreFactories the available store types */ - public IndexModule(final IndexSettings indexSettings, final AnalysisRegistry analysisRegistry, final EngineFactory engineFactory) { + public IndexModule( + final IndexSettings indexSettings, + final AnalysisRegistry analysisRegistry, + final EngineFactory engineFactory, + final Map> indexStoreFactories) { this.indexSettings = indexSettings; this.analysisRegistry = analysisRegistry; this.engineFactory = Objects.requireNonNull(engineFactory); this.searchOperationListeners.add(new SearchSlowLog(indexSettings)); this.indexOperationListeners.add(new IndexingSlowLog(indexSettings)); + this.indexStoreFactories = Collections.unmodifiableMap(indexStoreFactories); } /** @@ -245,25 +252,6 @@ public final class IndexModule { this.indexOperationListeners.add(listener); } - /** - * Adds an {@link 
IndexStore} type to this index module. Typically stores are registered with a reference to - * it's constructor: - *
    -     *     indexModule.addIndexStore("my_store_type", MyStore::new);
    -     * 
    - * - * @param type the type to register - * @param provider the instance provider / factory method - */ - public void addIndexStore(String type, Function provider) { - ensureNotFrozen(); - if (storeTypes.containsKey(type)) { - throw new IllegalArgumentException("key [" + type +"] already registered"); - } - storeTypes.put(type, provider); - } - - /** * Registers the given {@link Similarity} with the given name. * The function takes as parameters:
      @@ -360,7 +348,7 @@ public final class IndexModule { if (Strings.isEmpty(storeType) || isBuiltinType(storeType)) { store = new IndexStore(indexSettings); } else { - Function factory = storeTypes.get(storeType); + Function factory = indexStoreFactories.get(storeType); if (factory == null) { throw new IllegalArgumentException("Unknown store type [" + storeType + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 08cacee6ae0..486515e6755 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -416,7 +416,6 @@ public final class IndexSettings { scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING, mergePolicyConfig::setMaxMergesAtOnceExplicit); scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING, mergePolicyConfig::setMaxMergedSegment); scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING, mergePolicyConfig::setSegmentsPerTier); - scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING, mergePolicyConfig::setReclaimDeletesWeight); scopedSettings.addSettingsUpdateConsumer(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING, MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING, mergeSchedulerConfig::setMaxThreadAndMergeCount); diff --git a/server/src/main/java/org/elasticsearch/index/MergePolicyConfig.java b/server/src/main/java/org/elasticsearch/index/MergePolicyConfig.java index b6c1e124a63..8a264cd3cb7 100644 --- a/server/src/main/java/org/elasticsearch/index/MergePolicyConfig.java +++ b/server/src/main/java/org/elasticsearch/index/MergePolicyConfig.java @@ -150,7 +150,7 @@ public final class MergePolicyConfig { Property.Dynamic, Property.IndexScope); public static 
final Setting INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING = Setting.doubleSetting("index.merge.policy.reclaim_deletes_weight", DEFAULT_RECLAIM_DELETES_WEIGHT, 0.0d, - Property.Dynamic, Property.IndexScope); + Property.Dynamic, Property.IndexScope, Property.Deprecated); public static final String INDEX_MERGE_ENABLED = "index.merge.enabled"; // don't convert to Setting<> and register... we only set this in tests and register via a plugin @@ -176,17 +176,12 @@ public final class MergePolicyConfig { mergePolicy.setMaxMergeAtOnceExplicit(maxMergeAtOnceExplicit); mergePolicy.setMaxMergedSegmentMB(maxMergedSegment.getMbFrac()); mergePolicy.setSegmentsPerTier(segmentsPerTier); - mergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight); if (logger.isTraceEnabled()) { logger.trace("using [tiered] merge mergePolicy with expunge_deletes_allowed[{}], floor_segment[{}], max_merge_at_once[{}], max_merge_at_once_explicit[{}], max_merged_segment[{}], segments_per_tier[{}], reclaim_deletes_weight[{}]", forceMergeDeletesPctAllowed, floorSegment, maxMergeAtOnce, maxMergeAtOnceExplicit, maxMergedSegment, segmentsPerTier, reclaimDeletesWeight); } } - void setReclaimDeletesWeight(Double reclaimDeletesWeight) { - mergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight); - } - void setSegmentsPerTier(Double segmentsPerTier) { mergePolicy.setSegmentsPerTier(segmentsPerTier); } diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 53a7baa60f6..b7c938b469f 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -800,6 +800,8 @@ public abstract class Engine implements Closeable { } catch (IOException e) { logger.trace(() -> new ParameterizedMessage("failed to get size for [{}]", info.info.name), e); } + segment.segmentSort = info.info.getIndexSort(); + segment.attributes = info.info.getAttributes(); 
segments.put(info.info.name, segment); } else { segment.committed = true; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 4f535f01da4..39346fecbef 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -100,6 +100,7 @@ import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.IndexingOperationListener; import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.cluster.IndicesClusterStateService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; @@ -181,6 +182,7 @@ public class IndicesService extends AbstractLifecycleComponent private final IndicesQueryCache indicesQueryCache; private final MetaStateService metaStateService; private final Collection>> engineFactoryProviders; + private final Map> indexStoreFactories; @Override protected void doStart() { @@ -193,7 +195,8 @@ public class IndicesService extends AbstractLifecycleComponent MapperRegistry mapperRegistry, NamedWriteableRegistry namedWriteableRegistry, ThreadPool threadPool, IndexScopedSettings indexScopedSettings, CircuitBreakerService circuitBreakerService, BigArrays bigArrays, ScriptService scriptService, Client client, MetaStateService metaStateService, - Collection>> engineFactoryProviders) { + Collection>> engineFactoryProviders, + Map> indexStoreFactories) { super(settings); this.threadPool = threadPool; this.pluginsService = pluginsService; @@ -225,6 +228,7 @@ public class IndicesService extends AbstractLifecycleComponent this.cacheCleaner = new CacheCleaner(indicesFieldDataCache, indicesRequestCache, logger, threadPool, this.cleanInterval); 
this.metaStateService = metaStateService; this.engineFactoryProviders = engineFactoryProviders; + this.indexStoreFactories = indexStoreFactories; } @Override @@ -464,7 +468,7 @@ public class IndicesService extends AbstractLifecycleComponent idxSettings.getNumberOfReplicas(), reason); - final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry, getEngineFactory(idxSettings)); + final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry, getEngineFactory(idxSettings), indexStoreFactories); for (IndexingOperationListener operationListener : indexingOperationListeners) { indexModule.addIndexOperationListener(operationListener); } @@ -524,7 +528,7 @@ public class IndicesService extends AbstractLifecycleComponent */ public synchronized MapperService createIndexMapperService(IndexMetaData indexMetaData) throws IOException { final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexScopedSettings); - final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry, getEngineFactory(idxSettings)); + final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry, getEngineFactory(idxSettings), indexStoreFactories); pluginsService.onIndexModule(indexModule); return indexModule.newIndexMapperService(xContentRegistry, mapperRegistry, scriptService); } diff --git a/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java b/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java index 78dc0ec6bfe..2853842c646 100644 --- a/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java +++ b/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java @@ -361,7 +361,7 @@ public final class ConfigurationUtils { return readProcessor(processorFactories, type, (Map) config); } else if (config instanceof String && "script".equals(type)) { Map normalizedScript = new HashMap<>(1); - normalizedScript.put(ScriptType.INLINE.getName(), config); + 
normalizedScript.put(ScriptType.INLINE.getParseField().getPreferredName(), config); return readProcessor(processorFactories, type, normalizedScript); } else { throw newConfigurationException(type, null, null, diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 64bc55edb71..c1ce864223b 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -26,8 +26,8 @@ import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.Version; -import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.search.SearchExecutionStatsCollector; import org.elasticsearch.action.search.SearchPhaseController; import org.elasticsearch.action.search.SearchTransportService; @@ -94,6 +94,7 @@ import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.engine.EngineFactory; +import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.analysis.AnalysisModule; @@ -117,6 +118,7 @@ import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.EnginePlugin; +import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.MetaDataUpgrader; @@ -407,11 +409,19 @@ public class Node implements Closeable { enginePlugins.stream().map(plugin -> plugin::getEngineFactory)) 
.collect(Collectors.toList()); + + final Map> indexStoreFactories = + pluginsService.filterPlugins(IndexStorePlugin.class) + .stream() + .map(IndexStorePlugin::getIndexStoreFactories) + .flatMap(m -> m.entrySet().stream()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + final IndicesService indicesService = new IndicesService(settings, pluginsService, nodeEnvironment, xContentRegistry, analysisModule.getAnalysisRegistry(), clusterModule.getIndexNameExpressionResolver(), indicesModule.getMapperRegistry(), namedWriteableRegistry, threadPool, settingsModule.getIndexScopedSettings(), circuitBreakerService, bigArrays, - scriptModule.getScriptService(), client, metaStateService, engineFactoryProviders); + scriptModule.getScriptService(), client, metaStateService, engineFactoryProviders, indexStoreFactories); Collection pluginComponents = pluginsService.filterPlugins(Plugin.class).stream() diff --git a/server/src/main/java/org/elasticsearch/plugins/IndexStorePlugin.java b/server/src/main/java/org/elasticsearch/plugins/IndexStorePlugin.java new file mode 100644 index 00000000000..16eec535e4b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/IndexStorePlugin.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugins; + +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.store.IndexStore; + +import java.util.Map; +import java.util.function.Function; + +/** + * A plugin that provides alternative index store implementations. + */ +public interface IndexStorePlugin { + + /** + * The index store factories for this plugin. When an index is created the store type setting + * {@link org.elasticsearch.index.IndexModule#INDEX_STORE_TYPE_SETTING} on the index will be examined and either use the default or a + * built-in type, or looked up among all the index store factories from {@link IndexStore} plugins. + * + * @return a map from store type to an index store factory + */ + Map> getIndexStoreFactories(); + +} diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 22743e38839..cc1d27425e1 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -1554,11 +1554,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp filesToRecover.add(fileInfo); recoveryState.getIndex().addFileDetail(fileInfo.name(), fileInfo.length(), false); if (logger.isTraceEnabled()) { - if (md == null) { - logger.trace("[{}] [{}] recovering [{}] from [{}], does not exists in local store", shardId, snapshotId, fileInfo.physicalName(), fileInfo.name()); - } else { - logger.trace("[{}] [{}] recovering [{}] from [{}], exists in local store but is different", shardId, snapshotId, fileInfo.physicalName(), fileInfo.name()); - } + logger.trace("[{}] [{}] recovering [{}] from [{}], exists in local store but is different", shardId, snapshotId, + 
fileInfo.physicalName(), fileInfo.name()); } } diff --git a/server/src/test/java/org/elasticsearch/index/EsTieredMergePolicyTests.java b/server/src/test/java/org/elasticsearch/index/EsTieredMergePolicyTests.java index fdee707d97a..30f73b887f7 100644 --- a/server/src/test/java/org/elasticsearch/index/EsTieredMergePolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/EsTieredMergePolicyTests.java @@ -29,16 +29,14 @@ public class EsTieredMergePolicyTests extends ESTestCase { assertEquals( new TieredMergePolicy().getMaxMergedSegmentMB(), policy.regularMergePolicy.getMaxMergedSegmentMB(), 0d); - // TODO: fix when incorporating https://issues.apache.org/jira/browse/LUCENE-8398, the first divisor must be a double - assertEquals(Long.MAX_VALUE / 1024 / 1024.0, policy.forcedMergePolicy.getMaxMergedSegmentMB(), 0d); + assertEquals(Long.MAX_VALUE / 1024.0 / 1024.0, policy.forcedMergePolicy.getMaxMergedSegmentMB(), 0d); } public void testSetMaxMergedSegmentMB() { EsTieredMergePolicy policy = new EsTieredMergePolicy(); policy.setMaxMergedSegmentMB(10 * 1024); assertEquals(10 * 1024, policy.regularMergePolicy.getMaxMergedSegmentMB(), 0d); - // TODO: fix when incorporating https://issues.apache.org/jira/browse/LUCENE-8398, the first divisor must be a double - assertEquals(Long.MAX_VALUE / 1024 / 1024.0, policy.forcedMergePolicy.getMaxMergedSegmentMB(), 0d); + assertEquals(Long.MAX_VALUE / 1024.0 / 1024.0, policy.forcedMergePolicy.getMaxMergedSegmentMB(), 0d); } public void testSetForceMergeDeletesPctAllowed() { @@ -71,10 +69,4 @@ public class EsTieredMergePolicyTests extends ESTestCase { policy.setSegmentsPerTier(42); assertEquals(42, policy.regularMergePolicy.getSegmentsPerTier(), 0); } - - public void testSetReclaimDeletesWeight() { - EsTieredMergePolicy policy = new EsTieredMergePolicy(); - policy.setReclaimDeletesWeight(42); - assertEquals(42, policy.regularMergePolicy.getReclaimDeletesWeight(), 0); - } } diff --git 
a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 1d531bdeb90..a82b932e2b5 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -81,10 +81,13 @@ import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; import java.util.Collections; +import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.instanceOf; public class IndexModuleTests extends ESTestCase { private Index index; @@ -147,7 +150,8 @@ public class IndexModuleTests extends ESTestCase { } public void testWrapperIsBound() throws IOException { - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new MockEngineFactory(AssertingDirectoryReader.class)); + final MockEngineFactory engineFactory = new MockEngineFactory(AssertingDirectoryReader.class); + IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, engineFactory, Collections.emptyMap()); module.setSearcherWrapper((s) -> new Wrapper()); IndexService indexService = newIndexService(module); @@ -164,18 +168,12 @@ public class IndexModuleTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "foo_store") .build(); - IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory()); - module.addIndexStore("foo_store", FooStore::new); - try { - module.addIndexStore("foo_store", FooStore::new); - fail("already registered"); - } catch (IllegalArgumentException ex) { - // fine - } + final IndexSettings indexSettings = 
IndexSettingsModule.newIndexSettings(index, settings); + final Map> indexStoreFactories = Collections.singletonMap("foo_store", FooStore::new); + final IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), indexStoreFactories); - IndexService indexService = newIndexService(module); - assertTrue(indexService.getIndexStore() instanceof FooStore); + final IndexService indexService = newIndexService(module); + assertThat(indexService.getIndexStore(), instanceOf(FooStore.class)); indexService.close("simon says", false); } @@ -189,7 +187,7 @@ public class IndexModuleTests extends ESTestCase { } }; IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory()); + IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); module.addIndexEventListener(eventListener); IndexService indexService = newIndexService(module); IndexSettings x = indexService.getIndexSettings(); @@ -204,7 +202,7 @@ public class IndexModuleTests extends ESTestCase { public void testListener() throws IOException { Setting booleanSetting = Setting.boolSetting("index.foo.bar", false, Property.Dynamic, Property.IndexScope); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, booleanSetting); - IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory()); + IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); Setting booleanSetting2 = Setting.boolSetting("index.foo.bar.baz", false, Property.Dynamic, Property.IndexScope); AtomicBoolean atomicBoolean = new AtomicBoolean(false); module.addSettingsUpdateConsumer(booleanSetting, atomicBoolean::set); @@ -223,8 +221,8 @@ public class IndexModuleTests extends 
ESTestCase { } public void testAddIndexOperationListener() throws IOException { - IndexModule module = - new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), emptyAnalysisRegistry, new InternalEngineFactory()); + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); + IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); AtomicBoolean executed = new AtomicBoolean(false); IndexingOperationListener listener = new IndexingOperationListener() { @Override @@ -254,8 +252,8 @@ public class IndexModuleTests extends ESTestCase { } public void testAddSearchOperationListener() throws IOException { - IndexModule module = - new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), emptyAnalysisRegistry, new InternalEngineFactory()); + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); + IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); AtomicBoolean executed = new AtomicBoolean(false); SearchOperationListener listener = new SearchOperationListener() { @@ -288,8 +286,9 @@ public class IndexModuleTests extends ESTestCase { .put("index.similarity.my_similarity.key", "there is a key") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); IndexModule module = - new IndexModule(IndexSettingsModule.newIndexSettings("foo", settings), emptyAnalysisRegistry, new InternalEngineFactory()); + new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); module.addSimilarity("test_similarity", (providerSettings, indexCreatedVersion, scriptService) -> new TestSimilarity(providerSettings.get("key"))); @@ -303,8 +302,8 @@ public class IndexModuleTests extends ESTestCase { 
} public void testFrozen() { - IndexModule module = - new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), emptyAnalysisRegistry, new InternalEngineFactory()); + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); + IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); module.freeze(); String msg = "Can't modify IndexModule once the index service has been created"; assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addSearchOperationListener(null)).getMessage()); @@ -313,7 +312,6 @@ public class IndexModuleTests extends ESTestCase { assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addSimilarity(null, null)).getMessage()); assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.setSearcherWrapper(null)).getMessage()); assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.forceQueryCacheProvider(null)).getMessage()); - assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addIndexStore("foo", null)).getMessage()); } public void testSetupUnknownSimilarity() throws IOException { @@ -322,8 +320,9 @@ public class IndexModuleTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); IndexModule module = - new IndexModule(IndexSettingsModule.newIndexSettings("foo", settings), emptyAnalysisRegistry, new InternalEngineFactory()); + new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); Exception ex = expectThrows(IllegalArgumentException.class, () -> newIndexService(module)); assertEquals("Unknown Similarity type [test_similarity] for [my_similarity]", ex.getMessage()); } @@ -334,8 +333,8 @@ 
public class IndexModuleTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - IndexModule module = - new IndexModule(IndexSettingsModule.newIndexSettings("foo", settings), emptyAnalysisRegistry, new InternalEngineFactory()); + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); + IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); Exception ex = expectThrows(IllegalArgumentException.class, () -> newIndexService(module)); assertEquals("Similarity [my_similarity] must have an associated type", ex.getMessage()); } @@ -344,8 +343,8 @@ public class IndexModuleTests extends ESTestCase { Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexModule module = - new IndexModule(IndexSettingsModule.newIndexSettings("foo", settings), emptyAnalysisRegistry, new InternalEngineFactory()); + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); + IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); module.forceQueryCacheProvider((a, b) -> new CustomQueryCache()); expectThrows(AlreadySetException.class, () -> module.forceQueryCacheProvider((a, b) -> new CustomQueryCache())); IndexService indexService = newIndexService(module); @@ -357,8 +356,8 @@ public class IndexModuleTests extends ESTestCase { Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexModule module = - new IndexModule(IndexSettingsModule.newIndexSettings("foo", settings), emptyAnalysisRegistry, new 
InternalEngineFactory()); + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); + IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); IndexService indexService = newIndexService(module); assertTrue(indexService.cache().query() instanceof IndexQueryCache); indexService.close("simon says", false); @@ -369,8 +368,8 @@ public class IndexModuleTests extends ESTestCase { .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexModule module = - new IndexModule(IndexSettingsModule.newIndexSettings("foo", settings), emptyAnalysisRegistry, new InternalEngineFactory()); + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); + IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); module.forceQueryCacheProvider((a, b) -> new CustomQueryCache()); IndexService indexService = newIndexService(module); assertTrue(indexService.cache().query() instanceof DisabledQueryCache); diff --git a/server/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java b/server/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java index d8b03ffeedb..a8370095564 100644 --- a/server/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java +++ b/server/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java @@ -96,10 +96,6 @@ public class MergePolicySettingsTests extends ESTestCase { indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING.getKey(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.getBytes() + 1)).build())); assertEquals(((EsTieredMergePolicy) 
indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.getBytes() + 1).getMbFrac(), 0.0001); - assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0); - indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING.getKey(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1).build())); - assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1, 0); - assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0); indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1).build())); assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1, 0); @@ -110,7 +106,6 @@ public class MergePolicySettingsTests extends ESTestCase { assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE); assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT); assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.getBytes() + 1).getMbFrac(), 0.0001); - assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0); assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), 
MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0); } diff --git a/server/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java index ddd3ddf486f..98a5d91e1b1 100644 --- a/server/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java @@ -89,7 +89,6 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase 0); String queryAsString = "{\n" + " \"dis_max\":{\n" + " \"queries\":[\n" + diff --git a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java index 88742e08554..ad02209bf5d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -68,11 +68,7 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase fields = context.getQueryShardContext().simpleMatchToIndexNames(fieldPattern); Collection mappedFields = fields.stream().filter((field) -> context.getQueryShardContext().getObjectMapper(field) != null || context.getQueryShardContext().getMapperService().fullName(field) != null).collect(Collectors.toList()); - if (getCurrentTypes().length == 0) { - assertThat(query, instanceOf(MatchNoDocsQuery.class)); - MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query; - assertThat(matchNoDocsQuery.toString(null), containsString("Missing types in \"exists\" query.")); - } else if (context.mapperService().getIndexSettings().getIndexVersionCreated().before(Version.V_6_1_0)) { + if (context.mapperService().getIndexSettings().getIndexVersionCreated().before(Version.V_6_1_0)) { if (fields.size() == 1) { assertThat(query, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; 
diff --git a/server/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java index 0a67ab50ae8..0a0d567c7a4 100644 --- a/server/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java @@ -105,7 +105,6 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"fuzzy\":{\n" + " \"" + STRING_FIELD_NAME + "\":{\n" + @@ -128,7 +127,6 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"fuzzy\":{\n" + " \"" + STRING_FIELD_NAME + "\":{\n" + @@ -151,7 +149,6 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase 0); String queryMissingFuzzinessUpLimit = "{\n" + " \"fuzzy\":{\n" + " \"" + STRING_FIELD_NAME + "\":{\n" + @@ -214,7 +211,6 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"fuzzy\":{\n" + " \"" + INT_FIELD_NAME + "\":{\n" + @@ -299,7 +295,6 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase 0); Query query = new FuzzyQueryBuilder(STRING_FIELD_NAME, "text").toQuery(createShardContext()); assertThat(query, instanceOf(FuzzyQuery.class)); assertEquals(FuzzyQuery.defaultTranspositions, ((FuzzyQuery)query).getTranspositions()); diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java index d0d075af21a..551bd6488cd 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -39,7 +39,6 @@ import java.io.IOException; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static 
org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.startsWith; public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase { /** Randomly generate either NaN or one of the two infinity values. */ @@ -110,16 +109,12 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase 0); - super.testToQuery(); - } - - public void testExceptionOnMissingTypes() throws IOException { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length == 0); - QueryShardException e = expectThrows(QueryShardException.class, super::testToQuery); - assertThat(e.getMessage(), startsWith("failed to find geo_point field [mapped_geo_point")); + public void testExceptionOnMissingTypes() { + QueryShardContext context = createShardContextWithNoType(); + GeoBoundingBoxQueryBuilder qb = createTestQueryBuilder(); + qb.ignoreUnmapped(false); + QueryShardException e = expectThrows(QueryShardException.class, () -> qb.toQuery(context)); + assertEquals("failed to find geo_point field [" + qb.fieldName() + "]", e.getMessage()); } public void testBrokenCoordinateCannotBeSet() { @@ -295,7 +290,6 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_bounding_box\":{\n" + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + @@ -308,7 +302,6 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_bounding_box\":{\n" + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + @@ -327,7 +320,6 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_bounding_box\":{\n" + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + @@ -340,7 +332,6 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_bounding_box\":{\n" + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + @@ -353,7 +344,6 @@ public class GeoBoundingBoxQueryBuilderTests extends 
AbstractQueryTestCase 0); String query = "{\n" + " \"geo_bounding_box\":{\n" + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + @@ -366,7 +356,6 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_bounding_box\":{\n" + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + @@ -513,7 +502,6 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_bounding_box\": {\n" + " \"validation_method\": \"COERCE\",\n" + @@ -534,7 +522,6 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase 0); super.testMustRewrite(); } diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java index 7b7a1dcb7a5..0cb6a74570b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java @@ -122,7 +122,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); super.testToQuery(); } @@ -148,7 +147,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_distance\":{\n" + " \"distance\":\"12mi\",\n" + @@ -162,7 +160,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_distance\":{\n" + " \"distance\":\"12mi\",\n" + @@ -173,7 +170,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_distance\":{\n" + " \"distance\":\"12mi\",\n" + @@ -184,7 +180,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_distance\":{\n" + " \"distance\":\"12mi\",\n" + @@ -195,7 +190,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " 
\"geo_distance\":{\n" + " \"distance\":12,\n" + @@ -210,7 +204,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_distance\":{\n" + " \"distance\":\"12\",\n" + @@ -225,7 +218,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_distance\":{\n" + " \"distance\":\"19.312128\",\n" + @@ -239,7 +231,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_distance\":{\n" + " \"distance\":19.312128,\n" + @@ -253,7 +244,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_distance\":{\n" + " \"distance\":\"19.312128\",\n" + @@ -268,7 +258,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_distance\":{\n" + " \"distance\":19.312128,\n" + @@ -283,7 +272,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_distance\":{\n" + " \"distance\":\"19.312128km\",\n" + @@ -297,7 +285,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_distance\":{\n" + " \"distance\":\"12mi\",\n" + @@ -312,7 +299,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); Query parsedQuery = parseQuery(query).toQuery(createShardContext()); // TODO: what can we check? 
} @@ -336,12 +322,6 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); - super.testMustRewrite(); - } - public void testIgnoreUnmapped() throws IOException { final GeoDistanceQueryBuilder queryBuilder = new GeoDistanceQueryBuilder("unmapped").point(0.0, 0.0).distance("20m"); queryBuilder.ignoreUnmapped(true); diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java index 8f1ae8eee07..ee64e595e08 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java @@ -62,17 +62,6 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase 0); - super.testToQuery(); - } - private static List randomPolygon() { ShapeBuilder shapeBuilder = null; // This is a temporary fix because sometimes the RandomShapeGenerator @@ -138,7 +127,6 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_polygon\":{\n" + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + @@ -154,7 +142,6 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_polygon\":{\n" + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + @@ -179,7 +166,6 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_polygon\":{\n" + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + @@ -195,7 +181,6 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"geo_polygon\":{\n" + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + @@ -234,12 +219,6 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase 0); - super.testMustRewrite(); - } - public void testIgnoreUnmapped() throws IOException { List polygon = randomPolygon(); final GeoPolygonQueryBuilder 
queryBuilder = new GeoPolygonQueryBuilder("unmapped", polygon); @@ -255,7 +234,6 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext context = createShardContext(); String queryInvalidLat = "{\n" + " \"geo_polygon\":{\n" + diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index eafb4995f72..ca9a21973aa 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -153,17 +153,6 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase 0); - super.testToQuery(); - } - public void testNoFieldName() throws Exception { ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(null, shape)); @@ -279,7 +268,6 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase 0); ShapeType shapeType = ShapeType.randomType(random()); ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType); final GeoShapeQueryBuilder queryBuilder = new GeoShapeQueryBuilder(STRING_FIELD_NAME, shape); diff --git a/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java index 727821039dc..c146df73019 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java @@ -40,23 +40,17 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase @Override protected IdsQueryBuilder doCreateTestQueryBuilder() { - String[] types; - if (getCurrentTypes() != null && getCurrentTypes().length > 0 && randomBoolean()) { - int 
numberOfTypes = randomIntBetween(1, getCurrentTypes().length); - types = new String[numberOfTypes]; - for (int i = 0; i < numberOfTypes; i++) { - if (frequently()) { - types[i] = randomFrom(getCurrentTypes()); - } else { - types[i] = randomAlphaOfLengthBetween(1, 10); - } - } - } else { - if (randomBoolean()) { - types = new String[]{MetaData.ALL}; + final String type; + if (randomBoolean()) { + if (frequently()) { + type = "_doc"; } else { - types = new String[0]; + type = randomAlphaOfLengthBetween(1, 10); } + } else if (randomBoolean()) { + type = MetaData.ALL; + } else { + type = null; } int numberOfIds = randomIntBetween(0, 10); String[] ids = new String[numberOfIds]; @@ -64,8 +58,8 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase ids[i] = randomAlphaOfLengthBetween(1, 10); } IdsQueryBuilder query; - if (types.length > 0 || randomBoolean()) { - query = new IdsQueryBuilder().types(types); + if (type != null && randomBoolean()) { + query = new IdsQueryBuilder().types(type); query.addIds(ids); } else { query = new IdsQueryBuilder(); diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java index 694b63b141b..fd722ef0c77 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java @@ -34,6 +34,7 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.test.AbstractBuilderTestCase.STRING_ALIAS_FIELD_NAME; import static org.hamcrest.CoreMatchers.either; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.containsString; @@ -42,11 +43,8 @@ import static org.hamcrest.Matchers.notNullValue; public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase { @Override protected 
MatchPhrasePrefixQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, - INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME); - if (fieldName.equals(DATE_FIELD_NAME)) { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - } + String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, + DOUBLE_FIELD_NAME, DATE_FIELD_NAME); Object value; if (isTextField(fieldName)) { int terms = randomIntBetween(0, 3); @@ -119,7 +117,6 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase 0); MatchPhrasePrefixQueryBuilder matchQuery = new MatchPhrasePrefixQueryBuilder(DATE_FIELD_NAME, "three term phrase"); matchQuery.analyzer("whitespace"); expectThrows(IllegalArgumentException.class, () -> matchQuery.doToQuery(createShardContext())); diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java index e59c5d6e0c4..14ef7940467 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java @@ -36,6 +36,7 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.test.AbstractBuilderTestCase.STRING_ALIAS_FIELD_NAME; import static org.hamcrest.CoreMatchers.either; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.containsString; @@ -45,11 +46,8 @@ import static org.hamcrest.Matchers.notNullValue; public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase { @Override protected MatchPhraseQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, - 
INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME); - if (fieldName.equals(DATE_FIELD_NAME)) { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - } + String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, + DOUBLE_FIELD_NAME, DATE_FIELD_NAME); Object value; if (isTextField(fieldName)) { int terms = randomIntBetween(0, 3); diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index cdaccb486f2..0de9cac8855 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -61,11 +61,8 @@ import static org.hamcrest.Matchers.notNullValue; public class MatchQueryBuilderTests extends AbstractQueryTestCase { @Override protected MatchQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, - INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME); - if (fieldName.equals(DATE_FIELD_NAME)) { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - } + String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, + DOUBLE_FIELD_NAME, DATE_FIELD_NAME); Object value; if (isTextField(fieldName)) { int terms = randomIntBetween(0, 3); @@ -279,7 +276,6 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase 0); MatchQueryBuilder query = new MatchQueryBuilder(INT_FIELD_NAME, 42); query.fuzziness(randomFuzziness(INT_FIELD_NAME)); QueryShardContext context = createShardContext(); @@ -300,7 +296,6 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase 0); MatchQueryBuilder query = new MatchQueryBuilder(GEO_POINT_FIELD_NAME, "2,3"); QueryShardContext context = 
createShardContext(); QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(context)); @@ -352,7 +347,6 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext shardContext = createShardContext(); MatchQueryBuilder matchQueryBuilder = new MatchQueryBuilder(DOUBLE_FIELD_NAME, 6.075210893508043E-4); matchQueryBuilder.analyzer("simple"); @@ -371,7 +365,6 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext context = createShardContext(); assumeTrue("test runs only when the index version is on or after V_5_0_0_alpha1", context.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1)); @@ -395,7 +388,6 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext context = createShardContext(); MatchQuery b = new MatchQuery(context); b.setLenient(true); diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index b54ce571453..6ac97373dfa 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -294,7 +294,6 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase 0); String unsupportedField = randomFrom(INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME); MoreLikeThisQueryBuilder queryBuilder = new MoreLikeThisQueryBuilder(new String[] {unsupportedField}, new String[]{"some text"}, null) .failOnUnsupportedField(true); diff --git a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index 9eed4d9f1c1..e30cdaca402 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -65,9 +65,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); - } final Object value; if (fieldName.equals(STRING_FIELD_NAME)) { @@ -173,7 +170,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext shardContext = createShardContext(); MultiMatchQueryBuilder multiMatchQueryBuilder = new MultiMatchQueryBuilder("test"); multiMatchQueryBuilder.field(STRING_FIELD_NAME, 5f); @@ -191,7 +187,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); Query query = multiMatchQuery("test1 test2").field(STRING_FIELD_NAME).useDisMax(false).toQuery(createShardContext()); assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery bQuery = (BooleanQuery) query; @@ -201,7 +196,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); Query query = multiMatchQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(false).toQuery(createShardContext()); assertThat(query, instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; @@ -212,7 +206,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); Query query = multiMatchQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(true).toQuery(createShardContext()); assertThat(query, instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query; @@ -225,7 +218,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); Query query = multiMatchQuery("test").field("mapped_str*").useDisMax(false).toQuery(createShardContext()); assertThat(query, instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; @@ -237,7 +229,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); 
assertThat(multiMatchQuery("test").field(MISSING_WILDCARD_FIELD_NAME).toQuery(createShardContext()), instanceOf(MatchNoDocsQuery.class)); assertThat(multiMatchQuery("test").field(MISSING_FIELD_NAME).toQuery(createShardContext()), instanceOf(MatchNoDocsQuery.class)); } @@ -307,7 +298,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext shardContext = createShardContext(); MultiMatchQueryBuilder multiMatchQueryBuilder = new MultiMatchQueryBuilder(6.075210893508043E-4); multiMatchQueryBuilder.field(DOUBLE_FIELD_NAME); @@ -317,7 +307,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); MultiMatchQueryBuilder query = new MultiMatchQueryBuilder(42).field(INT_FIELD_NAME).field(BOOLEAN_FIELD_NAME); query.fuzziness(randomFuzziness(INT_FIELD_NAME)); QueryShardContext context = createShardContext(); @@ -336,8 +325,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); - MultiMatchQueryBuilder qb = new MultiMatchQueryBuilder("text").field(STRING_FIELD_NAME); qb.fuzziness(Fuzziness.TWO); qb.prefixLength(2); @@ -351,7 +338,6 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext context = createShardContext(); MultiMatchQueryBuilder builder = new MultiMatchQueryBuilder("hello"); // should pass because we set lenient to true when default field is `*` diff --git a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java index c78c83c154f..2f868d02921 100644 --- a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java @@ -109,7 +109,6 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase 0); PrefixQueryBuilder query = prefixQuery(INT_FIELD_NAME, "12*"); QueryShardContext context = createShardContext(); 
QueryShardException e = expectThrows(QueryShardException.class, diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 591ee5af080..87197b662d1 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -401,7 +401,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); Query query = queryStringQuery("test").defaultField(STRING_FIELD_NAME).toQuery(createShardContext()); assertThat(query, instanceOf(TermQuery.class)); TermQuery termQuery = (TermQuery) query; @@ -409,7 +408,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); Query query = queryStringQuery("\"term1 term2\"") .defaultField(STRING_FIELD_NAME) .phraseSlop(3) @@ -423,7 +421,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext shardContext = createShardContext(); QueryStringQueryBuilder queryStringQuery = queryStringQuery(STRING_FIELD_NAME + ":boosted^2"); Query query = queryStringQuery.toQuery(shardContext); @@ -463,7 +460,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); Query query = queryStringQuery("test1 test2").field(STRING_FIELD_NAME) .toQuery(createShardContext()); assertThat(query, instanceOf(BooleanQuery.class)); @@ -476,7 +472,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); Query query = queryStringQuery("test").field(STRING_FIELD_NAME) .field(STRING_FIELD_NAME_2) .toQuery(createShardContext()); @@ -490,7 +485,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); Query query = queryStringQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2) .toQuery(createShardContext()); assertThat(query, instanceOf(DisjunctionMaxQuery.class)); @@ 
-501,7 +495,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); Query query = queryStringQuery("test").field("mapped_str*").toQuery(createShardContext()); assertThat(query, instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; @@ -515,7 +508,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); Query query = queryStringQuery("test").field(STRING_FIELD_NAME, 2.2f) .field(STRING_FIELD_NAME_2) .toQuery(createShardContext()); @@ -527,7 +519,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); for (Operator op : Operator.values()) { BooleanClause.Occur defaultOp = op.toBooleanClauseOccur(); QueryStringQueryParser queryParser = new QueryStringQueryParser(createShardContext(), STRING_FIELD_NAME); @@ -550,7 +541,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); for (Operator op : Operator.values()) { BooleanClause.Occur defaultOp = op.toBooleanClauseOccur(); QueryStringQueryParser queryParser = new QueryStringQueryParser(createShardContext(), STRING_FIELD_NAME); @@ -583,7 +573,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); for (Operator op : Operator.values()) { BooleanClause.Occur defaultOp = op.toBooleanClauseOccur(); QueryStringQueryParser queryParser = new QueryStringQueryParser(createShardContext(), STRING_FIELD_NAME); @@ -698,7 +687,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); Query query = queryStringQuery("/foo*bar/").defaultField(STRING_FIELD_NAME) .maxDeterminizedStates(5000) .toQuery(createShardContext()); @@ -708,7 +696,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); QueryStringQueryBuilder queryBuilder = queryStringQuery("/[ac]*a[ac]{50,200}/").defaultField(STRING_FIELD_NAME); TooComplexToDeterminizeException e = expectThrows(TooComplexToDeterminizeException.class, @@ -721,7 +708,6 @@ public class 
QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); { builder.startObject("query_string"); { @@ -744,7 +730,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); { @@ -762,8 +747,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); - int length = randomIntBetween(1, 10); StringBuilder queryString = new StringBuilder(); for (int i = 0; i < length; i++) { @@ -788,7 +771,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); QueryStringQueryBuilder query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME); QueryShardContext context = createShardContext(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -800,7 +782,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); QueryStringQueryBuilder query = queryStringQuery("12*").defaultField(INT_FIELD_NAME); QueryShardContext context = createShardContext(); QueryShardException e = expectThrows(QueryShardException.class, @@ -812,7 +793,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); QueryStringQueryBuilder query = queryStringQuery("2,3").defaultField(GEO_POINT_FIELD_NAME); QueryShardContext context = createShardContext(); QueryShardException e = expectThrows(QueryShardException.class, @@ -824,7 +804,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); String queryAsString = "{\n" + " \"query_string\":{\n" + " \"time_zone\":\"Europe/Paris\",\n" + @@ -846,7 +825,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); int numBoosts = randomIntBetween(2, 10); float[] boosts = new float[numBoosts + 1]; String queryStringPrefix = ""; @@ -885,7 +863,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); 
QueryStringQueryBuilder queryStringQueryBuilder = new QueryStringQueryBuilder("\"test phrase\"~2").field(STRING_FIELD_NAME, 5f); Query query = queryStringQueryBuilder.toQuery(createShardContext()); @@ -899,7 +876,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); QueryStringQueryBuilder queryStringQueryBuilder = new QueryStringQueryBuilder("foo bar").field("invalid*"); Query query = queryStringQueryBuilder.toQuery(createShardContext()); @@ -918,7 +894,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); { QueryStringQueryBuilder queryBuilder = new QueryStringQueryBuilder("foo bar") @@ -1017,16 +992,15 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0) { - if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) - && (context.fieldMapper(STRING_FIELD_NAME).omitNorms() == false)) { - assertThat(query, equalTo(new ConstantScoreQuery(new NormsFieldExistsQuery(STRING_FIELD_NAME)))); - } else { - assertThat(query, equalTo(new ConstantScoreQuery(new TermQuery(new Term("_field_names", STRING_FIELD_NAME))))); - } + if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) + && (context.fieldMapper(STRING_FIELD_NAME).omitNorms() == false)) { + assertThat(query, equalTo(new ConstantScoreQuery(new NormsFieldExistsQuery(STRING_FIELD_NAME)))); } else { - assertThat(query, equalTo(new MatchNoDocsQuery())); + assertThat(query, equalTo(new ConstantScoreQuery(new TermQuery(new Term("_field_names", STRING_FIELD_NAME))))); } + QueryShardContext contextNoType = createShardContextWithNoType(); + query = queryBuilder.toQuery(contextNoType); + assertThat(query, equalTo(new MatchNoDocsQuery())); queryBuilder = new QueryStringQueryBuilder("*:*"); query = queryBuilder.toQuery(context); @@ -1040,7 +1014,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext context = createShardContext(); 
context.getMapperService().merge("_doc", new CompressedXContent( @@ -1098,7 +1071,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); // Prefix Query query = new QueryStringQueryBuilder("aBc*") .field(STRING_FIELD_NAME) @@ -1161,7 +1133,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); // Term Query query = new QueryStringQueryBuilder("hello") .field(INT_FIELD_NAME) @@ -1207,7 +1178,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext context = createShardContext(); context.getIndexSettings().updateIndexMetaData( newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", @@ -1233,7 +1203,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); // Prefix Query query = new QueryStringQueryBuilder("ONE \"TWO THREE\"") .field(STRING_FIELD_NAME) @@ -1252,7 +1221,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext context = createShardContext(); assertEquals(new TermQuery(new Term(STRING_FIELD_NAME, "bar")), new QueryStringQueryBuilder("bar") @@ -1283,8 +1251,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); - Query query = new QueryStringQueryBuilder("text~2") .field(STRING_FIELD_NAME) .fuzzyPrefixLength(2) @@ -1296,7 +1262,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); Query query = new QueryStringQueryBuilder("the quick fox") .field(STRING_FIELD_NAME) .analyzer("stop") @@ -1309,7 +1274,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); Query query = new QueryStringQueryBuilder("the* quick fox") .field(STRING_FIELD_NAME) .analyzer("stop") diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index ba02ddaf05c..6be12cc841a 
100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -133,26 +133,20 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase 0) { - if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) - && context.mapperService().fullName(queryBuilder.fieldName()).hasDocValues()) { - expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(expectedFieldName)); - } else if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) - && context.mapperService().fullName(queryBuilder.fieldName()).omitNorms() == false) { - expectedQuery = new ConstantScoreQuery(new NormsFieldExistsQuery(expectedFieldName)); - } else { - expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, expectedFieldName))); - } + if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) + && context.mapperService().fullName(queryBuilder.fieldName()).hasDocValues()) { + expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(expectedFieldName)); + } else if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) && + context.mapperService().fullName(queryBuilder.fieldName()).omitNorms() == false) { + expectedQuery = new ConstantScoreQuery(new NormsFieldExistsQuery(expectedFieldName)); } else { - expectedQuery = new MatchNoDocsQuery("no mappings yet"); + expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, expectedFieldName))); } assertThat(query, equalTo(expectedQuery)); - - } else if (getCurrentTypes().length == 0 || - (expectedFieldName.equals(DATE_FIELD_NAME) == false - && expectedFieldName.equals(INT_FIELD_NAME) == false - && expectedFieldName.equals(DATE_RANGE_FIELD_NAME) == false - && 
expectedFieldName.equals(INT_RANGE_FIELD_NAME) == false)) { + } else if (expectedFieldName.equals(DATE_FIELD_NAME) == false && + expectedFieldName.equals(INT_FIELD_NAME) == false && + expectedFieldName.equals(DATE_RANGE_FIELD_NAME) == false && + expectedFieldName.equals(INT_RANGE_FIELD_NAME) == false) { assertThat(query, instanceOf(TermRangeQuery.class)); TermRangeQuery termRangeQuery = (TermRangeQuery) query; assertThat(termRangeQuery.getField(), equalTo(expectedFieldName)); @@ -165,7 +159,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase 0); Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext()); // since age is automatically registered in data, we encode it as numeric assertThat(parsedQuery, instanceOf(IndexOrDocValuesQuery.class)); @@ -267,7 +260,6 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase 0); // We test 01/01/2012 from gte and 2030 for lt String query = "{\n" + " \"range\" : {\n" + @@ -302,7 +294,6 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"range\" : {\n" + " \"" + DATE_FIELD_NAME + "\" : {\n" + @@ -339,7 +330,6 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase 0); String query = "{\n" + " \"range\" : {\n" + " \"" + DATE_FIELD_NAME + "\" : {\n" + @@ -427,19 +417,19 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase 0) { - if (queryShardContext.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) - && queryShardContext.fieldMapper(query.fieldName()).hasDocValues()) { - expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(query.fieldName())); - } else { - expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, query.fieldName()))); - } + if (queryShardContext.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) + && 
queryShardContext.fieldMapper(query.fieldName()).hasDocValues()) { + expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(query.fieldName())); } else { - expectedQuery = new MatchNoDocsQuery("no mappings yet"); + expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, query.fieldName()))); } assertThat(luceneQuery, equalTo(expectedQuery)); + + QueryShardContext queryShardContextWithUnkType = createShardContextWithNoType(); + luceneQuery = rewrittenRange.toQuery(queryShardContextWithUnkType); + assertThat(luceneQuery, equalTo(new MatchNoDocsQuery("no mappings yet"))); } public void testRewriteDateToMatchAllWithTimezoneAndFormat() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java index 1110388090a..4ba6165c061 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java @@ -116,7 +116,6 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase 0); RegexpQueryBuilder query = new RegexpQueryBuilder(INT_FIELD_NAME, "12"); QueryShardContext context = createShardContext(); QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(context)); diff --git a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index 3c508ddc8be..a2d6e3ab361 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -247,11 +247,8 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0) { - Query luceneQuery = queryBuilder.toQuery(shardContext); - assertThat(luceneQuery, 
anyOf(instanceOf(BooleanQuery.class), instanceOf(DisjunctionMaxQuery.class))); - } + Query luceneQuery = queryBuilder.toQuery(shardContext); + assertThat(luceneQuery, anyOf(instanceOf(BooleanQuery.class), instanceOf(DisjunctionMaxQuery.class))); } /* @@ -315,7 +312,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0); QueryShardContext shardContext = createShardContext(); SimpleQueryStringBuilder simpleQueryStringBuilder = new SimpleQueryStringBuilder("test"); simpleQueryStringBuilder.field(STRING_FIELD_NAME, 5); @@ -380,7 +376,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0); QueryShardContext shardContext = createShardContext(); int numberOfTerms = randomIntBetween(1, 4); StringBuilder queryString = new StringBuilder(); @@ -421,13 +416,10 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0) { - assertThat(query, instanceOf(MatchAllDocsQuery.class)); - } + assertThat(query, instanceOf(MatchAllDocsQuery.class)); } public void testExpandedTerms() throws Exception { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); // Prefix Query query = new SimpleQueryStringBuilder("aBc*") .field(STRING_FIELD_NAME) @@ -456,7 +448,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0); SimpleQueryStringQueryParser.Settings settings = new SimpleQueryStringQueryParser.Settings(); settings.analyzeWildcard(true); SimpleQueryStringQueryParser parser = new SimpleQueryStringQueryParser(new StandardAnalyzer(), @@ -480,7 +471,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0); SimpleQueryStringQueryParser.Settings settings = new SimpleQueryStringQueryParser.Settings(); settings.analyzeWildcard(true); SimpleQueryStringQueryParser parser = new SimpleQueryStringQueryParser(new MockRepeatAnalyzer(), @@ -512,7 +502,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0); 
SimpleQueryStringQueryParser.Settings settings = new SimpleQueryStringQueryParser.Settings(); settings.analyzeWildcard(true); SimpleQueryStringQueryParser parser = new SimpleQueryStringQueryParser(new MockSynonymAnalyzer(), @@ -557,7 +546,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0); SimpleQueryStringQueryParser.Settings settings = new SimpleQueryStringQueryParser.Settings(); settings.analyzeWildcard(true); settings.quoteFieldSuffix("_2"); @@ -575,7 +563,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0); QueryShardContext context = createShardContext(); context.getIndexSettings().updateIndexMetaData( newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", @@ -598,8 +585,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0); - Query query = new SimpleQueryStringBuilder("text~2") .field(STRING_FIELD_NAME) .fuzzyPrefixLength(2) @@ -611,8 +596,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0); - Query query = new SimpleQueryStringBuilder("t*") .field(DATE_FIELD_NAME) .field(STRING_FIELD_NAME) @@ -626,7 +609,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0); Query query = new SimpleQueryStringBuilder("the quick fox") .field(STRING_FIELD_NAME) .analyzer("stop") @@ -639,7 +621,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0); Query query = new SimpleQueryStringBuilder("the* quick fox") .field(STRING_FIELD_NAME) .analyzer("stop") diff --git a/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java index b6cc150717a..6876d021a0a 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java @@ -138,7 +138,6 @@ public 
class TermQueryBuilderTests extends AbstractTermQueryTestCase 0); TermQueryBuilder query = new TermQueryBuilder(GEO_POINT_FIELD_NAME, "2,3"); QueryShardContext context = createShardContext(); QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(context)); diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index 963a08ce753..9ed303316d9 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -264,7 +264,6 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase 0); TermsQueryBuilder query = new TermsQueryBuilder(GEO_POINT_FIELD_NAME, "2,3"); QueryShardContext context = createShardContext(); QueryShardException e = expectThrows(QueryShardException.class, diff --git a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java index 48f43eefeb3..54f8536563d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java @@ -32,6 +32,7 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.test.AbstractBuilderTestCase.STRING_ALIAS_FIELD_NAME; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -141,8 +142,6 @@ public class WildcardQueryBuilderTests extends AbstractQueryTestCase 0); - QueryShardContext context = createShardContext(); String index = context.getFullyQualifiedIndexName(); diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java 
b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java index 40f6605edf1..cc224019100 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -256,17 +256,6 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase 0); - super.testToQuery(); - } - public void testIllegalArguments() { expectThrows(IllegalArgumentException.class, () -> new FunctionScoreQueryBuilder((QueryBuilder) null)); expectThrows(IllegalArgumentException.class, () -> new FunctionScoreQueryBuilder((ScoreFunctionBuilder) null)); @@ -487,7 +476,6 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase 0); String queryString = Strings.toString(jsonBuilder().startObject() .startObject("function_score") .startArray("functions") @@ -650,12 +638,6 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase 0); - super.testMustRewrite(); - } - public void testRewrite() throws IOException { FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString())) diff --git a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index a543e87adcb..185a9b5cdce 100644 --- a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -274,8 +274,11 @@ public class FlushIT extends ESIntegTestCase { "out of sync replica; num docs on replica [" + (numDocs + extraDocs) + "]; num docs on primary [" + numDocs + "]")); // Index extra documents to all shards - synced-flush should be ok. 
for (IndexShard indexShard : indexShards) { - for (int i = 0; i < extraDocs; i++) { - indexDoc(IndexShardTestCase.getEngine(indexShard), "extra_" + i); + // Do not reindex documents to the out of sync replica, to avoid triggering merges + if (indexShard != outOfSyncReplica) { + for (int i = 0; i < extraDocs; i++) { + indexDoc(IndexShardTestCase.getEngine(indexShard), "extra_" + i); + } } } final ShardsSyncedFlushResult fullResult = SyncedFlushUtil.attemptSyncedFlush(logger, internalCluster(), shardId); diff --git a/server/src/test/java/org/elasticsearch/plugins/IndexStorePluginTests.java b/server/src/test/java/org/elasticsearch/plugins/IndexStorePluginTests.java new file mode 100644 index 00000000000..c53d798f7b4 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/plugins/IndexStorePluginTests.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.plugins; + +import org.elasticsearch.bootstrap.JavaVersion; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.store.IndexStore; +import org.elasticsearch.node.MockNode; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; +import java.util.function.Function; + +import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; +import static org.hamcrest.Matchers.hasToString; + +public class IndexStorePluginTests extends ESTestCase { + + public static class BarStorePlugin extends Plugin implements IndexStorePlugin { + + @Override + public Map> getIndexStoreFactories() { + return Collections.singletonMap("store", IndexStore::new); + } + + } + + public static class FooStorePlugin extends Plugin implements IndexStorePlugin { + + @Override + public Map> getIndexStoreFactories() { + return Collections.singletonMap("store", IndexStore::new); + } + + } + + public void testDuplicateIndexStoreProviders() { + final Settings settings = Settings.builder().put("path.home", createTempDir()).build(); + final IllegalStateException e = expectThrows( + IllegalStateException.class, () -> new MockNode(settings, Arrays.asList(BarStorePlugin.class, FooStorePlugin.class))); + if (JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0) { + assertThat(e, hasToString(matches( + "java.lang.IllegalStateException: Duplicate key store \\(attempted merging values " + + "org.elasticsearch.plugins.IndexStorePluginTests\\$BarStorePlugin.* " + + "and org.elasticsearch.plugins.IndexStorePluginTests\\$FooStorePlugin.*\\)"))); + } else { + assertThat(e, hasToString(matches( + "java.lang.IllegalStateException: Duplicate key org.elasticsearch.plugins.IndexStorePluginTests\\$BarStorePlugin.*"))); + } + } + +} diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java index 1f83842eab2..8d67941639f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java @@ -31,11 +31,7 @@ import org.elasticsearch.common.joda.Joda; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.BucketOrder; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBoundsTests; import org.joda.time.DateTimeZone; -import org.junit.Assume; import java.io.IOException; import java.util.ArrayList; @@ -141,7 +137,6 @@ public class DateHistogramTests extends BaseAggregationTestCase 0); // we need mappings FormatDateTimeFormatter format = Joda.forPattern("strict_date_optional_time"); try (Directory dir = newDirectory(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index a891f30b93d..fccec5f784f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -31,7 +31,6 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; -import 
org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -40,7 +39,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.index.Index; @@ -129,20 +127,16 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { protected static Version indexVersionCreated; private static ServiceHolder serviceHolder; + private static ServiceHolder serviceHolderWithNoType; private static int queryNameId = 0; private static Settings nodeSettings; private static Index index; - private static String[] currentTypes; - protected static String[] randomTypes; + private static Index indexWithNoType; protected static Index getIndex() { return index; } - protected static String[] getCurrentTypes() { - return currentTypes; - } - protected Collection> getPlugins() { return Collections.emptyList(); } @@ -153,40 +147,12 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { @BeforeClass public static void beforeClass() { nodeSettings = Settings.builder() - .put("node.name", AbstractQueryTestCase.class.toString()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .build(); + .put("node.name", AbstractQueryTestCase.class.toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .build(); - index = new Index(randomAlphaOfLengthBetween(1, 10), "_na_"); - - // Set a single type in the index - switch (random().nextInt(3)) { - case 0: - currentTypes = new String[0]; // no types - break; - default: - currentTypes = new String[] { "_doc" }; - break; - } - 
randomTypes = getRandomTypes(); - } - - private static String[] getRandomTypes() { - String[] types; - if (currentTypes.length > 0 && randomBoolean()) { - int numberOfQueryTypes = randomIntBetween(1, currentTypes.length); - types = new String[numberOfQueryTypes]; - for (int i = 0; i < numberOfQueryTypes; i++) { - types[i] = randomFrom(currentTypes); - } - } else { - if (randomBoolean()) { - types = new String[]{MetaData.ALL}; - } else { - types = new String[0]; - } - } - return types; + index = new Index(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLength(10)); + indexWithNoType = new Index(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLength(10)); } @Override @@ -210,34 +176,37 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { protected Settings indexSettings() { // we have to prefer CURRENT since with the range of versions we support it's rather unlikely to get the current actually. indexVersionCreated = randomBoolean() ? Version.CURRENT - : VersionUtils.randomVersionBetween(random(), null, Version.CURRENT); + : VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); return Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, indexVersionCreated) .build(); } protected static String expectedFieldName(String builderFieldName) { - if (currentTypes.length == 0) { - return builderFieldName; - } return ALIAS_TO_CONCRETE_FIELD_NAME.getOrDefault(builderFieldName, builderFieldName); } @AfterClass public static void afterClass() throws Exception { - IOUtils.close(serviceHolder); + org.apache.lucene.util.IOUtils.close(serviceHolder); + org.apache.lucene.util.IOUtils.close(serviceHolderWithNoType); serviceHolder = null; + serviceHolderWithNoType = null; } @Before public void beforeTest() throws IOException { if (serviceHolder == null) { - serviceHolder = new ServiceHolder(nodeSettings, indexSettings(), getPlugins(), this); + serviceHolder = new ServiceHolder(nodeSettings, indexSettings(), getPlugins(), this, true); } 
serviceHolder.clientInvocationHandler.delegate = this; + if (serviceHolderWithNoType == null) { + serviceHolderWithNoType = new ServiceHolder(nodeSettings, indexSettings(), getPlugins(), this, false); + } + serviceHolderWithNoType.clientInvocationHandler.delegate = this; } - protected static SearchContext getSearchContext(String[] types, QueryShardContext context) { + protected static SearchContext getSearchContext(QueryShardContext context) { TestSearchContext testSearchContext = new TestSearchContext(context) { @Override public MapperService mapperService() { @@ -250,13 +219,13 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { } }; - testSearchContext.getQueryShardContext().setTypes(types); return testSearchContext; } @After public void afterTest() { serviceHolder.clientInvocationHandler.delegate = null; + serviceHolderWithNoType.clientInvocationHandler.delegate = null; } /** @@ -280,6 +249,13 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { return serviceHolder.createShardContext(reader); } + /** + * @return a new {@link QueryShardContext} based on an index with no type registered + */ + protected static QueryShardContext createShardContextWithNoType() { + return serviceHolderWithNoType.createShardContext(null); + } + /** * @return a new {@link QueryShardContext} based on the base test index and queryParserService */ @@ -331,8 +307,11 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { private final Client client; private final long nowInMillis = randomNonNegativeLong(); - ServiceHolder(Settings nodeSettings, Settings indexSettings, - Collection> plugins, AbstractBuilderTestCase testCase) throws IOException { + ServiceHolder(Settings nodeSettings, + Settings indexSettings, + Collection> plugins, + AbstractBuilderTestCase testCase, + boolean registerType) throws IOException { Environment env = InternalSettingsPreparer.prepareEnvironment(nodeSettings); PluginsService pluginsService; pluginsService = new 
PluginsService(nodeSettings, null, env.modulesFile(), env.pluginsFile(), plugins); @@ -379,9 +358,8 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { } }); - - for (String type : currentTypes) { - mapperService.merge(type, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(type, + if (registerType) { + mapperService.merge("_doc", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("_doc", STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME_2, "type=keyword", STRING_ALIAS_FIELD_NAME, "type=alias,path=" + STRING_FIELD_NAME, @@ -399,12 +377,12 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { GEO_SHAPE_FIELD_NAME, "type=geo_shape" ))), MapperService.MergeReason.MAPPING_UPDATE); // also add mappings for two inner field in the object field - mapperService.merge(type, new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\"," - + "\"properties\":{\"" + DATE_FIELD_NAME + "\":{\"type\":\"date\"},\"" + - INT_FIELD_NAME + "\":{\"type\":\"integer\"}}}}}"), - MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("_doc", new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\"," + + "\"properties\":{\"" + DATE_FIELD_NAME + "\":{\"type\":\"date\"},\"" + + INT_FIELD_NAME + "\":{\"type\":\"integer\"}}}}}"), + MapperService.MergeReason.MAPPING_UPDATE); + testCase.initializeAdditionalMappings(mapperService); } - testCase.initializeAdditionalMappings(mapperService); } @Override @@ -423,5 +401,4 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { return new ScriptModule(Settings.EMPTY, scriptPlugins); } } - } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index c1efd9d8e6a..f71edfde84f 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -413,7 +413,7 @@ public abstract class AbstractQueryTestCase> context.setAllowUnmappedFields(true); QB firstQuery = createTestQueryBuilder(); QB controlQuery = copyQuery(firstQuery); - SearchContext searchContext = getSearchContext(randomTypes, context); + SearchContext searchContext = getSearchContext(context); /* we use a private rewrite context here since we want the most realistic way of asserting that we are cacheable or not. * We do it this way in SearchService where * we first rewrite the query with a private context, then reset the context and then build the actual lucene query*/ @@ -443,7 +443,7 @@ public abstract class AbstractQueryTestCase> secondQuery.queryName(secondQuery.queryName() == null ? randomAlphaOfLengthBetween(1, 30) : secondQuery.queryName() + randomAlphaOfLengthBetween(1, 10)); } - searchContext = getSearchContext(randomTypes, context); + searchContext = getSearchContext(context); Query secondLuceneQuery = rewriteQuery(secondQuery, context).toQuery(context); assertNotNull("toQuery should not return null", secondLuceneQuery); assertLuceneQuery(secondQuery, secondLuceneQuery, searchContext); @@ -668,10 +668,11 @@ public abstract class AbstractQueryTestCase> */ protected static String getRandomFieldName() { // if no type is set then return a random field name - if (getCurrentTypes().length == 0 || randomBoolean()) { + if (randomBoolean()) { return randomAlphaOfLengthBetween(1, 10); + } else { + return randomFrom(MAPPED_LEAF_FIELD_NAMES); } - return randomFrom(MAPPED_LEAF_FIELD_NAMES); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 44fe621dfa4..ee4f2f3b222 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -201,7 +201,7 @@ public abstract class ESTestCase extends LuceneTestCase { System.setProperty("log4j2.disable.jmx", "true"); // Enable Netty leak detection and monitor logger for logged leak errors - System.setProperty("io.netty.leakDetection.level", "advanced"); + System.setProperty("io.netty.leakDetection.level", "paranoid"); String leakLoggerName = "io.netty.util.ResourceLeakDetector"; Logger leakLogger = LogManager.getLogger(leakLoggerName); Appender leakAppender = new AbstractAppender(leakLoggerName, null, diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java index 921b819b9b7..8a22383dcae 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.IndexStore; +import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.Plugin; import java.util.Arrays; @@ -42,13 +43,14 @@ import java.util.EnumSet; import java.util.IdentityHashMap; import java.util.List; import java.util.Map; +import java.util.function.Function; public class MockFSIndexStore extends IndexStore { public static final Setting INDEX_CHECK_INDEX_ON_CLOSE_SETTING = Setting.boolSetting("index.store.mock.check_index_on_close", true, Property.IndexScope, Property.NodeScope); - public static class TestPlugin extends Plugin { + public static class TestPlugin extends Plugin implements IndexStorePlugin { @Override public Settings additionalSettings() { return Settings.builder().put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "mock").build(); @@ -64,6 +66,11 @@ public class 
MockFSIndexStore extends IndexStore { MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING); } + @Override + public Map> getIndexStoreFactories() { + return Collections.singletonMap("mock", MockFSIndexStore::new); + } + @Override public void onIndexModule(IndexModule indexModule) { Settings indexSettings = indexModule.getSettings(); @@ -71,7 +78,6 @@ public class MockFSIndexStore extends IndexStore { if (INDEX_CHECK_INDEX_ON_CLOSE_SETTING.get(indexSettings)) { indexModule.addIndexEventListener(new Listener()); } - indexModule.addIndexStore("mock", MockFSIndexStore::new); } } } diff --git a/x-pack/docs/en/rest-api/ml/jobresource.asciidoc b/x-pack/docs/en/rest-api/ml/jobresource.asciidoc index 3c4e330722f..fb3424cab3f 100644 --- a/x-pack/docs/en/rest-api/ml/jobresource.asciidoc +++ b/x-pack/docs/en/rest-api/ml/jobresource.asciidoc @@ -434,17 +434,20 @@ A rule has the following properties: as usual. This action is suitable when certain values are expected to be consistently anomalous and they affect the model in a way that negatively impacts the rest of the results. + `scope`:: - (object) An optional scope of series where the rule applies. By default the scope - includes all series. Scoping is allowed for any of the partition/by/over fields. - To add a scope for a field add the field name as a key in the scope object and - set its value to an object with properties: - `filter_id`:: - (string) The id of the <> to be used. - `filter_type`:: - (string) Either `include` (the rule applies for values in the filter) - or `exclude` (the rule applies for values not in the filter). Defaults - to `include`. + (object) An optional scope of series where the rule applies. By default, the + scope includes all series. Scoping is allowed for any of the fields that are + also specified in `by_field_name`, `over_field_name`, or `partition_field_name`. 
+ To add a scope for a field, add the field name as a key in the scope object and + set its value to an object with the following properties: +`filter_id`::: + (string) The id of the <> to be used. + +`filter_type`::: + (string) Either `include` (the rule applies for values in the filter) + or `exclude` (the rule applies for values not in the filter). Defaults + to `include`. `conditions`:: (array) An optional array of numeric conditions when the rule applies. diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 69cead84b7e..49449e44d98 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -420,7 +420,7 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl return Arrays.asList( // ML - Custom metadata new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField("ml"), - parser -> MlMetadata.METADATA_PARSER.parse(parser, null).build()), + parser -> MlMetadata.LENIENT_PARSER.parse(parser, null).build()), // ML - Persistent action requests new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField(StartDatafeedAction.TASK_NAME), StartDatafeedAction.DatafeedParams::fromXContent), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java index f3505835994..e0b71abe966 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java @@ -61,12 +61,12 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom { public static final MlMetadata EMPTY_METADATA = new MlMetadata(Collections.emptySortedMap(), 
Collections.emptySortedMap()); // This parser follows the pattern that metadata is parsed leniently (to allow for enhancements) - public static final ObjectParser METADATA_PARSER = new ObjectParser<>("ml_metadata", true, Builder::new); + public static final ObjectParser LENIENT_PARSER = new ObjectParser<>("ml_metadata", true, Builder::new); static { - METADATA_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.METADATA_PARSER.apply(p, c).build(), JOBS_FIELD); - METADATA_PARSER.declareObjectArray(Builder::putDatafeeds, - (p, c) -> DatafeedConfig.METADATA_PARSER.apply(p, c).build(), DATAFEEDS_FIELD); + LENIENT_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.LENIENT_PARSER.apply(p, c).build(), JOBS_FIELD); + LENIENT_PARSER.declareObjectArray(Builder::putDatafeeds, + (p, c) -> DatafeedConfig.LENIENT_PARSER.apply(p, c).build(), DATAFEEDS_FIELD); } private final SortedMap jobs; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlParserType.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlParserType.java deleted file mode 100644 index 64f52ab2d2c..00000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlParserType.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.ml; - -/** - * In order to allow enhancements that require additions to the ML custom cluster state to be made in minor versions, - * when we parse our metadata from persisted cluster state we ignore unknown fields. However, we don't want to be - * lenient when parsing config as this would mean user mistakes could go undetected. 
Therefore, for all JSON objects - * that are used in both custom cluster state and config we have two parsers, one tolerant of unknown fields (for - * parsing cluster state) and one strict (for parsing config). This class enumerates the two options. - */ -public enum MlParserType { - - METADATA, CONFIG; - -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java index a0c757a0be6..4d3f720026e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java @@ -39,7 +39,7 @@ public class PutDatafeedAction extends Action { public static class Request extends AcknowledgedRequest implements ToXContentObject { public static Request parseRequest(String datafeedId, XContentParser parser) { - DatafeedConfig.Builder datafeed = DatafeedConfig.CONFIG_PARSER.apply(parser, null); + DatafeedConfig.Builder datafeed = DatafeedConfig.STRICT_PARSER.apply(parser, null); datafeed.setId(datafeedId); return new Request(datafeed.build()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java index 7e85198d214..f241e4bd375 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java @@ -42,7 +42,7 @@ public class PutJobAction extends Action { public static class Request extends AcknowledgedRequest implements ToXContentObject { public static Request parseRequest(String jobId, XContentParser parser) { - Job.Builder jobBuilder = Job.CONFIG_PARSER.apply(parser, null); + Job.Builder jobBuilder = Job.STRICT_PARSER.apply(parser, 
null); if (jobBuilder.getId() == null) { jobBuilder.setId(jobId); } else if (!Strings.isNullOrEmpty(jobId) && !jobId.equals(jobBuilder.getId())) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java index 0e807664d86..15dd8721f06 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java @@ -48,7 +48,7 @@ public class ValidateDetectorAction extends Action METADATA_PARSER = new ConstructingObjectParser<>( - "chunking_config", true, a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1])); - public static final ConstructingObjectParser CONFIG_PARSER = new ConstructingObjectParser<>( - "chunking_config", false, a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1])); - public static final Map> PARSERS = - new EnumMap<>(MlParserType.class); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); - static { - PARSERS.put(MlParserType.METADATA, METADATA_PARSER); - PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); - for (MlParserType parserType : MlParserType.values()) { - ConstructingObjectParser parser = PARSERS.get(parserType); - assert parser != null; - parser.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return Mode.fromString(p.text()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, MODE_FIELD, ValueType.STRING); - parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return TimeValue.parseTimeValue(p.text(), 
TIME_SPAN_FIELD.getPreferredName()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, TIME_SPAN_FIELD, ValueType.STRING); - } + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>( + "chunking_config", ignoreUnknownFields, a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1])); + + parser.declareField(ConstructingObjectParser.constructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return Mode.fromString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, MODE_FIELD, ValueType.STRING); + parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return TimeValue.parseTimeValue(p.text(), TIME_SPAN_FIELD.getPreferredName()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, TIME_SPAN_FIELD, ValueType.STRING); + + return parser; } private final Mode mode; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index e9ac704171b..1034b00af0a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -25,7 +25,6 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xpack.core.ml.MlParserType; import 
org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.messages.Messages; @@ -38,7 +37,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; -import java.util.EnumMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -87,44 +85,46 @@ public class DatafeedConfig extends AbstractDiffable implements public static final ParseField HEADERS = new ParseField("headers"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ObjectParser METADATA_PARSER = new ObjectParser<>("datafeed_config", true, Builder::new); - public static final ObjectParser CONFIG_PARSER = new ObjectParser<>("datafeed_config", false, Builder::new); - public static final Map> PARSERS = new EnumMap<>(MlParserType.class); + public static final ObjectParser LENIENT_PARSER = createParser(true); + public static final ObjectParser STRICT_PARSER = createParser(false); - static { - PARSERS.put(MlParserType.METADATA, METADATA_PARSER); - PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); - for (MlParserType parserType : MlParserType.values()) { - ObjectParser parser = PARSERS.get(parserType); - assert parser != null; - parser.declareString(Builder::setId, ID); - parser.declareString(Builder::setJobId, Job.ID); - parser.declareStringArray(Builder::setIndices, INDEXES); - parser.declareStringArray(Builder::setIndices, INDICES); - parser.declareStringArray(Builder::setTypes, TYPES); - parser.declareString((builder, val) -> - builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY); - parser.declareString((builder, val) -> - builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), FREQUENCY); - parser.declareObject(Builder::setQuery, (p, c) -> 
AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY); - parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGREGATIONS); - parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGS); - parser.declareObject(Builder::setScriptFields, (p, c) -> { - List parsedScriptFields = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_OBJECT) { - parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p)); - } - parsedScriptFields.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName)); - return parsedScriptFields; - }, SCRIPT_FIELDS); - parser.declareInt(Builder::setScrollSize, SCROLL_SIZE); - // TODO this is to read former _source field. Remove in v7.0.0 - parser.declareBoolean((builder, value) -> {}, SOURCE); - parser.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSERS.get(parserType), CHUNKING_CONFIG); + private static ObjectParser createParser(boolean ignoreUnknownFields) { + ObjectParser parser = new ObjectParser<>("datafeed_config", ignoreUnknownFields, Builder::new); + + parser.declareString(Builder::setId, ID); + parser.declareString(Builder::setJobId, Job.ID); + parser.declareStringArray(Builder::setIndices, INDEXES); + parser.declareStringArray(Builder::setIndices, INDICES); + parser.declareStringArray(Builder::setTypes, TYPES); + parser.declareString((builder, val) -> + builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY); + parser.declareString((builder, val) -> + builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), FREQUENCY); + parser.declareObject(Builder::setQuery, (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY); + parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGREGATIONS); + parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), 
AGGS); + parser.declareObject(Builder::setScriptFields, (p, c) -> { + List parsedScriptFields = new ArrayList<>(); + while (p.nextToken() != XContentParser.Token.END_OBJECT) { + parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p)); + } + parsedScriptFields.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName)); + return parsedScriptFields; + }, SCRIPT_FIELDS); + parser.declareInt(Builder::setScrollSize, SCROLL_SIZE); + // TODO this is to read former _source field. Remove in v7.0.0 + parser.declareBoolean((builder, value) -> { + }, SOURCE); + parser.declareObject(Builder::setChunkingConfig, ignoreUnknownFields ? ChunkingConfig.LENIENT_PARSER : ChunkingConfig.STRICT_PARSER, + CHUNKING_CONFIG); + + if (ignoreUnknownFields) { + // Headers are not parsed by the strict (config) parser, so headers supplied in the _body_ of a REST request will be rejected. + // (For config, headers are explicitly transferred from the auth headers by code in the put/update datafeed actions.) + parser.declareObject(Builder::setHeaders, (p, c) -> p.mapStrings(), HEADERS); } - // Headers are only parsed by the metadata parser, so headers supplied in the _body_ of a REST request will be rejected. - // (For config headers are explicitly transferred from the auth headers by code in the put/update datafeed actions.) 
- METADATA_PARSER.declareObject(Builder::setHeaders, (p, c) -> p.mapStrings(), HEADERS); + + return parser; } private final String id; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java index 5d8fd3ffc71..f3748cefc51 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java @@ -68,7 +68,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { return parsedScriptFields; }, DatafeedConfig.SCRIPT_FIELDS); PARSER.declareInt(Builder::setScrollSize, DatafeedConfig.SCROLL_SIZE); - PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.CONFIG_PARSER, DatafeedConfig.CHUNKING_CONFIG); + PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.STRICT_PARSER, DatafeedConfig.CHUNKING_CONFIG); } private final String id; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java index 0c702e5afb0..371cdd8c038 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.ml.MlParserType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ 
-24,10 +23,8 @@ import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; -import java.util.EnumMap; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.SortedSet; @@ -76,46 +73,38 @@ public class AnalysisConfig implements ToXContentObject, Writeable { public static final long DEFAULT_RESULT_FINALIZATION_WINDOW = 2L; // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser METADATA_PARSER = - new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(), true, - a -> new AnalysisConfig.Builder((List) a[0])); - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser CONFIG_PARSER = - new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(), false, - a -> new AnalysisConfig.Builder((List) a[0])); - public static final Map> PARSERS = - new EnumMap<>(MlParserType.class); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); - static { - PARSERS.put(MlParserType.METADATA, METADATA_PARSER); - PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); - for (MlParserType parserType : MlParserType.values()) { - ConstructingObjectParser parser = PARSERS.get(parserType); - assert parser != null; - parser.declareObjectArray(ConstructingObjectParser.constructorArg(), - (p, c) -> Detector.PARSERS.get(parserType).apply(p, c).build(), DETECTORS); - parser.declareString((builder, val) -> - builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())), BUCKET_SPAN); - parser.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME); - 
parser.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS); - // This one is nasty - the syntax for analyzers takes either names or objects at many levels, hence it's not - // possible to simply declare whether the field is a string or object and a completely custom parser is required - parser.declareField(Builder::setCategorizationAnalyzerConfig, - (p, c) -> CategorizationAnalyzerConfig.buildFromXContentFragment(p, parserType), - CATEGORIZATION_ANALYZER, ObjectParser.ValueType.OBJECT_OR_STRING); - parser.declareString((builder, val) -> - builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY); - parser.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME); - parser.declareStringArray(Builder::setInfluencers, INFLUENCERS); - parser.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS); - parser.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW); - parser.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS); - parser.declareStringArray((builder, values) -> builder.setMultipleBucketSpans( - values.stream().map(v -> TimeValue.parseTimeValue(v, MULTIPLE_BUCKET_SPANS.getPreferredName())) - .collect(Collectors.toList())), MULTIPLE_BUCKET_SPANS); - parser.declareBoolean(Builder::setUsePerPartitionNormalization, USER_PER_PARTITION_NORMALIZATION); - } + @SuppressWarnings("unchecked") + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(), + ignoreUnknownFields, a -> new AnalysisConfig.Builder((List) a[0])); + + parser.declareObjectArray(ConstructingObjectParser.constructorArg(), + (p, c) -> (ignoreUnknownFields ? 
Detector.LENIENT_PARSER : Detector.STRICT_PARSER).apply(p, c).build(), DETECTORS); + parser.declareString((builder, val) -> + builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())), BUCKET_SPAN); + parser.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME); + parser.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS); + // This one is nasty - the syntax for analyzers takes either names or objects at many levels, hence it's not + // possible to simply declare whether the field is a string or object and a completely custom parser is required + parser.declareField(Builder::setCategorizationAnalyzerConfig, + (p, c) -> CategorizationAnalyzerConfig.buildFromXContentFragment(p, ignoreUnknownFields), + CATEGORIZATION_ANALYZER, ObjectParser.ValueType.OBJECT_OR_STRING); + parser.declareString((builder, val) -> + builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY); + parser.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME); + parser.declareStringArray(Builder::setInfluencers, INFLUENCERS); + parser.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS); + parser.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW); + parser.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS); + parser.declareStringArray((builder, values) -> builder.setMultipleBucketSpans( + values.stream().map(v -> TimeValue.parseTimeValue(v, MULTIPLE_BUCKET_SPANS.getPreferredName())) + .collect(Collectors.toList())), MULTIPLE_BUCKET_SPANS); + parser.declareBoolean(Builder::setUsePerPartitionNormalization, USER_PER_PARTITION_NORMALIZATION); + + return parser; } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisLimits.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisLimits.java index 569d62a02cf..797df5892f8 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisLimits.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisLimits.java @@ -17,13 +17,10 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.MlParserType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; -import java.util.EnumMap; -import java.util.Map; import java.util.Objects; /** @@ -50,31 +47,26 @@ public class AnalysisLimits implements ToXContentObject, Writeable { public static final ParseField CATEGORIZATION_EXAMPLES_LIMIT = new ParseField("categorization_examples_limit"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ConstructingObjectParser METADATA_PARSER = new ConstructingObjectParser<>( - "analysis_limits", true, a -> new AnalysisLimits( - a[0] == null ? PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB : (Long) a[0], - a[1] == null ? 
DEFAULT_CATEGORIZATION_EXAMPLES_LIMIT : (Long) a[1])); - public static final ConstructingObjectParser CONFIG_PARSER = new ConstructingObjectParser<>( - "analysis_limits", false, a -> new AnalysisLimits((Long) a[0], (Long) a[1])); - public static final Map> PARSERS = - new EnumMap<>(MlParserType.class); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); - static { - PARSERS.put(MlParserType.METADATA, METADATA_PARSER); - PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); - for (MlParserType parserType : MlParserType.values()) { - ConstructingObjectParser parser = PARSERS.get(parserType); - assert parser != null; - parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()).getMb(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.longValue(); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, MODEL_MEMORY_LIMIT, ObjectParser.ValueType.VALUE); - parser.declareLong(ConstructingObjectParser.optionalConstructorArg(), CATEGORIZATION_EXAMPLES_LIMIT); - } + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>( + "analysis_limits", ignoreUnknownFields, a -> ignoreUnknownFields ? new AnalysisLimits( + a[0] == null ? PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB : (Long) a[0], + a[1] == null ? 
DEFAULT_CATEGORIZATION_EXAMPLES_LIMIT : (Long) a[1]) : new AnalysisLimits((Long) a[0], (Long) a[1])); + + parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()).getMb(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.longValue(); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, MODEL_MEMORY_LIMIT, ObjectParser.ValueType.VALUE); + parser.declareLong(ConstructingObjectParser.optionalConstructorArg(), CATEGORIZATION_EXAMPLES_LIMIT); + + return parser; } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java index fd0fde76e68..36c25e0a7a7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.action.admin.indices.RestAnalyzeAction; -import org.elasticsearch.xpack.core.ml.MlParserType; import java.io.IOException; import java.util.ArrayList; @@ -61,7 +60,8 @@ public class CategorizationAnalyzerConfig implements ToXContentFragment, Writeab /** * This method is only used in the unit tests - in production code this config is always parsed as a fragment. 
*/ - public static CategorizationAnalyzerConfig buildFromXContentObject(XContentParser parser, MlParserType parserType) throws IOException { + public static CategorizationAnalyzerConfig buildFromXContentObject(XContentParser parser, boolean ignoreUnknownFields) + throws IOException { if (parser.nextToken() != XContentParser.Token.START_OBJECT) { throw new IllegalArgumentException("Expected start object but got [" + parser.currentToken() + "]"); @@ -71,7 +71,7 @@ public class CategorizationAnalyzerConfig implements ToXContentFragment, Writeab throw new IllegalArgumentException("Expected [" + CATEGORIZATION_ANALYZER + "] field but got [" + parser.currentToken() + "]"); } parser.nextToken(); - CategorizationAnalyzerConfig categorizationAnalyzerConfig = buildFromXContentFragment(parser, parserType); + CategorizationAnalyzerConfig categorizationAnalyzerConfig = buildFromXContentFragment(parser, ignoreUnknownFields); parser.nextToken(); return categorizationAnalyzerConfig; } @@ -83,7 +83,7 @@ public class CategorizationAnalyzerConfig implements ToXContentFragment, Writeab * * The parser is strict when parsing config and lenient when parsing cluster state. 
*/ - static CategorizationAnalyzerConfig buildFromXContentFragment(XContentParser parser, MlParserType parserType) throws IOException { + static CategorizationAnalyzerConfig buildFromXContentFragment(XContentParser parser, boolean ignoreUnknownFields) throws IOException { CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(); @@ -131,7 +131,7 @@ public class CategorizationAnalyzerConfig implements ToXContentFragment, Writeab } } // Be lenient when parsing cluster state - assume unknown fields are from future versions - } else if (parserType == MlParserType.CONFIG) { + } else if (ignoreUnknownFields == false) { throw new IllegalArgumentException("Parameter [" + currentFieldName + "] in [" + CATEGORIZATION_ANALYZER + "] is unknown or of the wrong type [" + token + "]"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java index 6e9652bdfa2..87c084baeac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java @@ -14,16 +14,13 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.MlParserType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.time.DateTimeFormatterTimestampConverter; import java.io.IOException; import java.time.ZoneOffset; -import java.util.EnumMap; import java.util.Locale; -import java.util.Map; import java.util.Objects; /** @@ -126,24 +123,20 @@ public class 
DataDescription implements ToXContentObject, Writeable { private final Character quoteCharacter; // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ObjectParser METADATA_PARSER = - new ObjectParser<>(DATA_DESCRIPTION_FIELD.getPreferredName(), true, Builder::new); - public static final ObjectParser CONFIG_PARSER = - new ObjectParser<>(DATA_DESCRIPTION_FIELD.getPreferredName(), false, Builder::new); - public static final Map> PARSERS = new EnumMap<>(MlParserType.class); + public static final ObjectParser LENIENT_PARSER = createParser(true); + public static final ObjectParser STRICT_PARSER = createParser(false); - static { - PARSERS.put(MlParserType.METADATA, METADATA_PARSER); - PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); - for (MlParserType parserType : MlParserType.values()) { - ObjectParser parser = PARSERS.get(parserType); - assert parser != null; - parser.declareString(Builder::setFormat, FORMAT_FIELD); - parser.declareString(Builder::setTimeField, TIME_FIELD_NAME_FIELD); - parser.declareString(Builder::setTimeFormat, TIME_FORMAT_FIELD); - parser.declareField(Builder::setFieldDelimiter, DataDescription::extractChar, FIELD_DELIMITER_FIELD, ValueType.STRING); - parser.declareField(Builder::setQuoteCharacter, DataDescription::extractChar, QUOTE_CHARACTER_FIELD, ValueType.STRING); - } + private static ObjectParser createParser(boolean ignoreUnknownFields) { + ObjectParser parser = + new ObjectParser<>(DATA_DESCRIPTION_FIELD.getPreferredName(), ignoreUnknownFields, Builder::new); + + parser.declareString(Builder::setFormat, FORMAT_FIELD); + parser.declareString(Builder::setTimeField, TIME_FIELD_NAME_FIELD); + parser.declareString(Builder::setTimeFormat, TIME_FORMAT_FIELD); + parser.declareField(Builder::setFieldDelimiter, DataDescription::extractChar, FIELD_DELIMITER_FIELD, ValueType.STRING); + parser.declareField(Builder::setQuoteCharacter, 
DataDescription::extractChar, QUOTE_CHARACTER_FIELD, ValueType.STRING); + + return parser; } public DataDescription(DataFormat dataFormat, String timeFieldName, String timeFormat, Character fieldDelimiter, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java index fbdb2f6662a..25cd0cffe7b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java @@ -13,17 +13,14 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.ml.MlParserType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.Arrays; import java.util.Collections; -import java.util.EnumMap; import java.util.EnumSet; import java.util.List; -import java.util.Map; import java.util.Objects; import java.util.Set; @@ -37,23 +34,18 @@ public class DetectionRule implements ToXContentObject, Writeable { public static final ParseField CONDITIONS_FIELD = new ParseField("conditions"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ObjectParser METADATA_PARSER = - new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), true, Builder::new); - public static final ObjectParser CONFIG_PARSER = - new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), false, Builder::new); - public static final Map> PARSERS = new EnumMap<>(MlParserType.class); + public static final ObjectParser LENIENT_PARSER = 
createParser(true); + public static final ObjectParser STRICT_PARSER = createParser(false); - static { - PARSERS.put(MlParserType.METADATA, METADATA_PARSER); - PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); - for (MlParserType parserType : MlParserType.values()) { - ObjectParser parser = PARSERS.get(parserType); - assert parser != null; - parser.declareStringArray(Builder::setActions, ACTIONS_FIELD); - parser.declareObject(Builder::setScope, RuleScope.parser(parserType), SCOPE_FIELD); - parser.declareObjectArray(Builder::setConditions, (p, c) -> - RuleCondition.PARSERS.get(parserType).apply(p, c), CONDITIONS_FIELD); - } + private static ObjectParser createParser(boolean ignoreUnknownFields) { + ObjectParser parser = new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), ignoreUnknownFields, Builder::new); + + parser.declareStringArray(Builder::setActions, ACTIONS_FIELD); + parser.declareObject(Builder::setScope, RuleScope.parser(ignoreUnknownFields), SCOPE_FIELD); + parser.declareObjectArray(Builder::setConditions, ignoreUnknownFields ? 
RuleCondition.LENIENT_PARSER : RuleCondition.STRICT_PARSER, + CONDITIONS_FIELD); + + return parser; } private final EnumSet actions; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java index dc4b55d73a5..93aa5495c40 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.MlParserType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.process.autodetect.writer.RecordWriter; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -26,12 +25,10 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.EnumMap; import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.TreeSet; @@ -89,33 +86,31 @@ public class Detector implements ToXContentObject, Writeable { public static final ParseField DETECTOR_INDEX = new ParseField("detector_index"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ObjectParser METADATA_PARSER = new ObjectParser<>("detector", true, Builder::new); - public static final ObjectParser CONFIG_PARSER = new ObjectParser<>("detector", false, Builder::new); - public static final Map> PARSERS = new EnumMap<>(MlParserType.class); 
+ public static final ObjectParser LENIENT_PARSER = createParser(true); + public static final ObjectParser STRICT_PARSER = createParser(false); - static { - PARSERS.put(MlParserType.METADATA, METADATA_PARSER); - PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); - for (MlParserType parserType : MlParserType.values()) { - ObjectParser parser = PARSERS.get(parserType); - assert parser != null; - parser.declareString(Builder::setDetectorDescription, DETECTOR_DESCRIPTION_FIELD); - parser.declareString(Builder::setFunction, FUNCTION_FIELD); - parser.declareString(Builder::setFieldName, FIELD_NAME_FIELD); - parser.declareString(Builder::setByFieldName, BY_FIELD_NAME_FIELD); - parser.declareString(Builder::setOverFieldName, OVER_FIELD_NAME_FIELD); - parser.declareString(Builder::setPartitionFieldName, PARTITION_FIELD_NAME_FIELD); - parser.declareBoolean(Builder::setUseNull, USE_NULL_FIELD); - parser.declareField(Builder::setExcludeFrequent, p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ExcludeFrequent.forString(p.text()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, EXCLUDE_FREQUENT_FIELD, ObjectParser.ValueType.STRING); - parser.declareObjectArray(Builder::setRules, (p, c) -> - DetectionRule.PARSERS.get(parserType).apply(p, c).build(), CUSTOM_RULES_FIELD); - parser.declareInt(Builder::setDetectorIndex, DETECTOR_INDEX); - } + private static ObjectParser createParser(boolean ignoreUnknownFields) { + ObjectParser parser = new ObjectParser<>("detector", ignoreUnknownFields, Builder::new); + + parser.declareString(Builder::setDetectorDescription, DETECTOR_DESCRIPTION_FIELD); + parser.declareString(Builder::setFunction, FUNCTION_FIELD); + parser.declareString(Builder::setFieldName, FIELD_NAME_FIELD); + parser.declareString(Builder::setByFieldName, BY_FIELD_NAME_FIELD); + parser.declareString(Builder::setOverFieldName, OVER_FIELD_NAME_FIELD); + parser.declareString(Builder::setPartitionFieldName, 
PARTITION_FIELD_NAME_FIELD); + parser.declareBoolean(Builder::setUseNull, USE_NULL_FIELD); + parser.declareField(Builder::setExcludeFrequent, p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return ExcludeFrequent.forString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, EXCLUDE_FREQUENT_FIELD, ObjectParser.ValueType.STRING); + parser.declareObjectArray(Builder::setRules, + (p, c) -> (ignoreUnknownFields ? DetectionRule.LENIENT_PARSER : DetectionRule.STRICT_PARSER).apply(p, c).build(), + CUSTOM_RULES_FIELD); + parser.declareInt(Builder::setDetectorIndex, DETECTOR_INDEX); + + return parser; } public static final String BY = "by"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/FilterRef.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/FilterRef.java index 7f3fb562879..9410e37250f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/FilterRef.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/FilterRef.java @@ -14,12 +14,9 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.MlParserType; import java.io.IOException; -import java.util.EnumMap; import java.util.Locale; -import java.util.Map; import java.util.Objects; public class FilterRef implements ToXContentObject, Writeable { @@ -42,28 +39,22 @@ public class FilterRef implements ToXContentObject, Writeable { } // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ConstructingObjectParser METADATA_PARSER = - new ConstructingObjectParser<>(FILTER_REF_FIELD.getPreferredName(), 
true, - a -> new FilterRef((String) a[0], (FilterType) a[1])); - public static final ConstructingObjectParser CONFIG_PARSER = - new ConstructingObjectParser<>(FILTER_REF_FIELD.getPreferredName(), false, - a -> new FilterRef((String) a[0], (FilterType) a[1])); - public static final Map> PARSERS = new EnumMap<>(MlParserType.class); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); - static { - PARSERS.put(MlParserType.METADATA, METADATA_PARSER); - PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); - for (MlParserType parserType : MlParserType.values()) { - ConstructingObjectParser parser = PARSERS.get(parserType); - assert parser != null; - parser.declareString(ConstructingObjectParser.constructorArg(), FILTER_ID); - parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return FilterType.fromString(p.text()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, FILTER_TYPE, ObjectParser.ValueType.STRING); - } + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(FILTER_REF_FIELD.getPreferredName(), + ignoreUnknownFields, a -> new FilterRef((String) a[0], (FilterType) a[1])); + + parser.declareString(ConstructingObjectParser.constructorArg(), FILTER_ID); + parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return FilterType.fromString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, FILTER_TYPE, ObjectParser.ValueType.STRING); + + return parser; } private final String filterId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index 560bac895fa..0005d16a99c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.xpack.core.ml.MlParserType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; @@ -34,7 +33,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; -import java.util.EnumMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -85,69 +83,70 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO public static final ParseField RESULTS_FIELD = new ParseField("jobs"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ObjectParser METADATA_PARSER = new ObjectParser<>("job_details", true, Builder::new); - public static final ObjectParser CONFIG_PARSER = new ObjectParser<>("job_details", false, Builder::new); - public static final Map> PARSERS = new EnumMap<>(MlParserType.class); + public static final ObjectParser LENIENT_PARSER = createParser(true); + public static final ObjectParser STRICT_PARSER = createParser(false); public static final TimeValue MIN_BACKGROUND_PERSIST_INTERVAL = TimeValue.timeValueHours(1); public static final ByteSizeValue PROCESS_MEMORY_OVERHEAD = new ByteSizeValue(100, ByteSizeUnit.MB); 
public static final long DEFAULT_MODEL_SNAPSHOT_RETENTION_DAYS = 1; - static { - PARSERS.put(MlParserType.METADATA, METADATA_PARSER); - PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); - for (MlParserType parserType : MlParserType.values()) { - ObjectParser parser = PARSERS.get(parserType); - assert parser != null; - parser.declareString(Builder::setId, ID); - parser.declareString(Builder::setJobType, JOB_TYPE); - parser.declareString(Builder::setJobVersion, JOB_VERSION); - parser.declareStringArray(Builder::setGroups, GROUPS); - parser.declareStringOrNull(Builder::setDescription, DESCRIPTION); - parser.declareField(Builder::setCreateTime, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + - "] for [" + CREATE_TIME.getPreferredName() + "]"); - }, CREATE_TIME, ValueType.VALUE); - parser.declareField(Builder::setFinishedTime, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + FINISHED_TIME.getPreferredName() + "]"); - }, FINISHED_TIME, ValueType.VALUE); - parser.declareField(Builder::setLastDataTime, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + LAST_DATA_TIME.getPreferredName() + "]"); - }, LAST_DATA_TIME, ValueType.VALUE); - parser.declareLong(Builder::setEstablishedModelMemory, ESTABLISHED_MODEL_MEMORY); - 
parser.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSERS.get(parserType), ANALYSIS_CONFIG); - parser.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSERS.get(parserType), ANALYSIS_LIMITS); - parser.declareObject(Builder::setDataDescription, DataDescription.PARSERS.get(parserType), DATA_DESCRIPTION); - parser.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSERS.get(parserType), MODEL_PLOT_CONFIG); - parser.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS); - parser.declareString((builder, val) -> builder.setBackgroundPersistInterval( - TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName())), BACKGROUND_PERSIST_INTERVAL); - parser.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS); - parser.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS); - parser.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT); - parser.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); - parser.declareStringOrNull(Builder::setModelSnapshotMinVersion, MODEL_SNAPSHOT_MIN_VERSION); - parser.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); - parser.declareBoolean(Builder::setDeleted, DELETED); - } + private static ObjectParser createParser(boolean ignoreUnknownFields) { + ObjectParser parser = new ObjectParser<>("job_details", ignoreUnknownFields, Builder::new); + + parser.declareString(Builder::setId, ID); + parser.declareString(Builder::setJobType, JOB_TYPE); + parser.declareString(Builder::setJobVersion, JOB_VERSION); + parser.declareStringArray(Builder::setGroups, GROUPS); + parser.declareStringOrNull(Builder::setDescription, DESCRIPTION); + parser.declareField(Builder::setCreateTime, p -> { + if (p.currentToken() == Token.VALUE_NUMBER) { + return new Date(p.longValue()); + } else if (p.currentToken() == Token.VALUE_STRING) { + return new 
Date(TimeUtils.dateStringToEpoch(p.text())); + } + throw new IllegalArgumentException("unexpected token [" + p.currentToken() + + "] for [" + CREATE_TIME.getPreferredName() + "]"); + }, CREATE_TIME, ValueType.VALUE); + parser.declareField(Builder::setFinishedTime, p -> { + if (p.currentToken() == Token.VALUE_NUMBER) { + return new Date(p.longValue()); + } else if (p.currentToken() == Token.VALUE_STRING) { + return new Date(TimeUtils.dateStringToEpoch(p.text())); + } + throw new IllegalArgumentException( + "unexpected token [" + p.currentToken() + "] for [" + FINISHED_TIME.getPreferredName() + "]"); + }, FINISHED_TIME, ValueType.VALUE); + parser.declareField(Builder::setLastDataTime, p -> { + if (p.currentToken() == Token.VALUE_NUMBER) { + return new Date(p.longValue()); + } else if (p.currentToken() == Token.VALUE_STRING) { + return new Date(TimeUtils.dateStringToEpoch(p.text())); + } + throw new IllegalArgumentException( + "unexpected token [" + p.currentToken() + "] for [" + LAST_DATA_TIME.getPreferredName() + "]"); + }, LAST_DATA_TIME, ValueType.VALUE); + parser.declareLong(Builder::setEstablishedModelMemory, ESTABLISHED_MODEL_MEMORY); + parser.declareObject(Builder::setAnalysisConfig, ignoreUnknownFields ? AnalysisConfig.LENIENT_PARSER : AnalysisConfig.STRICT_PARSER, + ANALYSIS_CONFIG); + parser.declareObject(Builder::setAnalysisLimits, ignoreUnknownFields ? AnalysisLimits.LENIENT_PARSER : AnalysisLimits.STRICT_PARSER, + ANALYSIS_LIMITS); + parser.declareObject(Builder::setDataDescription, + ignoreUnknownFields ? DataDescription.LENIENT_PARSER : DataDescription.STRICT_PARSER, DATA_DESCRIPTION); + parser.declareObject(Builder::setModelPlotConfig, + ignoreUnknownFields ? 
ModelPlotConfig.LENIENT_PARSER : ModelPlotConfig.STRICT_PARSER, MODEL_PLOT_CONFIG); + parser.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS); + parser.declareString((builder, val) -> builder.setBackgroundPersistInterval( + TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName())), BACKGROUND_PERSIST_INTERVAL); + parser.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS); + parser.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS); + parser.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT); + parser.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); + parser.declareStringOrNull(Builder::setModelSnapshotMinVersion, MODEL_SNAPSHOT_MIN_VERSION); + parser.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); + parser.declareBoolean(Builder::setDeleted, DELETED); + + return parser; } private final String jobId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java index 7b6843a2415..380f540a317 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java @@ -44,8 +44,8 @@ public class JobUpdate implements Writeable, ToXContentObject { parser.declareStringArray(Builder::setGroups, Job.GROUPS); parser.declareStringOrNull(Builder::setDescription, Job.DESCRIPTION); parser.declareObjectArray(Builder::setDetectorUpdates, DetectorUpdate.PARSER, DETECTORS); - parser.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.CONFIG_PARSER, Job.MODEL_PLOT_CONFIG); - parser.declareObject(Builder::setAnalysisLimits, AnalysisLimits.CONFIG_PARSER, Job.ANALYSIS_LIMITS); + parser.declareObject(Builder::setModelPlotConfig, 
ModelPlotConfig.STRICT_PARSER, Job.MODEL_PLOT_CONFIG); + parser.declareObject(Builder::setAnalysisLimits, AnalysisLimits.STRICT_PARSER, Job.ANALYSIS_LIMITS); parser.declareString((builder, val) -> builder.setBackgroundPersistInterval( TimeValue.parseTimeValue(val, Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName())), Job.BACKGROUND_PERSIST_INTERVAL); parser.declareLong(Builder::setRenormalizationWindowDays, Job.RENORMALIZATION_WINDOW_DAYS); @@ -533,7 +533,7 @@ public class JobUpdate implements Writeable, ToXContentObject { PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), Detector.DETECTOR_INDEX); PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), Job.DESCRIPTION); PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), (parser, parseFieldMatcher) -> - DetectionRule.CONFIG_PARSER.apply(parser, parseFieldMatcher).build(), Detector.CUSTOM_RULES_FIELD); + DetectionRule.STRICT_PARSER.apply(parser, parseFieldMatcher).build(), Detector.CUSTOM_RULES_FIELD); } private int detectorIndex; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfig.java index d0995d22c7e..98aa618dd1e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfig.java @@ -12,11 +12,8 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.ml.MlParserType; import java.io.IOException; -import java.util.EnumMap; -import java.util.Map; import java.util.Objects; public class ModelPlotConfig implements ToXContentObject, Writeable { 
@@ -26,24 +23,17 @@ public class ModelPlotConfig implements ToXContentObject, Writeable { public static final ParseField TERMS_FIELD = new ParseField("terms"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ConstructingObjectParser METADATA_PARSER = - new ConstructingObjectParser<>(TYPE_FIELD.getPreferredName(), true, - a -> new ModelPlotConfig((boolean) a[0], (String) a[1])); - public static final ConstructingObjectParser CONFIG_PARSER = - new ConstructingObjectParser<>(TYPE_FIELD.getPreferredName(), false, - a -> new ModelPlotConfig((boolean) a[0], (String) a[1])); - public static final Map> PARSERS = - new EnumMap<>(MlParserType.class); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); - static { - PARSERS.put(MlParserType.METADATA, METADATA_PARSER); - PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); - for (MlParserType parserType : MlParserType.values()) { - ConstructingObjectParser parser = PARSERS.get(parserType); - assert parser != null; - parser.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); - parser.declareString(ConstructingObjectParser.optionalConstructorArg(), TERMS_FIELD); - } + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(TYPE_FIELD.getPreferredName(), + ignoreUnknownFields, a -> new ModelPlotConfig((boolean) a[0], (String) a[1])); + + parser.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); + parser.declareString(ConstructingObjectParser.optionalConstructorArg(), TERMS_FIELD); + + return parser; } private final boolean enabled; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleCondition.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleCondition.java index 378ceaca6c4..25fac755931 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleCondition.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleCondition.java @@ -14,12 +14,9 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.MlParserType; import java.io.IOException; -import java.util.EnumMap; import java.util.Locale; -import java.util.Map; import java.util.Objects; public class RuleCondition implements ToXContentObject, Writeable { @@ -30,35 +27,28 @@ public class RuleCondition implements ToXContentObject, Writeable { public static final ParseField VALUE_FIELD = new ParseField("value"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ConstructingObjectParser METADATA_PARSER = - new ConstructingObjectParser<>(RULE_CONDITION_FIELD.getPreferredName(), true, - a -> new RuleCondition((AppliesTo) a[0], (Operator) a[1], (double) a[2])); - public static final ConstructingObjectParser CONFIG_PARSER = - new ConstructingObjectParser<>(RULE_CONDITION_FIELD.getPreferredName(), false, - a -> new RuleCondition((AppliesTo) a[0], (Operator) a[1], (double) a[2])); - public static final Map> PARSERS = - new EnumMap<>(MlParserType.class); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); - static { - PARSERS.put(MlParserType.METADATA, METADATA_PARSER); - PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); - for (MlParserType parserType : MlParserType.values()) { - 
ConstructingObjectParser parser = PARSERS.get(parserType); - assert parser != null; - parser.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return AppliesTo.fromString(p.text()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, APPLIES_TO_FIELD, ValueType.STRING); - parser.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return Operator.fromString(p.text()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, Operator.OPERATOR_FIELD, ValueType.STRING); - parser.declareDouble(ConstructingObjectParser.constructorArg(), VALUE_FIELD); - } + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(RULE_CONDITION_FIELD.getPreferredName(), + ignoreUnknownFields, a -> new RuleCondition((AppliesTo) a[0], (Operator) a[1], (double) a[2])); + + parser.declareField(ConstructingObjectParser.constructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return AppliesTo.fromString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, APPLIES_TO_FIELD, ValueType.STRING); + parser.declareField(ConstructingObjectParser.constructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return Operator.fromString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, Operator.OPERATOR_FIELD, ValueType.STRING); + parser.declareDouble(ConstructingObjectParser.constructorArg(), VALUE_FIELD); + + return parser; } private final AppliesTo appliesTo; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleScope.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleScope.java index 0b11fa0e15b..0c633c6ead2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleScope.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleScope.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.MlParserType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -32,13 +31,14 @@ import java.util.stream.Collectors; public class RuleScope implements ToXContentObject, Writeable { - public static ContextParser parser(MlParserType parserType) { + public static ContextParser parser(boolean ignoreUnknownFields) { return (p, c) -> { Map unparsedScope = p.map(); if (unparsedScope.isEmpty()) { return new RuleScope(); } - ConstructingObjectParser filterRefParser = FilterRef.PARSERS.get(parserType); + ConstructingObjectParser filterRefParser = + ignoreUnknownFields ? 
FilterRef.LENIENT_PARSER : FilterRef.STRICT_PARSER; Map scope = new HashMap<>(); for (Map.Entry entry : unparsedScope.entrySet()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/ChunkingConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/ChunkingConfigTests.java index ef89200b765..f91d7389366 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/ChunkingConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/ChunkingConfigTests.java @@ -32,7 +32,7 @@ public class ChunkingConfigTests extends AbstractSerializingTestCase DatafeedConfig.CONFIG_PARSER.apply(parser, null).build()); + () -> DatafeedConfig.STRICT_PARSER.apply(parser, null).build()); assertEquals("[6:5] [datafeed_config] unknown field [tomorrows_technology_today], parser not found", e.getMessage()); } @@ -164,7 +164,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase AnalysisLimits.CONFIG_PARSER.apply(parser, null)); + XContentParseException e = expectThrows(XContentParseException.class, () -> AnalysisLimits.STRICT_PARSER.apply(parser, null)); assertThat(ExceptionsHelper.detailedMessage(e), containsString("model_memory_limit must be at least 1 MiB. Value = -1")); } @@ -56,7 +56,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase AnalysisLimits.CONFIG_PARSER.apply(parser, null)); + XContentParseException e = expectThrows(XContentParseException.class, () -> AnalysisLimits.STRICT_PARSER.apply(parser, null)); assertThat(ExceptionsHelper.detailedMessage(e), containsString("model_memory_limit must be at least 1 MiB. 
Value = 0")); } @@ -65,7 +65,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase AnalysisLimits.CONFIG_PARSER.apply(parser, null)); + XContentParseException e = expectThrows(XContentParseException.class, () -> AnalysisLimits.STRICT_PARSER.apply(parser, null)); assertThat(ExceptionsHelper.detailedMessage(e), containsString("Values less than -1 bytes are not supported: -4mb")); } @@ -82,7 +82,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase AnalysisLimits.CONFIG_PARSER.apply(parser, null)); + XContentParseException e = expectThrows(XContentParseException.class, () -> AnalysisLimits.STRICT_PARSER.apply(parser, null)); assertThat(ExceptionsHelper.detailedMessage(e), containsString("model_memory_limit must be at least 1 MiB. Value = 0")); } @@ -90,7 +90,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase AnalysisLimits.CONFIG_PARSER.apply(parser, null)); + XContentParseException e = expectThrows(XContentParseException.class, () -> AnalysisLimits.STRICT_PARSER.apply(parser, null)); assertThat(ExceptionsHelper.detailedMessage(e), containsString("model_memory_limit must be at least 1 MiB. 
Value = 0")); } @@ -99,7 +99,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase DataDescription.CONFIG_PARSER.apply(parser, null)); + () -> DataDescription.STRICT_PARSER.apply(parser, null)); assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [format]")); Throwable cause = ex.getCause(); assertNotNull(cause); @@ -226,7 +226,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase DataDescription.CONFIG_PARSER.apply(parser, null)); + () -> DataDescription.STRICT_PARSER.apply(parser, null)); assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [field_delimiter]")); Throwable cause = ex.getCause(); assertNotNull(cause); @@ -240,7 +240,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase DataDescription.CONFIG_PARSER.apply(parser, null)); + () -> DataDescription.STRICT_PARSER.apply(parser, null)); assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [quote_character]")); Throwable cause = ex.getCause(); assertNotNull(cause); @@ -284,7 +284,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase { @Override protected Detector doParseInstance(XContentParser parser) { - return Detector.CONFIG_PARSER.apply(parser, null).build(); + return Detector.STRICT_PARSER.apply(parser, null).build(); } public void testVerifyFieldNames_givenInvalidChars() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/FilterRefTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/FilterRefTests.java index 241bf659332..71530110687 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/FilterRefTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/FilterRefTests.java @@ -20,11 +20,11 @@ public class FilterRefTests extends AbstractSerializingTestCase { @Override protected 
FilterRef doParseInstance(XContentParser parser) throws IOException { - return FilterRef.CONFIG_PARSER.parse(parser, null); + return FilterRef.STRICT_PARSER.parse(parser, null); } @Override protected Writeable.Reader instanceReader() { return FilterRef::new; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java index 0f35abd4bcf..88d9b07816d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java @@ -74,14 +74,14 @@ public class JobTests extends AbstractSerializingTestCase { @Override protected Job doParseInstance(XContentParser parser) { - return Job.CONFIG_PARSER.apply(parser, null).build(); + return Job.STRICT_PARSER.apply(parser, null).build(); } public void testFutureConfigParse() throws IOException { XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, FUTURE_JOB); XContentParseException e = expectThrows(XContentParseException.class, - () -> Job.CONFIG_PARSER.apply(parser, null).build()); + () -> Job.STRICT_PARSER.apply(parser, null).build()); assertEquals("[4:5] [job_details] unknown field [tomorrows_technology_today], parser not found", e.getMessage()); } @@ -89,7 +89,7 @@ public class JobTests extends AbstractSerializingTestCase { XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, FUTURE_JOB); // Unlike the config version of this test, the metadata parser should tolerate the unknown future field - assertNotNull(Job.METADATA_PARSER.apply(parser, null).build()); + assertNotNull(Job.LENIENT_PARSER.apply(parser, null).build()); } public 
void testConstructor_GivenEmptyJobConfiguration() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfigTests.java index aa54a174194..c57f637d572 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfigTests.java @@ -31,6 +31,6 @@ public class ModelPlotConfigTests extends AbstractSerializingTestCase { @Override protected MlMetadata doParseInstance(XContentParser parser) { - return MlMetadata.METADATA_PARSER.apply(parser, null).build(); + return MlMetadata.LENIENT_PARSER.apply(parser, null).build(); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java index 2fe2c0b334c..64110774535 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; -import org.elasticsearch.xpack.core.ml.MlParserType; import java.io.IOException; import java.util.HashMap; @@ -72,7 +71,7 @@ public class CategorizationAnalyzerConfigTests extends AbstractSerializingTestCa @Override protected CategorizationAnalyzerConfig doParseInstance(XContentParser parser) throws IOException { - return CategorizationAnalyzerConfig.buildFromXContentObject(parser, MlParserType.CONFIG); + 
return CategorizationAnalyzerConfig.buildFromXContentObject(parser, false); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java index e5f1c32fb8c..807e94c2d90 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java @@ -86,6 +86,6 @@ public class JobBuilderTests extends AbstractSerializingTestCase { @Override protected Job.Builder doParseInstance(XContentParser parser) { - return Job.CONFIG_PARSER.apply(parser, null); + return Job.STRICT_PARSER.apply(parser, null); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidator.java index a63d90178dc..8482e6f090c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidator.java @@ -132,7 +132,7 @@ public class KerberosTicketValidator { * @param subject authenticated subject * @return a byte[] containing the token to be sent to the peer. null indicates * that no token is generated. 
- * @throws PrivilegedActionException + * @throws PrivilegedActionException when privileged action threw exception * @see GSSContext#acceptSecContext(byte[], int, int) */ private static byte[] acceptSecContext(final byte[] base64decodedTicket, final GSSContext gssContext, Subject subject) @@ -148,7 +148,7 @@ public class KerberosTicketValidator { * @param gssManager {@link GSSManager} * @param subject logged in {@link Subject} * @return {@link GSSCredential} for particular mechanism - * @throws PrivilegedActionException + * @throws PrivilegedActionException when privileged action threw exception */ private static GSSCredential createCredentials(final GSSManager gssManager, final Subject subject) throws PrivilegedActionException { return doAsWrapper(subject, (PrivilegedExceptionAction) () -> gssManager.createCredential(null, @@ -163,7 +163,7 @@ public class KerberosTicketValidator { * @param action {@link PrivilegedExceptionAction} action for performing inside * Subject.doAs * @return the value returned by the PrivilegedExceptionAction's run method - * @throws PrivilegedActionException + * @throws PrivilegedActionException when privileged action threw exception */ private static T doAsWrapper(final Subject subject, final PrivilegedExceptionAction action) throws PrivilegedActionException { try { diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.4.0.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.4.0.jar.sha1 deleted file mode 100644 index 80ba6c76aa3..00000000000 --- a/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -730d9ac80436c8cbc0b2a8a749259be536b97316 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 new file mode 100644 index 00000000000..d38fb392c35 --- /dev/null +++ 
b/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 @@ -0,0 +1 @@ +471096d6e92338b208aa91f3a85feb2f9cfc4afd \ No newline at end of file diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index f1495f4f3ac..31d6312f662 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.http.HttpStatus; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.settings.Settings; @@ -128,18 +129,22 @@ public class XPackRestIT extends ESClientYamlSuiteTestCase { () -> "Exception when waiting for [" + template + "] template to be created"); } - boolean existsWatcherIndex = adminClient().performRequest("HEAD", ".watches").getStatusLine().getStatusCode() == 200; + boolean existsWatcherIndex = adminClient() + .performRequest(new Request("HEAD", ".watches")) + .getStatusLine().getStatusCode() == 200; if (existsWatcherIndex == false) { return; } - Response response = adminClient().performRequest("GET", ".watches/_search", Collections.singletonMap("size", "1000")); + Request searchWatchesRequest = new Request("GET", ".watches/_search"); + searchWatchesRequest.addParameter("size", "1000"); + Response response = adminClient().performRequest(searchWatchesRequest); ObjectPath objectPathResponse = ObjectPath.createFromResponse(response); int totalHits = objectPathResponse.evaluate("hits.total"); if (totalHits > 0) { List> hits = objectPathResponse.evaluate("hits.hits"); for (Map hit : hits) { String id = (String) hit.get("_id"); - 
assertOK(adminClient().performRequest("DELETE", "_xpack/watcher/watch/" + id)); + adminClient().performRequest(new Request("DELETE", "_xpack/watcher/watch/" + id)); } } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java index 474f69c70ed..e345e890db1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.notification.NotificationService; +import java.util.Collections; import java.util.List; import static java.util.Collections.emptyMap; @@ -74,7 +75,7 @@ public class WatcherPluginTests extends ESTestCase { IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(Watch.INDEX, settings); AnalysisRegistry registry = new AnalysisRegistry(TestEnvironment.newEnvironment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap()); - IndexModule indexModule = new IndexModule(indexSettings, registry, new InternalEngineFactory()); + IndexModule indexModule = new IndexModule(indexSettings, registry, new InternalEngineFactory(), Collections.emptyMap()); // this will trip an assertion if the watcher indexing operation listener is null (which it is) but we try to add it watcher.onIndexModule(indexModule); diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index ba6f9e91678..b91092fe9e7 100644 --- 
a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.restart; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; import org.elasticsearch.client.Request; @@ -15,9 +13,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.StreamsUtils; @@ -37,16 +33,12 @@ import org.junit.Before; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Base64; -import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -110,11 +102,13 @@ public class FullClusterRestartIT extends ESRestTestCase { String doc = "{\"test\": \"test\"}"; if (runningAgainstOldCluster) { - client().performRequest("PUT", docLocation, singletonMap("refresh", "true"), - new StringEntity(doc, ContentType.APPLICATION_JSON)); + Request createDoc = new Request("PUT", docLocation); + 
createDoc.addParameter("refresh", "true"); + createDoc.setJsonEntity(doc); + client().performRequest(createDoc); } - assertThat(toStr(client().performRequest("GET", docLocation)), containsString(doc)); + assertThat(toStr(client().performRequest(new Request("GET", docLocation))), containsString(doc)); } @SuppressWarnings("unchecked") @@ -124,8 +118,8 @@ public class FullClusterRestartIT extends ESRestTestCase { createRole("preupgrade_role"); } else { waitForYellow(".security"); - Response settingsResponse = client().performRequest("GET", "/.security/_settings/index.format"); - Map settingsResponseMap = toMap(settingsResponse); + Response settingsResponse = client().performRequest(new Request("GET", "/.security/_settings/index.format")); + Map settingsResponseMap = entityAsMap(settingsResponse); logger.info("settings response map {}", settingsResponseMap); final boolean needsUpgrade; final String concreteSecurityIndex; @@ -157,7 +151,8 @@ public class FullClusterRestartIT extends ESRestTestCase { "the native realm will not be operational until the upgrade API is run on the security index")); } // run upgrade API - Response upgradeResponse = client().performRequest("POST", "_xpack/migration/upgrade/" + concreteSecurityIndex); + Response upgradeResponse = client().performRequest( + new Request("POST", "_xpack/migration/upgrade/" + concreteSecurityIndex)); logger.info("upgrade response:\n{}", toStr(upgradeResponse)); } @@ -177,16 +172,19 @@ public class FullClusterRestartIT extends ESRestTestCase { public void testWatcher() throws Exception { if (runningAgainstOldCluster) { logger.info("Adding a watch on old cluster {}", oldClusterVersion); - client().performRequest("PUT", "_xpack/watcher/watch/bwc_watch", emptyMap(), - new StringEntity(loadWatch("simple-watch.json"), ContentType.APPLICATION_JSON)); + Request createBwcWatch = new Request("PUT", "_xpack/watcher/watch/bwc_watch"); + createBwcWatch.setJsonEntity(loadWatch("simple-watch.json")); + 
client().performRequest(createBwcWatch); logger.info("Adding a watch with \"fun\" throttle periods on old cluster"); - client().performRequest("PUT", "_xpack/watcher/watch/bwc_throttle_period", emptyMap(), - new StringEntity(loadWatch("throttle-period-watch.json"), ContentType.APPLICATION_JSON)); + Request createBwcThrottlePeriod = new Request("PUT", "_xpack/watcher/watch/bwc_throttle_period"); + createBwcThrottlePeriod.setJsonEntity(loadWatch("throttle-period-watch.json")); + client().performRequest(createBwcThrottlePeriod); logger.info("Adding a watch with \"fun\" read timeout on old cluster"); - client().performRequest("PUT", "_xpack/watcher/watch/bwc_funny_timeout", emptyMap(), - new StringEntity(loadWatch("funny-timeout-watch.json"), ContentType.APPLICATION_JSON)); + Request createFunnyTimeout = new Request("PUT", "_xpack/watcher/watch/bwc_funny_timeout"); + createFunnyTimeout.setJsonEntity(loadWatch("funny-timeout-watch.json")); + client().performRequest(createFunnyTimeout); logger.info("Waiting for watch results index to fill up..."); waitForYellow(".watches,bwc_watch_index,.watcher-history*"); @@ -198,7 +196,7 @@ public class FullClusterRestartIT extends ESRestTestCase { waitForYellow(".watches,bwc_watch_index,.watcher-history*"); logger.info("checking if the upgrade procedure on the new cluster is required"); - Map response = toMap(client().performRequest("GET", "/_xpack/migration/assistance")); + Map response = entityAsMap(client().performRequest(new Request("GET", "/_xpack/migration/assistance"))); logger.info(response); @SuppressWarnings("unchecked") Map indices = (Map) response.get("indices"); @@ -211,14 +209,16 @@ public class FullClusterRestartIT extends ESRestTestCase { logger.info("starting upgrade procedure on the new cluster"); - Map params = Collections.singletonMap("error_trace", "true"); - Map upgradeResponse = toMap(client().performRequest("POST", "_xpack/migration/upgrade/.watches", params)); + Request migrationAssistantRequest = new 
Request("POST", "_xpack/migration/upgrade/.watches"); + migrationAssistantRequest.addParameter("error_trace", "true"); + Map upgradeResponse = entityAsMap(client().performRequest(migrationAssistantRequest)); assertThat(upgradeResponse.get("timed_out"), equalTo(Boolean.FALSE)); // we posted 3 watches, but monitoring can post a few more assertThat((int) upgradeResponse.get("total"), greaterThanOrEqualTo(3)); logger.info("checking that upgrade procedure on the new cluster is no longer required"); - Map responseAfter = toMap(client().performRequest("GET", "/_xpack/migration/assistance")); + Map responseAfter = entityAsMap(client().performRequest( + new Request("GET", "/_xpack/migration/assistance"))); @SuppressWarnings("unchecked") Map indicesAfter = (Map) responseAfter.get("indices"); assertNull(indicesAfter.get(".watches")); } else { @@ -226,10 +226,10 @@ public class FullClusterRestartIT extends ESRestTestCase { } // Wait for watcher to actually start.... - Map startWatchResponse = toMap(client().performRequest("POST", "_xpack/watcher/_start")); + Map startWatchResponse = entityAsMap(client().performRequest(new Request("POST", "_xpack/watcher/_start"))); assertThat(startWatchResponse.get("acknowledged"), equalTo(Boolean.TRUE)); assertBusy(() -> { - Map statsWatchResponse = toMap(client().performRequest("GET", "_xpack/watcher/stats")); + Map statsWatchResponse = entityAsMap(client().performRequest(new Request("GET", "_xpack/watcher/stats"))); @SuppressWarnings("unchecked") List states = ((List) statsWatchResponse.get("stats")) .stream().map(o -> ((Map) o).get("watcher_state")).collect(Collectors.toList()); @@ -244,10 +244,11 @@ public class FullClusterRestartIT extends ESRestTestCase { /* Shut down watcher after every test because watcher can be a bit finicky about shutting down when the node shuts * down. This makes super sure it shuts down *and* causes the test to fail in a sensible spot if it doesn't shut down. 
*/ - Map stopWatchResponse = toMap(client().performRequest("POST", "_xpack/watcher/_stop")); + Map stopWatchResponse = entityAsMap(client().performRequest(new Request("POST", "_xpack/watcher/_stop"))); assertThat(stopWatchResponse.get("acknowledged"), equalTo(Boolean.TRUE)); assertBusy(() -> { - Map statsStoppedWatchResponse = toMap(client().performRequest("GET", "_xpack/watcher/stats")); + Map statsStoppedWatchResponse = entityAsMap(client().performRequest( + new Request("GET", "_xpack/watcher/stats"))); @SuppressWarnings("unchecked") List states = ((List) statsStoppedWatchResponse.get("stats")) .stream().map(o -> ((Map) o).get("watcher_state")).collect(Collectors.toList()); @@ -297,12 +298,12 @@ public class FullClusterRestartIT extends ESRestTestCase { + "]" + "}"); - Map createRollupJobResponse = toMap(client().performRequest(createRollupJobRequest)); + Map createRollupJobResponse = entityAsMap(client().performRequest(createRollupJobRequest)); assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); // start the rollup job final Request startRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-job-test/_start"); - Map startRollupJobResponse = toMap(client().performRequest(startRollupJobRequest)); + Map startRollupJobResponse = entityAsMap(client().performRequest(startRollupJobRequest)); assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE)); assertRollUpJob("rollup-job-test"); @@ -315,7 +316,7 @@ public class FullClusterRestartIT extends ESRestTestCase { if (oldClusterVersion.onOrAfter(Version.V_6_2_0)) { clusterHealthRequest.addParameter("wait_for_no_initializing_shards", "true"); } - Map clusterHealthResponse = toMap(client().performRequest(clusterHealthRequest)); + Map clusterHealthResponse = entityAsMap(client().performRequest(clusterHealthRequest)); assertThat(clusterHealthResponse.get("timed_out"), equalTo(Boolean.FALSE)); assertRollUpJob("rollup-job-test"); @@ -327,14 +328,17 @@ public class 
FullClusterRestartIT extends ESRestTestCase { assumeTrue("It is only possible to build an index that sql doesn't like before 6.0.0", oldClusterVersion.before(Version.V_6_0_0_alpha1)); if (runningAgainstOldCluster) { - client().performRequest("POST", "/testsqlfailsonindexwithtwotypes/type1", emptyMap(), - new StringEntity("{}", ContentType.APPLICATION_JSON)); - client().performRequest("POST", "/testsqlfailsonindexwithtwotypes/type2", emptyMap(), - new StringEntity("{}", ContentType.APPLICATION_JSON)); + Request doc1 = new Request("POST", "/testsqlfailsonindexwithtwotypes/type1"); + doc1.setJsonEntity("{}"); + client().performRequest(doc1); + Request doc2 = new Request("POST", "/testsqlfailsonindexwithtwotypes/type2"); + doc2.setJsonEntity("{}"); + client().performRequest(doc2); return; } - ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest("POST", "/_xpack/sql", emptyMap(), - new StringEntity("{\"query\":\"SELECT * FROM testsqlfailsonindexwithtwotypes\"}", ContentType.APPLICATION_JSON))); + Request sqlRequest = new Request("POST", "/_xpack/sql"); + sqlRequest.setJsonEntity("{\"query\":\"SELECT * FROM testsqlfailsonindexwithtwotypes\"}"); + ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(sqlRequest)); assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); assertThat(e.getMessage(), containsString( "[testsqlfailsonindexwithtwotypes] contains more than one type [type1, type2] so it is incompatible with sql")); @@ -346,14 +350,14 @@ public class FullClusterRestartIT extends ESRestTestCase { @SuppressWarnings("unchecked") private void assertOldTemplatesAreDeleted() throws IOException { - Map templates = toMap(client().performRequest("GET", "/_template")); + Map templates = entityAsMap(client().performRequest(new Request("GET", "/_template"))); assertThat(templates.keySet(), not(hasItems(is("watches"), startsWith("watch-history"), is("triggered_watches")))); } 
@SuppressWarnings("unchecked") private void assertWatchIndexContentsWork() throws Exception { // Fetch a basic watch - Map bwcWatch = toMap(client().performRequest("GET", "_xpack/watcher/watch/bwc_watch")); + Map bwcWatch = entityAsMap(client().performRequest(new Request("GET", "_xpack/watcher/watch/bwc_watch"))); logger.error("-----> {}", bwcWatch); @@ -368,7 +372,7 @@ public class FullClusterRestartIT extends ESRestTestCase { assertThat(ObjectPath.eval("actions.index_payload.index.timeout_in_millis", source), equalTo(timeout)); // Fetch a watch with "fun" throttle periods - bwcWatch = toMap(client().performRequest("GET", "_xpack/watcher/watch/bwc_throttle_period")); + bwcWatch = entityAsMap(client().performRequest(new Request("GET", "_xpack/watcher/watch/bwc_throttle_period"))); assertThat(bwcWatch.get("found"), equalTo(true)); source = (Map) bwcWatch.get("watch"); assertEquals(timeout, source.get("throttle_period_in_millis")); @@ -378,7 +382,7 @@ public class FullClusterRestartIT extends ESRestTestCase { * Fetch a watch with a funny timeout to verify loading fractional time * values. 
*/ - bwcWatch = toMap(client().performRequest("GET", "_xpack/watcher/watch/bwc_funny_timeout")); + bwcWatch = entityAsMap(client().performRequest(new Request("GET", "_xpack/watcher/watch/bwc_funny_timeout"))); assertThat(bwcWatch.get("found"), equalTo(true)); source = (Map) bwcWatch.get("watch"); @@ -396,7 +400,7 @@ public class FullClusterRestartIT extends ESRestTestCase { // password doesn't come back because it is hidden assertThat(basic, hasEntry(is("password"), anyOf(startsWith("::es_encrypted::"), is("::es_redacted::")))); - Map history = toMap(client().performRequest("GET", ".watcher-history*/_search")); + Map history = entityAsMap(client().performRequest(new Request("GET", ".watcher-history*/_search"))); Map hits = (Map) history.get("hits"); assertThat((int) (hits.get("total")), greaterThanOrEqualTo(2)); } @@ -407,20 +411,20 @@ public class FullClusterRestartIT extends ESRestTestCase { .condition(InternalAlwaysCondition.INSTANCE) .trigger(ScheduleTrigger.builder(new IntervalSchedule(IntervalSchedule.Interval.seconds(1)))) .addAction("awesome", LoggingAction.builder(new TextTemplate("test"))).buildAsBytes(XContentType.JSON).utf8ToString(); - Map put = toMap(client().performRequest("PUT", "_xpack/watcher/watch/new_watch", emptyMap(), - new StringEntity(watch, ContentType.APPLICATION_JSON))); + Request createWatchRequest = new Request("PUT", "_xpack/watcher/watch/new_watch"); + createWatchRequest.setJsonEntity(watch); + Map createWatch = entityAsMap(client().performRequest(createWatchRequest)); - logger.info(put); + logger.info("create watch {}", createWatch); - assertThat(put.get("created"), equalTo(true)); - assertThat(put.get("_version"), equalTo(1)); + assertThat(createWatch.get("created"), equalTo(true)); + assertThat(createWatch.get("_version"), equalTo(1)); - put = toMap(client().performRequest("PUT", "_xpack/watcher/watch/new_watch", emptyMap(), - new StringEntity(watch, ContentType.APPLICATION_JSON))); - assertThat(put.get("created"), equalTo(false)); 
- assertThat(put.get("_version"), equalTo(2)); + Map updateWatch = entityAsMap(client().performRequest(createWatchRequest)); + assertThat(updateWatch.get("created"), equalTo(false)); + assertThat(updateWatch.get("_version"), equalTo(2)); - Map get = toMap(client().performRequest("GET", "_xpack/watcher/watch/new_watch")); + Map get = entityAsMap(client().performRequest(new Request("GET", "_xpack/watcher/watch/new_watch"))); assertThat(get.get("found"), equalTo(true)); @SuppressWarnings("unchecked") Map source = (Map) get.get("watch"); Map logging = ObjectPath.eval("actions.awesome.logging", source); @@ -429,23 +433,24 @@ public class FullClusterRestartIT extends ESRestTestCase { } private void waitForYellow(String indexName) throws IOException { - Map params = new HashMap<>(); - params.put("wait_for_status", "yellow"); - params.put("timeout", "30s"); - params.put("wait_for_no_relocating_shards", "true"); + Request request = new Request("GET", "/_cluster/health/" + indexName); + request.addParameter("wait_for_status", "yellow"); + request.addParameter("timeout", "30s"); + request.addParameter("wait_for_no_relocating_shards", "true"); if (oldClusterVersion.onOrAfter(Version.V_6_2_0)) { - params.put("wait_for_no_initializing_shards", "true"); + request.addParameter("wait_for_no_initializing_shards", "true"); } - Map response = toMap(client().performRequest("GET", "/_cluster/health/" + indexName, params)); + Map response = entityAsMap(client().performRequest(request)); assertThat(response.get("timed_out"), equalTo(Boolean.FALSE)); } @SuppressWarnings("unchecked") private void waitForHits(String indexName, int expectedHits) throws Exception { - Map params = singletonMap("size", "0"); + Request request = new Request("GET", "/" + indexName + "/_search"); + request.addParameter("size", "0"); assertBusy(() -> { try { - Map response = toMap(client().performRequest("GET", "/" + indexName + "/_search", params)); + Map response = entityAsMap(client().performRequest(request)); 
Map hits = (Map) response.get("hits"); int total = (int) hits.get("total"); assertThat(total, greaterThanOrEqualTo(expectedHits)); @@ -461,34 +466,26 @@ public class FullClusterRestartIT extends ESRestTestCase { }, 30, TimeUnit.SECONDS); } - static Map toMap(Response response) throws IOException { - return toMap(EntityUtils.toString(response.getEntity())); - } - - static Map toMap(String response) throws IOException { - return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false); - } - static String toStr(Response response) throws IOException { return EntityUtils.toString(response.getEntity()); } private void createUser(final String id) throws Exception { - final String userJson = + Request request = new Request("PUT", "/_xpack/security/user/" + id); + request.setJsonEntity( "{\n" + " \"password\" : \"j@rV1s\",\n" + " \"roles\" : [ \"admin\", \"other_role1\" ],\n" + " \"full_name\" : \"" + randomAlphaOfLength(5) + "\",\n" + " \"email\" : \"" + id + "@example.com\",\n" + " \"enabled\": true\n" + - "}"; - - client().performRequest("PUT", "/_xpack/security/user/" + id, emptyMap(), - new StringEntity(userJson, ContentType.APPLICATION_JSON)); + "}"); + client().performRequest(request); } private void createRole(final String id) throws Exception { - final String roleJson = + Request request = new Request("PUT", "/_xpack/security/role/" + id); + request.setJsonEntity( "{\n" + " \"run_as\": [ \"abc\" ],\n" + " \"cluster\": [ \"monitor\" ],\n" + @@ -502,14 +499,12 @@ public class FullClusterRestartIT extends ESRestTestCase { " \"query\": \"{\\\"match\\\": {\\\"category\\\": \\\"click\\\"}}\"\n" + " }\n" + " ]\n" + - "}"; - - client().performRequest("PUT", "/_xpack/security/role/" + id, emptyMap(), - new StringEntity(roleJson, ContentType.APPLICATION_JSON)); + "}"); + client().performRequest(request); } private void assertUserInfo(final String user) throws Exception { - Map response = toMap(client().performRequest("GET", "/_xpack/security/user/" + user)); 
+ Map response = entityAsMap(client().performRequest(new Request("GET", "/_xpack/security/user/" + user))); @SuppressWarnings("unchecked") Map userInfo = (Map) response.get(user); assertEquals(user + "@example.com", userInfo.get("email")); assertNotNull(userInfo.get("full_name")); @@ -518,7 +513,7 @@ public class FullClusterRestartIT extends ESRestTestCase { private void assertRoleInfo(final String role) throws Exception { @SuppressWarnings("unchecked") Map response = (Map) - toMap(client().performRequest("GET", "/_xpack/security/role/" + role)).get(role); + entityAsMap(client().performRequest(new Request("GET", "/_xpack/security/role/" + role))).get(role); assertNotNull(response.get("run_as")); assertNotNull(response.get("cluster")); assertNotNull(response.get("indices")); @@ -531,7 +526,7 @@ public class FullClusterRestartIT extends ESRestTestCase { // check that the rollup job is started using the RollUp API final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); - Map getRollupJobResponse = toMap(client().performRequest(getRollupJobRequest)); + Map getRollupJobResponse = entityAsMap(client().performRequest(getRollupJobRequest)); Map job = getJob(getRollupJobResponse, rollupJob); if (job != null) { assertThat(ObjectPath.eval("status.job_state", job), expectedStates); @@ -541,7 +536,7 @@ public class FullClusterRestartIT extends ESRestTestCase { final Request taskRequest = new Request("GET", "_tasks"); taskRequest.addParameter("detailed", "true"); taskRequest.addParameter("actions", "xpack/rollup/*"); - Map taskResponse = toMap(client().performRequest(taskRequest)); + Map taskResponse = entityAsMap(client().performRequest(taskRequest)); Map taskResponseNodes = (Map) taskResponse.get("nodes"); Map taskResponseNode = (Map) taskResponseNodes.values().iterator().next(); Map taskResponseTasks = (Map) taskResponseNode.get("tasks"); @@ -550,7 +545,7 @@ public class FullClusterRestartIT extends ESRestTestCase { // check that the rollup 
job is started using the Cluster State API final Request clusterStateRequest = new Request("GET", "_cluster/state/metadata"); - Map clusterStateResponse = toMap(client().performRequest(clusterStateRequest)); + Map clusterStateResponse = entityAsMap(client().performRequest(clusterStateRequest)); List> rollupJobTasks = ObjectPath.eval("metadata.persistent_tasks.tasks", clusterStateResponse); boolean hasRollupTask = false; diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle index 2981c0e4ec6..d2818bfc127 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -121,7 +121,7 @@ integTestRunner { if (project.rootProject.vagrantSupported == false) { integTest.enabled = false } else { - project.sourceSets.test.output.dir(generatedResources, builtBy: copyKeytabToGeneratedResources) + project.sourceSets.test.output.dir(generatedResources) integTestCluster.dependsOn krb5AddPrincipals, krb5kdcFixture, copyKeytabToGeneratedResources integTest.finalizedBy project(':test:fixtures:krb5kdc-fixture').halt }