diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 690bceb1e1a..dc90fcced31 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -4,7 +4,7 @@ lucene = 7.5.0-snapshot-608f0277b0 # optional dependencies spatial4j = 0.7 jts = 1.15.0 -jackson = 2.8.10 +jackson = 2.8.11 snakeyaml = 1.17 # when updating log4j, please update also docs/java-api/index.asciidoc log4j = 2.11.1 diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index d6acd0eb68b..3920988dcb9 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -163,8 +163,11 @@ import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipel import org.elasticsearch.search.aggregations.pipeline.derivative.ParsedDerivative; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; +import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.elasticsearch.search.suggest.term.TermSuggestion; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import java.io.Closeable; import java.io.IOException; @@ -1151,11 +1154,11 @@ public class RestHighLevelClient implements Closeable { List entries = map.entrySet().stream() .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) .collect(Collectors.toList()); - entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(TermSuggestion.NAME), + entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new 
ParseField(TermSuggestionBuilder.SUGGESTION_NAME), (parser, context) -> TermSuggestion.fromXContent(parser, (String)context))); - entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(PhraseSuggestion.NAME), + entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(PhraseSuggestionBuilder.SUGGESTION_NAME), (parser, context) -> PhraseSuggestion.fromXContent(parser, (String)context))); - entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(CompletionSuggestion.NAME), + entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(CompletionSuggestionBuilder.SUGGESTION_NAME), (parser, context) -> CompletionSuggestion.fromXContent(parser, (String)context))); return entries; } diff --git a/client/sniffer/licenses/jackson-core-2.8.10.jar.sha1 b/client/sniffer/licenses/jackson-core-2.8.10.jar.sha1 deleted file mode 100644 index a322d371e26..00000000000 --- a/client/sniffer/licenses/jackson-core-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb21a035c66ad307e66ec8fce37f5d50fd62d039 \ No newline at end of file diff --git a/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1 b/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1 new file mode 100644 index 00000000000..e7ad1e74ed6 --- /dev/null +++ b/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1 @@ -0,0 +1 @@ +876ead1db19f0c9e79c9789273a3ef8c6fd6c29b \ No newline at end of file diff --git a/distribution/packages/src/common/systemd/elasticsearch.service b/distribution/packages/src/common/systemd/elasticsearch.service index 409f04f76d0..a4d67d8830a 100644 --- a/distribution/packages/src/common/systemd/elasticsearch.service +++ b/distribution/packages/src/common/systemd/elasticsearch.service @@ -6,6 +6,7 @@ After=network-online.target [Service] RuntimeDirectory=elasticsearch +PrivateTmp=true Environment=ES_HOME=/usr/share/elasticsearch Environment=ES_PATH_CONF=${path.conf} 
Environment=PID_DIR=/var/run/elasticsearch diff --git a/docs/reference/release-notes/7.0.0-alpha1.asciidoc b/docs/reference/release-notes/7.0.0-alpha1.asciidoc index cf2e1e30be0..c3a03d77f81 100644 --- a/docs/reference/release-notes/7.0.0-alpha1.asciidoc +++ b/docs/reference/release-notes/7.0.0-alpha1.asciidoc @@ -21,4 +21,10 @@ Aggregations:: * The Percentiles and PercentileRanks aggregations now return `null` in the REST response, instead of `NaN`. This makes it consistent with the rest of the aggregations. Note: this only applies to the REST response, the java objects continue to return `NaN` (also - consistent with other aggregations) \ No newline at end of file + consistent with other aggregations) + +Suggesters:: +* Plugins that register suggesters can now define their own types of suggestions and must + explicitly indicate the type of suggestion that they produce. Existing plugins will + require changes to their plugin registration. See the `custom-suggester` example + plugin {pull}30284[#30284] \ No newline at end of file diff --git a/docs/reference/setup/important-settings.asciidoc b/docs/reference/setup/important-settings.asciidoc index b9b99b70803..8a9b59480a0 100644 --- a/docs/reference/setup/important-settings.asciidoc +++ b/docs/reference/setup/important-settings.asciidoc @@ -14,6 +14,7 @@ The following settings *must* be considered before going to production: * <> * <> * <> +* <> include::important-settings/path-settings.asciidoc[] @@ -31,4 +32,6 @@ include::important-settings/heap-dump-path.asciidoc[] include::important-settings/gc-logging.asciidoc[] +include::important-settings/es-tmpdir.asciidoc[] + include::important-settings/error-file.asciidoc[] diff --git a/docs/reference/setup/important-settings/es-tmpdir.asciidoc b/docs/reference/setup/important-settings/es-tmpdir.asciidoc new file mode 100644 index 00000000000..20959d969b8 --- /dev/null +++ b/docs/reference/setup/important-settings/es-tmpdir.asciidoc @@ -0,0 +1,23 @@ +[[es-tmpdir]] +=== 
Temp directory + +By default, Elasticsearch uses a private temporary directory that the startup +script creates immediately below the system temporary directory. + +On some Linux distributions a system utility will clean files and directories +from `/tmp` if they have not been recently accessed. This can lead to the +private temporary directory being removed while Elasticsearch is running if +features that require the temporary directory are not used for a long time. +This causes problems if a feature that requires the temporary directory is +subsequently used. + +If you install Elasticsearch using the `.deb` or `.rpm` packages and run it +under `systemd` then the private temporary directory that Elasticsearch uses +is excluded from periodic cleanup. + +However, if you intend to run the `.tar.gz` distribution on Linux for an +extended period then you should consider creating a dedicated temporary +directory for Elasticsearch that is not under a path that will have old files +and directories cleaned from it. This directory should have permissions set +so that only the user that Elasticsearch runs as can access it. Then set the +`$ES_TMPDIR` environment variable to point to it before starting Elasticsearch. 
diff --git a/libs/x-content/licenses/jackson-core-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-core-2.8.10.jar.sha1 deleted file mode 100644 index a322d371e26..00000000000 --- a/libs/x-content/licenses/jackson-core-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb21a035c66ad307e66ec8fce37f5d50fd62d039 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-core-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-core-2.8.11.jar.sha1 new file mode 100644 index 00000000000..e7ad1e74ed6 --- /dev/null +++ b/libs/x-content/licenses/jackson-core-2.8.11.jar.sha1 @@ -0,0 +1 @@ +876ead1db19f0c9e79c9789273a3ef8c6fd6c29b \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.8.10.jar.sha1 deleted file mode 100644 index 1d3e18e21a6..00000000000 --- a/libs/x-content/licenses/jackson-dataformat-cbor-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1c58cc9313ddf19f0900cd61ed044874278ce320 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.8.11.jar.sha1 new file mode 100644 index 00000000000..378ba524422 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.8.11.jar.sha1 @@ -0,0 +1 @@ +8b9826e16c3366764bfb7ad7362554f0471046c3 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.8.10.jar.sha1 deleted file mode 100644 index 4f4cacde220..00000000000 --- a/libs/x-content/licenses/jackson-dataformat-smile-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e853081fadaad3e98ed801937acc3d8f77580686 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.8.11.jar.sha1 new file mode 100644 index 00000000000..510afb3df53 --- /dev/null +++ 
b/libs/x-content/licenses/jackson-dataformat-smile-2.8.11.jar.sha1 @@ -0,0 +1 @@ +d9d1c49c5d9d5e46e2aee55f3cdd119286fe0fc1 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.8.10.jar.sha1 deleted file mode 100644 index 40bcb05f697..00000000000 --- a/libs/x-content/licenses/jackson-dataformat-yaml-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1e08caf1d787c825307d8cc6362452086020d853 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.8.11.jar.sha1 new file mode 100644 index 00000000000..78a68d715ec --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.8.11.jar.sha1 @@ -0,0 +1 @@ +2e77c6ff7342cd61ab1ae7cb14ed16aebfc8a72a \ No newline at end of file diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 1db1e4f7cac..4752c2b2685 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -279,10 +279,6 @@ public final class Def { String type = signature.substring(1, separator); String call = signature.substring(separator+1, separator2); int numCaptures = Integer.parseInt(signature.substring(separator2+1)); - Class captures[] = new Class[numCaptures]; - for (int capture = 0; capture < captures.length; capture++) { - captures[capture] = callSiteType.parameterType(i + 1 + capture); - } MethodHandle filter; Class interfaceType = method.typeParameters.get(i - 1 - replaced); if (signature.charAt(0) == 'S') { @@ -294,11 +290,15 @@ public final class Def { interfaceType, type, call, - captures); + numCaptures); } else if (signature.charAt(0) == 'D') { // the interface type is now known, but we need to get the implementation. 
// this is dynamically based on the receiver type (and cached separately, underneath // this cache). It won't blow up since we never nest here (just references) + Class captures[] = new Class[numCaptures]; + for (int capture = 0; capture < captures.length; capture++) { + captures[capture] = callSiteType.parameterType(i + 1 + capture); + } MethodType nestedType = MethodType.methodType(interfaceType, captures); CallSite nested = DefBootstrap.bootstrap(painlessLookup, localMethods, @@ -331,57 +331,34 @@ public final class Def { */ static MethodHandle lookupReference(PainlessLookup painlessLookup, Map localMethods, MethodHandles.Lookup methodHandlesLookup, String interfaceClass, Class receiverClass, String name) throws Throwable { - Class interfaceType = painlessLookup.canonicalTypeNameToType(interfaceClass); - PainlessMethod interfaceMethod = painlessLookup.lookupPainlessClass(interfaceType).functionalMethod; - if (interfaceMethod == null) { - throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface"); - } - int arity = interfaceMethod.typeParameters.size(); - PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity); + Class interfaceType = painlessLookup.canonicalTypeNameToType(interfaceClass); + PainlessMethod interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(interfaceType); + if (interfaceMethod == null) { + throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface"); + } + int arity = interfaceMethod.typeParameters.size(); + PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity); return lookupReferenceInternal(painlessLookup, localMethods, methodHandlesLookup, interfaceType, PainlessLookupUtility.typeToCanonicalTypeName(implMethod.targetClass), - implMethod.javaMethod.getName(), receiverClass); + implMethod.javaMethod.getName(), 1); } /** Returns a method handle to an implementation of 
clazz, given method reference signature. */ private static MethodHandle lookupReferenceInternal(PainlessLookup painlessLookup, Map localMethods, - MethodHandles.Lookup methodHandlesLookup, Class clazz, String type, String call, Class... captures) throws Throwable { - final FunctionRef ref; - if ("this".equals(type)) { - // user written method - PainlessMethod interfaceMethod = painlessLookup.lookupPainlessClass(clazz).functionalMethod; - if (interfaceMethod == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(clazz) + "], not a functional interface"); - } - int arity = interfaceMethod.typeParameters.size() + captures.length; - LocalMethod localMethod = localMethods.get(Locals.buildLocalMethodKey(call, arity)); - if (localMethod == null) { - // is it a synthetic method? If we generated the method ourselves, be more helpful. It can only fail - // because the arity does not match the expected interface type. - if (call.contains("$")) { - throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.javaMethod.getName() + - "] in [" + clazz + "]"); - } - throw new IllegalArgumentException("Unknown call [" + call + "] with [" + arity + "] arguments."); - } - ref = new FunctionRef(clazz, interfaceMethod, call, localMethod.methodType, captures.length); - } else { - // whitelist lookup - ref = FunctionRef.resolveFromLookup(painlessLookup, clazz, type, call, captures.length); - } - final CallSite callSite = LambdaBootstrap.lambdaBootstrap( - methodHandlesLookup, - ref.interfaceMethodName, - ref.factoryMethodType, - ref.interfaceMethodType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateMethodType, - ref.isDelegateInterface ? 
1 : 0 - ); - return callSite.dynamicInvoker().asType(MethodType.methodType(clazz, captures)); + MethodHandles.Lookup methodHandlesLookup, Class clazz, String type, String call, int captures) throws Throwable { + final FunctionRef ref = FunctionRef.create(painlessLookup, localMethods, null, clazz, type, call, captures); + final CallSite callSite = LambdaBootstrap.lambdaBootstrap( + methodHandlesLookup, + ref.interfaceMethodName, + ref.factoryMethodType, + ref.interfaceMethodType, + ref.delegateClassName, + ref.delegateInvokeType, + ref.delegateMethodName, + ref.delegateMethodType, + ref.isDelegateInterface ? 1 : 0 + ); + return callSite.dynamicInvoker().asType(MethodType.methodType(clazz, ref.factoryMethodType.parameterArray())); } /** diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index cc558489446..065f63dc3f5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -20,17 +20,17 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.Locals.LocalMethod; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.objectweb.asm.Type; import java.lang.invoke.MethodType; -import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; +import java.util.ArrayList; import java.util.List; +import java.util.Map; +import java.util.Objects; import static org.elasticsearch.painless.WriterConstants.CLASS_NAME; import static org.objectweb.asm.Opcodes.H_INVOKEINTERFACE; @@ -39,251 +39,210 @@ import static org.objectweb.asm.Opcodes.H_INVOKEVIRTUAL; import 
static org.objectweb.asm.Opcodes.H_NEWINVOKESPECIAL; /** - * Reference to a function or lambda. - *

- * Once you have created one of these, you have "everything you need" to call {@link LambdaBootstrap} - * either statically from bytecode with invokedynamic, or at runtime from Java. + * Contains all the values necessary to write the instruction to initiate a + * {@link LambdaBootstrap} for either a function reference or a user-defined + * lambda function. */ public class FunctionRef { + /** + * Creates a new FunctionRef which will resolve {@code type::call} from the whitelist. + * @param painlessLookup the whitelist against which this script is being compiled + * @param localMethods user-defined and synthetic methods generated directly on the script class + * @param location the character number within the script at compile-time + * @param targetClass functional interface type to implement. + * @param typeName the left hand side of a method reference expression + * @param methodName the right hand side of a method reference expression + * @param numberOfCaptures number of captured arguments + */ + public static FunctionRef create(PainlessLookup painlessLookup, Map localMethods, Location location, + Class targetClass, String typeName, String methodName, int numberOfCaptures) { + + Objects.requireNonNull(painlessLookup); + Objects.requireNonNull(targetClass); + Objects.requireNonNull(typeName); + Objects.requireNonNull(methodName); + + String targetClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass); + PainlessMethod interfaceMethod; + + try { + try { + interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(targetClass); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("cannot convert function reference [" + typeName + "::" + methodName + "] " + + "to a non-functional interface [" + targetClassName + "]", iae); + } + + String interfaceMethodName = interfaceMethod.javaMethod.getName(); + MethodType interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); + String delegateClassName; 
+ boolean isDelegateInterface; + int delegateInvokeType; + String delegateMethodName; + MethodType delegateMethodType; + + Class delegateMethodReturnType; + List> delegateMethodParameters; + int interfaceTypeParametersSize = interfaceMethod.typeParameters.size(); + + if ("this".equals(typeName)) { + Objects.requireNonNull(localMethods); + + if (numberOfCaptures < 0) { + throw new IllegalStateException("internal error"); + } + + String localMethodKey = Locals.buildLocalMethodKey(methodName, numberOfCaptures + interfaceTypeParametersSize); + LocalMethod localMethod = localMethods.get(localMethodKey); + + if (localMethod == null) { + throw new IllegalArgumentException("function reference [this::" + localMethodKey + "] " + + "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + + "not found" + (localMethodKey.contains("$") ? " due to an incorrect number of arguments" : "") + ); + } + + delegateClassName = CLASS_NAME; + isDelegateInterface = false; + delegateInvokeType = H_INVOKESTATIC; + delegateMethodName = localMethod.name; + delegateMethodType = localMethod.methodType; + + delegateMethodReturnType = localMethod.returnType; + delegateMethodParameters = localMethod.typeParameters; + } else if ("new".equals(methodName)) { + if (numberOfCaptures != 0) { + throw new IllegalStateException("internal error"); + } + + PainlessConstructor painlessConstructor; + + try { + painlessConstructor = painlessLookup.lookupPainlessConstructor(typeName, interfaceTypeParametersSize); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("function reference [" + typeName + "::new/" + interfaceTypeParametersSize + "] " + + "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + + "not found", iae); + } + + delegateClassName = painlessConstructor.javaConstructor.getDeclaringClass().getName(); + isDelegateInterface = false; + delegateInvokeType = H_NEWINVOKESPECIAL; + 
delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME; + delegateMethodType = painlessConstructor.methodType; + + delegateMethodReturnType = painlessConstructor.javaConstructor.getDeclaringClass(); + delegateMethodParameters = painlessConstructor.typeParameters; + } else { + if (numberOfCaptures != 0 && numberOfCaptures != 1) { + throw new IllegalStateException("internal error"); + } + + boolean captured = numberOfCaptures == 1; + PainlessMethod painlessMethod; + + try { + painlessMethod = painlessLookup.lookupPainlessMethod(typeName, true, methodName, interfaceTypeParametersSize); + + if (captured) { + throw new IllegalStateException("internal error"); + } + } catch (IllegalArgumentException staticIAE) { + try { + painlessMethod = painlessLookup.lookupPainlessMethod(typeName, false, methodName, + captured ? interfaceTypeParametersSize : interfaceTypeParametersSize - 1); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException( + "function reference " + "[" + typeName + "::" + methodName + "/" + interfaceTypeParametersSize + "] " + + "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + + "not found", iae); + } + } + + delegateClassName = painlessMethod.javaMethod.getDeclaringClass().getName(); + isDelegateInterface = painlessMethod.javaMethod.getDeclaringClass().isInterface(); + + if (Modifier.isStatic(painlessMethod.javaMethod.getModifiers())) { + delegateInvokeType = H_INVOKESTATIC; + } else if (isDelegateInterface) { + delegateInvokeType = H_INVOKEINTERFACE; + } else { + delegateInvokeType = H_INVOKEVIRTUAL; + } + + delegateMethodName = painlessMethod.javaMethod.getName(); + delegateMethodType = painlessMethod.methodType; + + delegateMethodReturnType = painlessMethod.returnType; + + if (delegateMethodType.parameterList().size() > painlessMethod.typeParameters.size()) { + delegateMethodParameters = new ArrayList<>(painlessMethod.typeParameters); + delegateMethodParameters.add(0, 
delegateMethodType.parameterType(0)); + } else { + delegateMethodParameters = painlessMethod.typeParameters; + } + } + + if (location != null) { + for (int typeParameter = 0; typeParameter < interfaceTypeParametersSize; ++typeParameter) { + Class from = interfaceMethod.typeParameters.get(typeParameter); + Class to = delegateMethodParameters.get(numberOfCaptures + typeParameter); + AnalyzerCaster.getLegalCast(location, from, to, false, true); + } + + if (interfaceMethod.returnType != void.class) { + AnalyzerCaster.getLegalCast(location, delegateMethodReturnType, interfaceMethod.returnType, false, true); + } + } + + MethodType factoryMethodType = MethodType.methodType(targetClass, + delegateMethodType.dropParameterTypes(numberOfCaptures, delegateMethodType.parameterCount())); + delegateMethodType = delegateMethodType.dropParameterTypes(0, numberOfCaptures); + + return new FunctionRef(interfaceMethodName, interfaceMethodType, + delegateClassName, isDelegateInterface, delegateInvokeType, delegateMethodName, delegateMethodType, + factoryMethodType + ); + } catch (IllegalArgumentException iae) { + if (location != null) { + throw location.createError(iae); + } + + throw iae; + } + } + /** functional interface method name */ public final String interfaceMethodName; - /** factory (CallSite) method signature */ - public final MethodType factoryMethodType; /** functional interface method signature */ public final MethodType interfaceMethodType; /** class of the delegate method to be called */ public final String delegateClassName; + /** whether a call is made on a delegate interface */ + public final boolean isDelegateInterface; /** the invocation type of the delegate method */ public final int delegateInvokeType; /** the name of the delegate method */ public final String delegateMethodName; /** delegate method signature */ public final MethodType delegateMethodType; + /** factory (CallSite) method signature */ + public final MethodType factoryMethodType; - /** interface 
method */ - public final PainlessMethod interfaceMethod; - /** delegate method type parameters */ - public final List> delegateTypeParameters; - /** delegate method return type */ - public final Class delegateReturnType; + private FunctionRef( + String interfaceMethodName, MethodType interfaceMethodType, + String delegateClassName, boolean isDelegateInterface, + int delegateInvokeType, String delegateMethodName, MethodType delegateMethodType, + MethodType factoryMethodType) { - /** factory method type descriptor */ - public final String factoryDescriptor; - /** functional interface method as type */ - public final Type interfaceType; - /** delegate method type method as type */ - public final Type delegateType; - - /** whether a call is made on a delegate interface */ - public final boolean isDelegateInterface; - - /** - * Creates a new FunctionRef, which will resolve {@code type::call} from the whitelist. - * @param painlessLookup the whitelist against which this script is being compiled - * @param expected functional interface type to implement. 
- * @param type the left hand side of a method reference expression - * @param call the right hand side of a method reference expression - * @param numCaptures number of captured arguments - */ - public static FunctionRef resolveFromLookup( - PainlessLookup painlessLookup, Class expected, String type, String call, int numCaptures) { - - if ("new".equals(call)) { - return new FunctionRef(expected, painlessLookup.lookupPainlessClass(expected).functionalMethod, - lookup(painlessLookup, expected, type), numCaptures); - } else { - return new FunctionRef(expected, painlessLookup.lookupPainlessClass(expected).functionalMethod, - lookup(painlessLookup, expected, type, call, numCaptures > 0), numCaptures); - } - } - - /** - * Creates a new FunctionRef (already resolved) - * @param expected functional interface type to implement - * @param interfaceMethod functional interface method - * @param delegateConstructor implementation constructor - * @param numCaptures number of captured arguments - */ - public FunctionRef(Class expected, PainlessMethod interfaceMethod, PainlessConstructor delegateConstructor, int numCaptures) { - Constructor javaConstructor = delegateConstructor.javaConstructor; - MethodType delegateMethodType = delegateConstructor.methodType; - - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = javaConstructor.getDeclaringClass().getName(); - this.isDelegateInterface = false; - this.delegateInvokeType = H_NEWINVOKESPECIAL; - this.delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME; - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); - - this.interfaceMethod = interfaceMethod; - this.delegateTypeParameters = delegateConstructor.typeParameters; - 
this.delegateReturnType = void.class; - - this.factoryDescriptor = factoryMethodType.toMethodDescriptorString(); - this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); - this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString()); - } - - /** - * Creates a new FunctionRef (already resolved) - * @param expected functional interface type to implement - * @param interfaceMethod functional interface method - * @param delegateMethod implementation method - * @param numCaptures number of captured arguments - */ - public FunctionRef(Class expected, PainlessMethod interfaceMethod, PainlessMethod delegateMethod, int numCaptures) { - MethodType delegateMethodType = delegateMethod.methodType; - - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = delegateMethod.javaMethod.getDeclaringClass().getName(); - this.isDelegateInterface = delegateMethod.javaMethod.getDeclaringClass().isInterface(); - - if (Modifier.isStatic(delegateMethod.javaMethod.getModifiers())) { - this.delegateInvokeType = H_INVOKESTATIC; - } else if (delegateMethod.javaMethod.getDeclaringClass().isInterface()) { - this.delegateInvokeType = H_INVOKEINTERFACE; - } else { - this.delegateInvokeType = H_INVOKEVIRTUAL; - } - - this.delegateMethodName = delegateMethod.javaMethod.getName(); - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); - - this.interfaceMethod = interfaceMethod; - this.delegateTypeParameters = delegateMethod.typeParameters; - this.delegateReturnType = delegateMethod.returnType; - - this.factoryDescriptor = factoryMethodType.toMethodDescriptorString(); - this.interfaceType = 
Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); - this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString()); - } - - /** - * Creates a new FunctionRef (already resolved) - * @param expected functional interface type to implement - * @param interfaceMethod functional interface method - * @param delegateMethod implementation method - * @param numCaptures number of captured arguments - */ - public FunctionRef(Class expected, PainlessMethod interfaceMethod, LocalMethod delegateMethod, int numCaptures) { - MethodType delegateMethodType = delegateMethod.methodType; - - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = CLASS_NAME; - this.isDelegateInterface = false; - this.delegateInvokeType = H_INVOKESTATIC; - - this.delegateMethodName = delegateMethod.name; - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); - - this.interfaceMethod = interfaceMethod; - this.delegateTypeParameters = delegateMethod.typeParameters; - this.delegateReturnType = delegateMethod.returnType; - - this.factoryDescriptor = factoryMethodType.toMethodDescriptorString(); - this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); - this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString()); - } - - /** - * Creates a new FunctionRef (low level). - * It is for runtime use only. 
- */ - public FunctionRef(Class expected, - PainlessMethod interfaceMethod, String delegateMethodName, MethodType delegateMethodType, int numCaptures) { - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = CLASS_NAME; - this.delegateInvokeType = H_INVOKESTATIC; + this.interfaceMethodName = interfaceMethodName; + this.interfaceMethodType = interfaceMethodType; + this.delegateClassName = delegateClassName; + this.isDelegateInterface = isDelegateInterface; + this.delegateInvokeType = delegateInvokeType; this.delegateMethodName = delegateMethodName; - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); - this.isDelegateInterface = false; - - this.interfaceMethod = null; - this.delegateTypeParameters = null; - this.delegateReturnType = null; - - this.factoryDescriptor = null; - this.interfaceType = null; - this.delegateType = null; - } - - /** - * Looks up {@code type} from the whitelist, and returns a matching constructor. - */ - private static PainlessConstructor lookup(PainlessLookup painlessLookup, Class expected, String type) { - // check its really a functional interface - // for e.g. 
Comparable - PainlessMethod method = painlessLookup.lookupPainlessClass(expected).functionalMethod; - if (method == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::new] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); - } - - // lookup requested constructor - PainlessClass struct = painlessLookup.lookupPainlessClass(painlessLookup.canonicalTypeNameToType(type)); - PainlessConstructor impl = struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(method.typeParameters.size())); - - if (impl == null) { - throw new IllegalArgumentException("Unknown reference [" + type + "::new] matching [" + expected + "]"); - } - - return impl; - } - - /** - * Looks up {@code type::call} from the whitelist, and returns a matching method. - */ - private static PainlessMethod lookup(PainlessLookup painlessLookup, Class expected, - String type, String call, boolean receiverCaptured) { - // check its really a functional interface - // for e.g. 
Comparable - PainlessMethod method = painlessLookup.lookupPainlessClass(expected).functionalMethod; - if (method == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); - } - - // lookup requested method - PainlessClass struct = painlessLookup.lookupPainlessClass(painlessLookup.canonicalTypeNameToType(type)); - final PainlessMethod impl; - // look for a static impl first - PainlessMethod staticImpl = - struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.typeParameters.size())); - if (staticImpl == null) { - // otherwise a virtual impl - final int arity; - if (receiverCaptured) { - // receiver captured - arity = method.typeParameters.size(); - } else { - // receiver passed - arity = method.typeParameters.size() - 1; - } - impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity)); - } else { - impl = staticImpl; - } - if (impl == null) { - throw new IllegalArgumentException("Unknown reference [" + type + "::" + call + "] matching " + - "[" + expected + "]"); - } - return impl; + this.delegateMethodType = delegateMethodType; + this.factoryMethodType = factoryMethodType; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java index 72435562a3b..df5f7966c35 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java @@ -56,6 +56,7 @@ import static org.elasticsearch.painless.WriterConstants.DEF_TO_SHORT_EXPLICIT; import static org.elasticsearch.painless.WriterConstants.DEF_TO_SHORT_IMPLICIT; import static org.elasticsearch.painless.WriterConstants.DEF_UTIL_TYPE; import static 
org.elasticsearch.painless.WriterConstants.INDY_STRING_CONCAT_BOOTSTRAP_HANDLE; +import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; import static org.elasticsearch.painless.WriterConstants.MAX_INDY_STRING_CONCAT_ARGS; import static org.elasticsearch.painless.WriterConstants.PAINLESS_ERROR_TYPE; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_BOOLEAN; @@ -439,4 +440,18 @@ public final class MethodWriter extends GeneratorAdapter { invokeVirtual(type, method); } } + + public void invokeLambdaCall(FunctionRef functionRef) { + invokeDynamic( + functionRef.interfaceMethodName, + functionRef.factoryMethodType.toMethodDescriptorString(), + LAMBDA_BOOTSTRAP_HANDLE, + Type.getMethodType(functionRef.interfaceMethodType.toMethodDescriptorString()), + functionRef.delegateClassName, + functionRef.delegateInvokeType, + functionRef.delegateMethodName, + Type.getMethodType(functionRef.delegateMethodType.toMethodDescriptorString()), + functionRef.isDelegateInterface ? 
1 : 0 + ); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index 835bfb5c505..50bb79dcfbd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -35,13 +35,13 @@ public final class PainlessClass { public final Map getterMethodHandles; public final Map setterMethodHandles; - public final PainlessMethod functionalMethod; + public final PainlessMethod functionalInterfaceMethod; PainlessClass(Map constructors, Map staticMethods, Map methods, Map staticFields, Map fields, Map getterMethodHandles, Map setterMethodHandles, - PainlessMethod functionalMethod) { + PainlessMethod functionalInterfaceMethod) { this.constructors = Collections.unmodifiableMap(constructors); @@ -54,6 +54,6 @@ public final class PainlessClass { this.getterMethodHandles = Collections.unmodifiableMap(getterMethodHandles); this.setterMethodHandles = Collections.unmodifiableMap(setterMethodHandles); - this.functionalMethod = functionalMethod; + this.functionalInterfaceMethod = functionalInterfaceMethod; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java index 866f711ba0f..a61215e9ed7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java @@ -35,7 +35,7 @@ final class PainlessClassBuilder { final Map getterMethodHandles; final Map setterMethodHandles; - PainlessMethod functionalMethod; + PainlessMethod functionalInterfaceMethod; PainlessClassBuilder() { constructors = new HashMap<>(); @@ -49,11 +49,11 
@@ final class PainlessClassBuilder { getterMethodHandles = new HashMap<>(); setterMethodHandles = new HashMap<>(); - functionalMethod = null; + functionalInterfaceMethod = null; } PainlessClass build() { return new PainlessClass(constructors, staticMethods, methods, staticFields, fields, - getterMethodHandles, setterMethodHandles, functionalMethod); + getterMethodHandles, setterMethodHandles, functionalInterfaceMethod); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index 786248f7269..adaf45aaa0b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -62,6 +62,14 @@ public final class PainlessLookup { return classesToPainlessClasses.get(targetClass); } + public PainlessConstructor lookupPainlessConstructor(String targetClassName, int constructorArity) { + Objects.requireNonNull(targetClassName); + + Class targetClass = canonicalTypeNameToType(targetClassName); + + return lookupPainlessConstructor(targetClass, constructorArity); + } + public PainlessConstructor lookupPainlessConstructor(Class targetClass, int constructorArity) { Objects.requireNonNull(targetClass); @@ -83,6 +91,14 @@ public final class PainlessLookup { return painlessConstructor; } + public PainlessMethod lookupPainlessMethod(String targetClassName, boolean isStatic, String methodName, int methodArity) { + Objects.requireNonNull(targetClassName); + + Class targetClass = canonicalTypeNameToType(targetClassName); + + return lookupPainlessMethod(targetClass, isStatic, methodName, methodArity); + } + public PainlessMethod lookupPainlessMethod(Class targetClass, boolean isStatic, String methodName, int methodArity) { Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); @@ -111,6 +127,14 @@ 
public final class PainlessLookup { return painlessMethod; } + public PainlessField lookupPainlessField(String targetClassName, boolean isStatic, String fieldName) { + Objects.requireNonNull(targetClassName); + + Class targetClass = canonicalTypeNameToType(targetClassName); + + return lookupPainlessField(targetClass, isStatic, fieldName); + } + public PainlessField lookupPainlessField(Class targetClass, boolean isStatic, String fieldName) { Objects.requireNonNull(targetClass); Objects.requireNonNull(fieldName); @@ -134,4 +158,20 @@ public final class PainlessLookup { return painlessField; } + + public PainlessMethod lookupFunctionalInterfacePainlessMethod(Class targetClass) { + PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass); + + if (targetPainlessClass == null) { + throw new IllegalArgumentException("target class [" + typeToCanonicalTypeName(targetClass) + "] not found"); + } + + PainlessMethod functionalInterfacePainlessMethod = targetPainlessClass.functionalInterfaceMethod; + + if (functionalInterfacePainlessMethod == null) { + throw new IllegalArgumentException("target class [" + typeToCanonicalTypeName(targetClass) + "] is not a functional interface"); + } + + return functionalInterfacePainlessMethod; + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 799650c2c5d..45a5e188db3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -875,7 +875,7 @@ public final class PainlessLookupBuilder { } else if (javaMethods.size() == 1) { java.lang.reflect.Method javaMethod = javaMethods.get(0); String painlessMethodKey = buildPainlessMethodKey(javaMethod.getName(), javaMethod.getParameterCount()); - 
painlessClassBuilder.functionalMethod = painlessClassBuilder.methods.get(painlessMethodKey); + painlessClassBuilder.functionalInterfaceMethod = painlessClassBuilder.methods.get(painlessMethodKey); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index e78b3c67210..a649fa7611c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; @@ -35,8 +34,6 @@ import org.objectweb.asm.Type; import java.util.Objects; import java.util.Set; -import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; - /** * Represents a capturing function reference. 
*/ @@ -76,23 +73,8 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda defPointer = null; // static case if (captured.clazz != def.class) { - try { - ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected, - PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1); - - // check casts between the interface method and the delegate method are legal - for (int i = 0; i < ref.interfaceMethod.typeParameters.size(); ++i) { - Class from = ref.interfaceMethod.typeParameters.get(i); - Class to = ref.delegateTypeParameters.get(i); - AnalyzerCaster.getLegalCast(location, from, to, false, true); - } - - if (ref.interfaceMethod.returnType != void.class) { - AnalyzerCaster.getLegalCast(location, ref.delegateReturnType, ref.interfaceMethod.returnType, false, true); - } - } catch (IllegalArgumentException e) { - throw createError(e); - } + ref = FunctionRef.create(locals.getPainlessLookup(), locals.getMethods(), location, + expected, PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1); } actual = expected; } @@ -114,17 +96,7 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda } else { // typed interface, typed implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); - writer.invokeDynamic( - ref.interfaceMethodName, - ref.factoryDescriptor, - LAMBDA_BOOTSTRAP_HANDLE, - ref.interfaceType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateType, - ref.isDelegateInterface ? 
1 : 0 - ); + writer.invokeLambdaCall(ref); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index 692581d8118..c97cc66c7c7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -19,22 +19,16 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.lookup.PainlessLookupUtility; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.objectweb.asm.Type; import java.util.Objects; import java.util.Set; -import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; - /** * Represents a function reference. */ @@ -63,39 +57,7 @@ public final class EFunctionRef extends AExpression implements ILambda { defPointer = "S" + type + "." 
+ call + ",0"; } else { defPointer = null; - try { - if ("this".equals(type)) { - // user's own function - PainlessMethod interfaceMethod = locals.getPainlessLookup().lookupPainlessClass(expected).functionalMethod; - if (interfaceMethod == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); - } - LocalMethod delegateMethod = locals.getMethod(call, interfaceMethod.typeParameters.size()); - if (delegateMethod == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], function not found"); - } - ref = new FunctionRef(expected, interfaceMethod, delegateMethod, 0); - - // check casts between the interface method and the delegate method are legal - for (int i = 0; i < interfaceMethod.typeParameters.size(); ++i) { - Class from = interfaceMethod.typeParameters.get(i); - Class to = delegateMethod.typeParameters.get(i); - AnalyzerCaster.getLegalCast(location, from, to, false, true); - } - - if (interfaceMethod.returnType != void.class) { - AnalyzerCaster.getLegalCast(location, delegateMethod.returnType, interfaceMethod.returnType, false, true); - } - } else { - // whitelist lookup - ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected, type, call, 0); - } - - } catch (IllegalArgumentException e) { - throw createError(e); - } + ref = FunctionRef.create(locals.getPainlessLookup(), locals.getMethods(), location, expected, type, call, 0); actual = expected; } } @@ -104,17 +66,7 @@ public final class EFunctionRef extends AExpression implements ILambda { void write(MethodWriter writer, Globals globals) { if (ref != null) { writer.writeDebugInfo(location); - writer.invokeDynamic( - ref.interfaceMethodName, - ref.factoryDescriptor, - LAMBDA_BOOTSTRAP_HANDLE, - 
ref.interfaceType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateType, - ref.isDelegateInterface ? 1 : 0 - ); + writer.invokeLambdaCall(ref); } else { // TODO: don't do this: its just to cutover :) writer.push((String)null); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index ecd11ce1bf7..af906416ca7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -19,11 +19,9 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; @@ -40,8 +38,6 @@ import java.util.List; import java.util.Objects; import java.util.Set; -import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; - /** * Lambda expression node. *

@@ -122,7 +118,7 @@ public final class ELambda extends AExpression implements ILambda { } else { // we know the method statically, infer return type and any unknown/def types - interfaceMethod = locals.getPainlessLookup().lookupPainlessClass(expected).functionalMethod; + interfaceMethod = locals.getPainlessLookup().lookupFunctionalInterfacePainlessMethod(expected); if (interfaceMethod == null) { throw createError(new IllegalArgumentException("Cannot pass lambda to " + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface")); @@ -184,25 +180,8 @@ public final class ELambda extends AExpression implements ILambda { defPointer = "Sthis." + name + "," + captures.size(); } else { defPointer = null; - try { - LocalMethod localMethod = - new LocalMethod(desugared.name, desugared.returnType, desugared.typeParameters, desugared.methodType); - ref = new FunctionRef(expected, interfaceMethod, localMethod, captures.size()); - } catch (IllegalArgumentException e) { - throw createError(e); - } - - // check casts between the interface method and the delegate method are legal - for (int i = 0; i < interfaceMethod.typeParameters.size(); ++i) { - Class from = interfaceMethod.typeParameters.get(i); - Class to = desugared.parameters.get(i + captures.size()).clazz; - AnalyzerCaster.getLegalCast(location, from, to, false, true); - } - - if (interfaceMethod.returnType != void.class) { - AnalyzerCaster.getLegalCast(location, desugared.returnType, interfaceMethod.returnType, false, true); - } - + ref = FunctionRef.create( + locals.getPainlessLookup(), locals.getMethods(), location, expected, "this", desugared.name, captures.size()); actual = expected; } } @@ -218,17 +197,7 @@ public final class ELambda extends AExpression implements ILambda { writer.visitVarInsn(MethodWriter.getType(capture.clazz).getOpcode(Opcodes.ILOAD), capture.getSlot()); } - writer.invokeDynamic( - ref.interfaceMethodName, - ref.factoryDescriptor, - LAMBDA_BOOTSTRAP_HANDLE, - 
ref.interfaceType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateType, - ref.isDelegateInterface ? 1 : 0 - ); + writer.invokeLambdaCall(ref); } else { // placeholder writer.push((String)null); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java index fd47db6b83d..5829593f524 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java @@ -27,7 +27,6 @@ import java.lang.invoke.LambdaConversionException; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.startsWith; public class FunctionRefTests extends ScriptTestCase { @@ -193,14 +192,15 @@ public class FunctionRefTests extends ScriptTestCase { Exception e = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = [2, 1]; l.sort(Integer::bogus); return l.get(0);"); }); - assertThat(e.getMessage(), startsWith("Unknown reference")); + assertThat(e.getMessage(), containsString("function reference [Integer::bogus/2] matching [java.util.Comparator")); } public void testQualifiedMethodMissing() { Exception e = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = [2, 1]; l.sort(org.joda.time.ReadableDateTime::bogus); return l.get(0);", false); }); - assertThat(e.getMessage(), startsWith("Unknown reference")); + assertThat(e.getMessage(), + containsString("function reference [org.joda.time.ReadableDateTime::bogus/2] matching [java.util.Comparator")); } public void testClassMissing() { @@ -223,11 +223,12 @@ public class FunctionRefTests extends ScriptTestCase { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { 
exec("List l = new ArrayList(); l.add(2); l.add(1); l.add(Integer::bogus); return l.get(0);"); }); - assertThat(expected.getMessage(), containsString("Cannot convert function reference")); + assertThat(expected.getMessage(), + containsString("cannot convert function reference [Integer::bogus] to a non-functional interface [def]")); } public void testIncompatible() { - expectScriptThrows(BootstrapMethodError.class, () -> { + expectScriptThrows(ClassCastException.class, () -> { exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::startsWith); return l.get(0);"); }); } @@ -236,28 +237,32 @@ public class FunctionRefTests extends ScriptTestCase { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("Optional.empty().orElseGet(String::startsWith);"); }); - assertThat(expected.getMessage(), containsString("Unknown reference")); + assertThat(expected.getMessage(), + containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier")); } public void testWrongArityNotEnough() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);"); }); - assertTrue(expected.getMessage().contains("Unknown reference")); + assertThat(expected.getMessage(), containsString( + "function reference [String::isEmpty/2] matching [java.util.Comparator")); } public void testWrongArityDef() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def y = Optional.empty(); return y.orElseGet(String::startsWith);"); }); - assertThat(expected.getMessage(), containsString("Unknown reference")); + assertThat(expected.getMessage(), + containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier")); } public void testWrongArityNotEnoughDef() { IllegalArgumentException expected = 
expectScriptThrows(IllegalArgumentException.class, () -> { exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);"); }); - assertThat(expected.getMessage(), containsString("Unknown reference")); + assertThat(expected.getMessage(), + containsString("function reference [String::isEmpty/2] matching [java.util.Comparator")); } public void testReturnVoid() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java index 20e257e5747..1f1a6f95b36 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java @@ -184,7 +184,7 @@ public class LambdaTests extends ScriptTestCase { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def y = Optional.empty(); return y.orElseGet(x -> x);"); }); - assertTrue(expected.getMessage(), expected.getMessage().contains("Incorrect number of parameters")); + assertTrue(expected.getMessage(), expected.getMessage().contains("due to an incorrect number of arguments")); } public void testWrongArityNotEnough() { @@ -200,7 +200,7 @@ public class LambdaTests extends ScriptTestCase { exec("def l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(() -> 5).sum();"); }); - assertTrue(expected.getMessage().contains("Incorrect number of parameters")); + assertTrue(expected.getMessage(), expected.getMessage().contains("due to an incorrect number of arguments")); } public void testLambdaInFunction() { diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index e28fbead29a..f18efe4585b 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ 
b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -756,7 +756,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder> IFD getForField(MappedFieldType fieldType) { - IndexFieldData.Builder builder = fieldType.fielddataBuilder(shardContext.getFullyQualifiedIndexName()); + IndexFieldData.Builder builder = fieldType.fielddataBuilder(shardContext.getFullyQualifiedIndex().getName()); IndexFieldDataCache cache = new IndexFieldDataCache.None(); CircuitBreakerService circuitBreaker = new NoneCircuitBreakerService(); return (IFD) builder.build(shardContext.getIndexSettings(), fieldType, cache, circuitBreaker, @@ -764,5 +764,4 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder { + + // This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123 + @Override + public Suggest.Suggestion> innerExecute( + String name, + CustomSuggestionContext suggestion, + IndexSearcher searcher, + CharsRefBuilder spare) { + + // Get the suggestion context + String text = suggestion.getText().utf8ToString(); + + // create two suggestions with 12 and 123 appended + CustomSuggestion response = new CustomSuggestion(name, suggestion.getSize(), "suggestion-dummy-value"); + + CustomSuggestion.Entry entry = new CustomSuggestion.Entry(new Text(text), 0, text.length(), "entry-dummy-value"); + + String firstOption = + String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "12"); + CustomSuggestion.Entry.Option option12 = new CustomSuggestion.Entry.Option(new Text(firstOption), 0.9f, "option-dummy-value-1"); + entry.addOption(option12); + + String secondOption = + String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "123"); + CustomSuggestion.Entry.Option option123 = new CustomSuggestion.Entry.Option(new Text(secondOption), 0.8f, "option-dummy-value-2"); + 
entry.addOption(option123); + + response.addTerm(entry); + + return response; + } +} diff --git a/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggesterPlugin.java b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggesterPlugin.java new file mode 100644 index 00000000000..91ffa672e53 --- /dev/null +++ b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggesterPlugin.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.example.customsuggester; + +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; + +import java.util.Collections; +import java.util.List; + +public class CustomSuggesterPlugin extends Plugin implements SearchPlugin { + @Override + public List> getSuggesters() { + return Collections.singletonList( + new SearchPlugin.SuggesterSpec<>( + CustomSuggestionBuilder.SUGGESTION_NAME, + CustomSuggestionBuilder::new, + CustomSuggestionBuilder::fromXContent, + CustomSuggestion::new + ) + ); + } +} diff --git a/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java new file mode 100644 index 00000000000..f7ec27b7af0 --- /dev/null +++ b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java @@ -0,0 +1,227 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.example.customsuggester; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.suggest.Suggest; + +import java.io.IOException; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +public class CustomSuggestion extends Suggest.Suggestion { + + public static final int TYPE = 999; + + public static final ParseField DUMMY = new ParseField("dummy"); + + private String dummy; + + public CustomSuggestion(String name, int size, String dummy) { + super(name, size); + this.dummy = dummy; + } + + public CustomSuggestion(StreamInput in) throws IOException { + super(in); + dummy = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(dummy); + } + + @Override + public String getWriteableName() { + return CustomSuggestionBuilder.SUGGESTION_NAME; + } + + @Override + public int getWriteableType() { + return TYPE; + } + + /** + * A meaningless value used to test that plugin suggesters can add fields to their Suggestion types + * + * This can't be serialized to xcontent because Suggestions appear in xcontent as an array of entries, so there is no place + * to add a custom field. 
But we can still use a custom field internally and use it to define a Suggestion's behavior + */ + public String getDummy() { + return dummy; + } + + @Override + protected Entry newEntry() { + return new Entry(); + } + + @Override + protected Entry newEntry(StreamInput in) throws IOException { + return new Entry(in); + } + + public static CustomSuggestion fromXContent(XContentParser parser, String name) throws IOException { + CustomSuggestion suggestion = new CustomSuggestion(name, -1, null); + parseEntries(parser, suggestion, Entry::fromXContent); + return suggestion; + } + + public static class Entry extends Suggest.Suggestion.Entry { + + private static final ObjectParser PARSER = new ObjectParser<>("CustomSuggestionEntryParser", true, Entry::new); + + static { + declareCommonFields(PARSER); + PARSER.declareString((entry, dummy) -> entry.dummy = dummy, DUMMY); + PARSER.declareObjectArray(Entry::addOptions, (p, c) -> Option.fromXContent(p), new ParseField(OPTIONS)); + } + + private String dummy; + + public Entry() {} + + public Entry(Text text, int offset, int length, String dummy) { + super(text, offset, length); + this.dummy = dummy; + } + + public Entry(StreamInput in) throws IOException { + super(in); + dummy = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(dummy); + } + + @Override + protected Option newOption() { + return new Option(); + } + + @Override + protected Option newOption(StreamInput in) throws IOException { + return new Option(in); + } + + /* + * the value of dummy will always be the same, so this just tests that we can merge entries with custom fields + */ + @Override + protected void merge(Suggest.Suggestion.Entry

+ * The getInput... methods return the actual number of + * fields/records sent to the API including invalid records. + * The getProcessed... methods are the number sent to the + * Engine. + *

+ * The inputRecordCount field is calculated so it + * should not be set in deserialization but it should be serialised + * so the field is visible. + */ +public class DataCounts implements ToXContentObject { + + public static final ParseField PROCESSED_RECORD_COUNT = new ParseField("processed_record_count"); + public static final ParseField PROCESSED_FIELD_COUNT = new ParseField("processed_field_count"); + public static final ParseField INPUT_BYTES = new ParseField("input_bytes"); + public static final ParseField INPUT_RECORD_COUNT = new ParseField("input_record_count"); + public static final ParseField INPUT_FIELD_COUNT = new ParseField("input_field_count"); + public static final ParseField INVALID_DATE_COUNT = new ParseField("invalid_date_count"); + public static final ParseField MISSING_FIELD_COUNT = new ParseField("missing_field_count"); + public static final ParseField OUT_OF_ORDER_TIME_COUNT = new ParseField("out_of_order_timestamp_count"); + public static final ParseField EMPTY_BUCKET_COUNT = new ParseField("empty_bucket_count"); + public static final ParseField SPARSE_BUCKET_COUNT = new ParseField("sparse_bucket_count"); + public static final ParseField BUCKET_COUNT = new ParseField("bucket_count"); + public static final ParseField EARLIEST_RECORD_TIME = new ParseField("earliest_record_timestamp"); + public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_timestamp"); + public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time"); + public static final ParseField LATEST_EMPTY_BUCKET_TIME = new ParseField("latest_empty_bucket_timestamp"); + public static final ParseField LATEST_SPARSE_BUCKET_TIME = new ParseField("latest_sparse_bucket_timestamp"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("data_counts", true, + a -> new DataCounts((String) a[0], (long) a[1], (long) a[2], (long) a[3], (long) a[4], (long) a[5], (long) a[6], + (long) a[7], (long) a[8], (long) a[9], 
(long) a[10], (Date) a[11], (Date) a[12], (Date) a[13], (Date) a[14], + (Date) a[15])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_RECORD_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_FIELD_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_BYTES); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_FIELD_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), INVALID_DATE_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), MISSING_FIELD_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), OUT_OF_ORDER_TIME_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), EMPTY_BUCKET_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), SPARSE_BUCKET_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_COUNT); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p) -> TimeUtil.parseTimeField(p, EARLIEST_RECORD_TIME.getPreferredName()), + EARLIEST_RECORD_TIME, + ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), + LATEST_RECORD_TIME, + ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p) -> TimeUtil.parseTimeField(p, LAST_DATA_TIME.getPreferredName()), + LAST_DATA_TIME, + ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p) -> TimeUtil.parseTimeField(p, LATEST_EMPTY_BUCKET_TIME.getPreferredName()), + LATEST_EMPTY_BUCKET_TIME, + ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p) -> TimeUtil.parseTimeField(p, LATEST_SPARSE_BUCKET_TIME.getPreferredName()), + LATEST_SPARSE_BUCKET_TIME, + 
ValueType.VALUE); + } + + private final String jobId; + private long processedRecordCount; + private long processedFieldCount; + private long inputBytes; + private long inputFieldCount; + private long invalidDateCount; + private long missingFieldCount; + private long outOfOrderTimeStampCount; + private long emptyBucketCount; + private long sparseBucketCount; + private long bucketCount; + private Date earliestRecordTimeStamp; + private Date latestRecordTimeStamp; + private Date lastDataTimeStamp; + private Date latestEmptyBucketTimeStamp; + private Date latestSparseBucketTimeStamp; + + public DataCounts(String jobId, long processedRecordCount, long processedFieldCount, long inputBytes, + long inputFieldCount, long invalidDateCount, long missingFieldCount, long outOfOrderTimeStampCount, + long emptyBucketCount, long sparseBucketCount, long bucketCount, + Date earliestRecordTimeStamp, Date latestRecordTimeStamp, Date lastDataTimeStamp, + Date latestEmptyBucketTimeStamp, Date latestSparseBucketTimeStamp) { + this.jobId = jobId; + this.processedRecordCount = processedRecordCount; + this.processedFieldCount = processedFieldCount; + this.inputBytes = inputBytes; + this.inputFieldCount = inputFieldCount; + this.invalidDateCount = invalidDateCount; + this.missingFieldCount = missingFieldCount; + this.outOfOrderTimeStampCount = outOfOrderTimeStampCount; + this.emptyBucketCount = emptyBucketCount; + this.sparseBucketCount = sparseBucketCount; + this.bucketCount = bucketCount; + this.latestRecordTimeStamp = latestRecordTimeStamp; + this.earliestRecordTimeStamp = earliestRecordTimeStamp; + this.lastDataTimeStamp = lastDataTimeStamp; + this.latestEmptyBucketTimeStamp = latestEmptyBucketTimeStamp; + this.latestSparseBucketTimeStamp = latestSparseBucketTimeStamp; + } + + DataCounts(String jobId) { + this.jobId = jobId; + } + + public DataCounts(DataCounts lhs) { + jobId = lhs.jobId; + processedRecordCount = lhs.processedRecordCount; + processedFieldCount = lhs.processedFieldCount; 
+ inputBytes = lhs.inputBytes; + inputFieldCount = lhs.inputFieldCount; + invalidDateCount = lhs.invalidDateCount; + missingFieldCount = lhs.missingFieldCount; + outOfOrderTimeStampCount = lhs.outOfOrderTimeStampCount; + emptyBucketCount = lhs.emptyBucketCount; + sparseBucketCount = lhs.sparseBucketCount; + bucketCount = lhs.bucketCount; + latestRecordTimeStamp = lhs.latestRecordTimeStamp; + earliestRecordTimeStamp = lhs.earliestRecordTimeStamp; + lastDataTimeStamp = lhs.lastDataTimeStamp; + latestEmptyBucketTimeStamp = lhs.latestEmptyBucketTimeStamp; + latestSparseBucketTimeStamp = lhs.latestSparseBucketTimeStamp; + } + + public String getJobId() { + return jobId; + } + + /** + * Number of records processed by this job. + * This value is the number of records passed on to + * the engine i.e. {@linkplain #getInputRecordCount()} minus + * records with bad dates or out of order + * + * @return Number of records processed by this job {@code long} + */ + public long getProcessedRecordCount() { + return processedRecordCount; + } + + /** + * Number of data points (processed record count * the number + * of analysed fields) processed by this job. This count does + * not include the time field. + * + * @return Number of data points processed by this job {@code long} + */ + public long getProcessedFieldCount() { + return processedFieldCount; + } + + /** + * Total number of input records read. + * This = processed record count + date parse error records count + * + out of order record count. + *

+ * Records with missing fields are counted as they are still written. + * + * @return Total number of input records read {@code long} + */ + public long getInputRecordCount() { + return processedRecordCount + outOfOrderTimeStampCount + + invalidDateCount; + } + + /** + * The total number of bytes sent to this job. + * This value includes the bytes from any records + * that have been discarded for any reason + * e.g. because the date cannot be read + * + * @return Volume in bytes + */ + public long getInputBytes() { + return inputBytes; + } + + /** + * The total number of fields sent to the job + * including fields that aren't analysed. + * + * @return The total number of fields sent to the job + */ + public long getInputFieldCount() { + return inputFieldCount; + } + + /** + * The number of records with an invalid date field that could + * not be parsed or converted to epoch time. + * + * @return The number of records with an invalid date field + */ + public long getInvalidDateCount() { + return invalidDateCount; + } + + /** + * The number of missing fields that had been + * configured for analysis. + * + * @return The number of missing fields + */ + public long getMissingFieldCount() { + return missingFieldCount; + } + + /** + * The number of records with a timestamp that is + * before the time of the latest record. Records should + * be in ascending chronological order + * + * @return The number of records with a timestamp that is before the time of the latest record + */ + public long getOutOfOrderTimeStampCount() { + return outOfOrderTimeStampCount; + } + + /** + * The number of buckets with no records in it. Used to measure general data fitness and/or + * configuration problems (bucket span). + * + * @return Number of empty buckets processed by this job {@code long} + */ + public long getEmptyBucketCount() { + return emptyBucketCount; + } + + /** + * The number of buckets with few records compared to the overall counts. 
+ * Used to measure general data fitness and/or configuration problems (bucket span). + * + * @return Number of sparse buckets processed by this job {@code long} + */ + public long getSparseBucketCount() { + return sparseBucketCount; + } + + /** + * The number of buckets overall. + * + * @return Number of buckets processed by this job {@code long} + */ + public long getBucketCount() { + return bucketCount; + } + + /** + * The time of the first record seen. + * + * @return The first record time + */ + public Date getEarliestRecordTimeStamp() { + return earliestRecordTimeStamp; + } + + /** + * The time of the latest record seen. + * + * @return Latest record time + */ + public Date getLatestRecordTimeStamp() { + return latestRecordTimeStamp; + } + + /** + * The wall clock time the latest record was seen. + * + * @return Wall clock time of the latest record + */ + public Date getLastDataTimeStamp() { + return lastDataTimeStamp; + } + + /** + * The time of the latest empty bucket seen. + * + * @return Latest empty bucket time + */ + public Date getLatestEmptyBucketTimeStamp() { + return latestEmptyBucketTimeStamp; + } + + /** + * The time of the latest sparse bucket seen. 
+ * + * @return Latest sparse bucket time + */ + public Date getLatestSparseBucketTimeStamp() { + return latestSparseBucketTimeStamp; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(Job.ID.getPreferredName(), jobId); + builder.field(PROCESSED_RECORD_COUNT.getPreferredName(), processedRecordCount); + builder.field(PROCESSED_FIELD_COUNT.getPreferredName(), processedFieldCount); + builder.field(INPUT_BYTES.getPreferredName(), inputBytes); + builder.field(INPUT_FIELD_COUNT.getPreferredName(), inputFieldCount); + builder.field(INVALID_DATE_COUNT.getPreferredName(), invalidDateCount); + builder.field(MISSING_FIELD_COUNT.getPreferredName(), missingFieldCount); + builder.field(OUT_OF_ORDER_TIME_COUNT.getPreferredName(), outOfOrderTimeStampCount); + builder.field(EMPTY_BUCKET_COUNT.getPreferredName(), emptyBucketCount); + builder.field(SPARSE_BUCKET_COUNT.getPreferredName(), sparseBucketCount); + builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); + if (earliestRecordTimeStamp != null) { + builder.timeField(EARLIEST_RECORD_TIME.getPreferredName(), EARLIEST_RECORD_TIME.getPreferredName() + "_string", + earliestRecordTimeStamp.getTime()); + } + if (latestRecordTimeStamp != null) { + builder.timeField(LATEST_RECORD_TIME.getPreferredName(), LATEST_RECORD_TIME.getPreferredName() + "_string", + latestRecordTimeStamp.getTime()); + } + if (lastDataTimeStamp != null) { + builder.timeField(LAST_DATA_TIME.getPreferredName(), LAST_DATA_TIME.getPreferredName() + "_string", + lastDataTimeStamp.getTime()); + } + if (latestEmptyBucketTimeStamp != null) { + builder.timeField(LATEST_EMPTY_BUCKET_TIME.getPreferredName(), LATEST_EMPTY_BUCKET_TIME.getPreferredName() + "_string", + latestEmptyBucketTimeStamp.getTime()); + } + if (latestSparseBucketTimeStamp != null) { + builder.timeField(LATEST_SPARSE_BUCKET_TIME.getPreferredName(), LATEST_SPARSE_BUCKET_TIME.getPreferredName() + 
"_string", + latestSparseBucketTimeStamp.getTime()); + } + builder.field(INPUT_RECORD_COUNT.getPreferredName(), getInputRecordCount()); + + builder.endObject(); + return builder; + } + + /** + * Equality test + */ + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + DataCounts that = (DataCounts) other; + + return Objects.equals(this.jobId, that.jobId) && + this.processedRecordCount == that.processedRecordCount && + this.processedFieldCount == that.processedFieldCount && + this.inputBytes == that.inputBytes && + this.inputFieldCount == that.inputFieldCount && + this.invalidDateCount == that.invalidDateCount && + this.missingFieldCount == that.missingFieldCount && + this.outOfOrderTimeStampCount == that.outOfOrderTimeStampCount && + this.emptyBucketCount == that.emptyBucketCount && + this.sparseBucketCount == that.sparseBucketCount && + this.bucketCount == that.bucketCount && + Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp) && + Objects.equals(this.earliestRecordTimeStamp, that.earliestRecordTimeStamp) && + Objects.equals(this.lastDataTimeStamp, that.lastDataTimeStamp) && + Objects.equals(this.latestEmptyBucketTimeStamp, that.latestEmptyBucketTimeStamp) && + Objects.equals(this.latestSparseBucketTimeStamp, that.latestSparseBucketTimeStamp); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, processedRecordCount, processedFieldCount, + inputBytes, inputFieldCount, invalidDateCount, missingFieldCount, + outOfOrderTimeStampCount, lastDataTimeStamp, emptyBucketCount, sparseBucketCount, bucketCount, + latestRecordTimeStamp, earliestRecordTimeStamp, latestEmptyBucketTimeStamp, latestSparseBucketTimeStamp); + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStats.java 
b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStats.java new file mode 100644 index 00000000000..e45e25f1aef --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStats.java @@ -0,0 +1,293 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.results.Result; + +import java.io.IOException; +import java.util.Date; +import java.util.Locale; +import java.util.Objects; + +/** + * Provide access to the C++ model memory usage numbers for the Java process. 
+ */ +public class ModelSizeStats implements ToXContentObject { + + /** + * Result type + */ + public static final String RESULT_TYPE_VALUE = "model_size_stats"; + public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); + + /** + * Field Names + */ + public static final ParseField MODEL_BYTES_FIELD = new ParseField("model_bytes"); + public static final ParseField TOTAL_BY_FIELD_COUNT_FIELD = new ParseField("total_by_field_count"); + public static final ParseField TOTAL_OVER_FIELD_COUNT_FIELD = new ParseField("total_over_field_count"); + public static final ParseField TOTAL_PARTITION_FIELD_COUNT_FIELD = new ParseField("total_partition_field_count"); + public static final ParseField BUCKET_ALLOCATION_FAILURES_COUNT_FIELD = new ParseField("bucket_allocation_failures_count"); + public static final ParseField MEMORY_STATUS_FIELD = new ParseField("memory_status"); + public static final ParseField LOG_TIME_FIELD = new ParseField("log_time"); + public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp"); + + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>(RESULT_TYPE_VALUE, true, a -> new Builder((String) a[0])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); + PARSER.declareLong(Builder::setModelBytes, MODEL_BYTES_FIELD); + PARSER.declareLong(Builder::setBucketAllocationFailuresCount, BUCKET_ALLOCATION_FAILURES_COUNT_FIELD); + PARSER.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD); + PARSER.declareLong(Builder::setTotalOverFieldCount, TOTAL_OVER_FIELD_COUNT_FIELD); + PARSER.declareLong(Builder::setTotalPartitionFieldCount, TOTAL_PARTITION_FIELD_COUNT_FIELD); + PARSER.declareField(Builder::setLogTime, + (p) -> TimeUtil.parseTimeField(p, LOG_TIME_FIELD.getPreferredName()), + LOG_TIME_FIELD, + ValueType.VALUE); + PARSER.declareField(Builder::setTimestamp, + (p) -> TimeUtil.parseTimeField(p, TIMESTAMP_FIELD.getPreferredName()), 
+ TIMESTAMP_FIELD, + ValueType.VALUE); + PARSER.declareField(Builder::setMemoryStatus, p -> MemoryStatus.fromString(p.text()), MEMORY_STATUS_FIELD, ValueType.STRING); + } + + /** + * The status of the memory monitored by the ResourceMonitor. OK is default, + * SOFT_LIMIT means that the models have done some aggressive pruning to + * keep the memory below the limit, and HARD_LIMIT means that samples have + * been dropped + */ + public enum MemoryStatus { + OK, SOFT_LIMIT, HARD_LIMIT; + + public static MemoryStatus fromString(String statusName) { + return valueOf(statusName.trim().toUpperCase(Locale.ROOT)); + } + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + } + + private final String jobId; + private final long modelBytes; + private final long totalByFieldCount; + private final long totalOverFieldCount; + private final long totalPartitionFieldCount; + private final long bucketAllocationFailuresCount; + private final MemoryStatus memoryStatus; + private final Date timestamp; + private final Date logTime; + + private ModelSizeStats(String jobId, long modelBytes, long totalByFieldCount, long totalOverFieldCount, + long totalPartitionFieldCount, long bucketAllocationFailuresCount, MemoryStatus memoryStatus, + Date timestamp, Date logTime) { + this.jobId = jobId; + this.modelBytes = modelBytes; + this.totalByFieldCount = totalByFieldCount; + this.totalOverFieldCount = totalOverFieldCount; + this.totalPartitionFieldCount = totalPartitionFieldCount; + this.bucketAllocationFailuresCount = bucketAllocationFailuresCount; + this.memoryStatus = memoryStatus; + this.timestamp = timestamp; + this.logTime = logTime; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(Job.ID.getPreferredName(), jobId); + builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); + builder.field(MODEL_BYTES_FIELD.getPreferredName(), 
modelBytes); + builder.field(TOTAL_BY_FIELD_COUNT_FIELD.getPreferredName(), totalByFieldCount); + builder.field(TOTAL_OVER_FIELD_COUNT_FIELD.getPreferredName(), totalOverFieldCount); + builder.field(TOTAL_PARTITION_FIELD_COUNT_FIELD.getPreferredName(), totalPartitionFieldCount); + builder.field(BUCKET_ALLOCATION_FAILURES_COUNT_FIELD.getPreferredName(), bucketAllocationFailuresCount); + builder.field(MEMORY_STATUS_FIELD.getPreferredName(), memoryStatus); + builder.timeField(LOG_TIME_FIELD.getPreferredName(), LOG_TIME_FIELD.getPreferredName() + "_string", logTime.getTime()); + if (timestamp != null) { + builder.timeField(TIMESTAMP_FIELD.getPreferredName(), TIMESTAMP_FIELD.getPreferredName() + "_string", timestamp.getTime()); + } + + builder.endObject(); + return builder; + } + + public String getJobId() { + return jobId; + } + + public long getModelBytes() { + return modelBytes; + } + + public long getTotalByFieldCount() { + return totalByFieldCount; + } + + public long getTotalPartitionFieldCount() { + return totalPartitionFieldCount; + } + + public long getTotalOverFieldCount() { + return totalOverFieldCount; + } + + public long getBucketAllocationFailuresCount() { + return bucketAllocationFailuresCount; + } + + public MemoryStatus getMemoryStatus() { + return memoryStatus; + } + + /** + * The timestamp of the last processed record when this instance was created. + * + * @return The record time + */ + public Date getTimestamp() { + return timestamp; + } + + /** + * The wall clock time at the point when this instance was created. + * + * @return The wall clock time + */ + public Date getLogTime() { + return logTime; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, modelBytes, totalByFieldCount, totalOverFieldCount, totalPartitionFieldCount, + this.bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); + } + + /** + * Compare all the fields. 
+ */ + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + ModelSizeStats that = (ModelSizeStats) other; + + return this.modelBytes == that.modelBytes && this.totalByFieldCount == that.totalByFieldCount + && this.totalOverFieldCount == that.totalOverFieldCount && this.totalPartitionFieldCount == that.totalPartitionFieldCount + && this.bucketAllocationFailuresCount == that.bucketAllocationFailuresCount + && Objects.equals(this.memoryStatus, that.memoryStatus) && Objects.equals(this.timestamp, that.timestamp) + && Objects.equals(this.logTime, that.logTime) + && Objects.equals(this.jobId, that.jobId); + } + + public static class Builder { + + private final String jobId; + private long modelBytes; + private long totalByFieldCount; + private long totalOverFieldCount; + private long totalPartitionFieldCount; + private long bucketAllocationFailuresCount; + private MemoryStatus memoryStatus; + private Date timestamp; + private Date logTime; + + public Builder(String jobId) { + this.jobId = jobId; + memoryStatus = MemoryStatus.OK; + logTime = new Date(); + } + + public Builder(ModelSizeStats modelSizeStats) { + this.jobId = modelSizeStats.jobId; + this.modelBytes = modelSizeStats.modelBytes; + this.totalByFieldCount = modelSizeStats.totalByFieldCount; + this.totalOverFieldCount = modelSizeStats.totalOverFieldCount; + this.totalPartitionFieldCount = modelSizeStats.totalPartitionFieldCount; + this.bucketAllocationFailuresCount = modelSizeStats.bucketAllocationFailuresCount; + this.memoryStatus = modelSizeStats.memoryStatus; + this.timestamp = modelSizeStats.timestamp; + this.logTime = modelSizeStats.logTime; + } + + public Builder setModelBytes(long modelBytes) { + this.modelBytes = modelBytes; + return this; + } + + public Builder setTotalByFieldCount(long totalByFieldCount) { + this.totalByFieldCount = totalByFieldCount; + return this; + } + + public 
Builder setTotalPartitionFieldCount(long totalPartitionFieldCount) { + this.totalPartitionFieldCount = totalPartitionFieldCount; + return this; + } + + public Builder setTotalOverFieldCount(long totalOverFieldCount) { + this.totalOverFieldCount = totalOverFieldCount; + return this; + } + + public Builder setBucketAllocationFailuresCount(long bucketAllocationFailuresCount) { + this.bucketAllocationFailuresCount = bucketAllocationFailuresCount; + return this; + } + + public Builder setMemoryStatus(MemoryStatus memoryStatus) { + Objects.requireNonNull(memoryStatus, "[" + MEMORY_STATUS_FIELD.getPreferredName() + "] must not be null"); + this.memoryStatus = memoryStatus; + return this; + } + + public Builder setTimestamp(Date timestamp) { + this.timestamp = timestamp; + return this; + } + + public Builder setLogTime(Date logTime) { + this.logTime = logTime; + return this; + } + + public ModelSizeStats build() { + return new ModelSizeStats(jobId, modelBytes, totalByFieldCount, totalOverFieldCount, totalPartitionFieldCount, + bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); + } + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java new file mode 100644 index 00000000000..ddf6a7984bf --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java @@ -0,0 +1,330 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; + +import java.io.IOException; +import java.util.Date; +import java.util.Objects; + +/** + * ModelSnapshot Result POJO + */ +public class ModelSnapshot implements ToXContentObject { + /** + * Field Names + */ + public static final ParseField TIMESTAMP = new ParseField("timestamp"); + public static final ParseField DESCRIPTION = new ParseField("description"); + public static final ParseField SNAPSHOT_DOC_COUNT = new ParseField("snapshot_doc_count"); + public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_time_stamp"); + public static final ParseField LATEST_RESULT_TIME = new ParseField("latest_result_time_stamp"); + public static final ParseField QUANTILES = new ParseField("quantiles"); + public static final ParseField RETAIN = new ParseField("retain"); + public static final ParseField MIN_VERSION = new ParseField("min_version"); + public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); + + public static final ObjectParser PARSER = new ObjectParser<>("model_snapshot", true, Builder::new); + + static { + PARSER.declareString(Builder::setJobId, Job.ID); + 
PARSER.declareString(Builder::setMinVersion, MIN_VERSION); + PARSER.declareField(Builder::setTimestamp, + (p) -> TimeUtil.parseTimeField(p, TIMESTAMP.getPreferredName()), + TIMESTAMP, + ValueType.VALUE); + PARSER.declareString(Builder::setDescription, DESCRIPTION); + PARSER.declareString(Builder::setSnapshotId, SNAPSHOT_ID); + PARSER.declareInt(Builder::setSnapshotDocCount, SNAPSHOT_DOC_COUNT); + PARSER.declareObject(Builder::setModelSizeStats, ModelSizeStats.PARSER, + ModelSizeStats.RESULT_TYPE_FIELD); + PARSER.declareField(Builder::setLatestRecordTimeStamp, + (p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), + LATEST_RECORD_TIME, + ValueType.VALUE); + PARSER.declareField(Builder::setLatestResultTimeStamp, + (p) -> TimeUtil.parseTimeField(p, LATEST_RESULT_TIME.getPreferredName()), + LATEST_RESULT_TIME, + ValueType.VALUE); + PARSER.declareObject(Builder::setQuantiles, Quantiles.PARSER, QUANTILES); + PARSER.declareBoolean(Builder::setRetain, RETAIN); + } + + + private final String jobId; + + /** + * The minimum version a node should have to be able + * to read this model snapshot. 
+ */ + private final Version minVersion; + + private final Date timestamp; + private final String description; + private final String snapshotId; + private final int snapshotDocCount; + private final ModelSizeStats modelSizeStats; + private final Date latestRecordTimeStamp; + private final Date latestResultTimeStamp; + private final Quantiles quantiles; + private final boolean retain; + + + private ModelSnapshot(String jobId, Version minVersion, Date timestamp, String description, String snapshotId, int snapshotDocCount, + ModelSizeStats modelSizeStats, Date latestRecordTimeStamp, Date latestResultTimeStamp, Quantiles quantiles, + boolean retain) { + this.jobId = jobId; + this.minVersion = minVersion; + this.timestamp = timestamp; + this.description = description; + this.snapshotId = snapshotId; + this.snapshotDocCount = snapshotDocCount; + this.modelSizeStats = modelSizeStats; + this.latestRecordTimeStamp = latestRecordTimeStamp; + this.latestResultTimeStamp = latestResultTimeStamp; + this.quantiles = quantiles; + this.retain = retain; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + builder.field(MIN_VERSION.getPreferredName(), minVersion); + if (timestamp != null) { + builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); + } + if (description != null) { + builder.field(DESCRIPTION.getPreferredName(), description); + } + if (snapshotId != null) { + builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); + } + builder.field(SNAPSHOT_DOC_COUNT.getPreferredName(), snapshotDocCount); + if (modelSizeStats != null) { + builder.field(ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName(), modelSizeStats); + } + if (latestRecordTimeStamp != null) { + builder.timeField(LATEST_RECORD_TIME.getPreferredName(), LATEST_RECORD_TIME.getPreferredName() + "_string", + 
latestRecordTimeStamp.getTime()); + } + if (latestResultTimeStamp != null) { + builder.timeField(LATEST_RESULT_TIME.getPreferredName(), LATEST_RESULT_TIME.getPreferredName() + "_string", + latestResultTimeStamp.getTime()); + } + if (quantiles != null) { + builder.field(QUANTILES.getPreferredName(), quantiles); + } + builder.field(RETAIN.getPreferredName(), retain); + builder.endObject(); + return builder; + } + + public String getJobId() { + return jobId; + } + + public Version getMinVersion() { + return minVersion; + } + + public Date getTimestamp() { + return timestamp; + } + + public String getDescription() { + return description; + } + + public String getSnapshotId() { + return snapshotId; + } + + public int getSnapshotDocCount() { + return snapshotDocCount; + } + + public ModelSizeStats getModelSizeStats() { + return modelSizeStats; + } + + public Quantiles getQuantiles() { + return quantiles; + } + + public Date getLatestRecordTimeStamp() { + return latestRecordTimeStamp; + } + + public Date getLatestResultTimeStamp() { + return latestResultTimeStamp; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, minVersion, timestamp, description, snapshotId, quantiles, snapshotDocCount, modelSizeStats, + latestRecordTimeStamp, latestResultTimeStamp, retain); + } + + /** + * Compare all the fields. 
+ */ + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + ModelSnapshot that = (ModelSnapshot) other; + + return Objects.equals(this.jobId, that.jobId) + && Objects.equals(this.minVersion, that.minVersion) + && Objects.equals(this.timestamp, that.timestamp) + && Objects.equals(this.description, that.description) + && Objects.equals(this.snapshotId, that.snapshotId) + && this.snapshotDocCount == that.snapshotDocCount + && Objects.equals(this.modelSizeStats, that.modelSizeStats) + && Objects.equals(this.quantiles, that.quantiles) + && Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp) + && Objects.equals(this.latestResultTimeStamp, that.latestResultTimeStamp) + && this.retain == that.retain; + } + + public static class Builder { + private String jobId; + + // Stored snapshot documents created prior to 6.3.0 will have no + // value for min_version. We default it to 5.5.0 as there were + // no model changes between 5.5.0 and 6.3.0. 
+ private Version minVersion = Version.V_5_5_0; + + private Date timestamp; + private String description; + private String snapshotId; + private int snapshotDocCount; + private ModelSizeStats modelSizeStats; + private Date latestRecordTimeStamp; + private Date latestResultTimeStamp; + private Quantiles quantiles; + private boolean retain; + + + public Builder() { + } + + public Builder(String jobId) { + this.jobId = jobId; + } + + public Builder(ModelSnapshot modelSnapshot) { + this.jobId = modelSnapshot.jobId; + this.timestamp = modelSnapshot.timestamp; + this.description = modelSnapshot.description; + this.snapshotId = modelSnapshot.snapshotId; + this.snapshotDocCount = modelSnapshot.snapshotDocCount; + this.modelSizeStats = modelSnapshot.modelSizeStats; + this.latestRecordTimeStamp = modelSnapshot.latestRecordTimeStamp; + this.latestResultTimeStamp = modelSnapshot.latestResultTimeStamp; + this.quantiles = modelSnapshot.quantiles; + this.retain = modelSnapshot.retain; + this.minVersion = modelSnapshot.minVersion; + } + + public Builder setJobId(String jobId) { + this.jobId = jobId; + return this; + } + + Builder setMinVersion(Version minVersion) { + this.minVersion = minVersion; + return this; + } + + Builder setMinVersion(String minVersion) { + this.minVersion = Version.fromString(minVersion); + return this; + } + + public Builder setTimestamp(Date timestamp) { + this.timestamp = timestamp; + return this; + } + + public Builder setDescription(String description) { + this.description = description; + return this; + } + + public Builder setSnapshotId(String snapshotId) { + this.snapshotId = snapshotId; + return this; + } + + public Builder setSnapshotDocCount(int snapshotDocCount) { + this.snapshotDocCount = snapshotDocCount; + return this; + } + + public Builder setModelSizeStats(ModelSizeStats.Builder modelSizeStats) { + this.modelSizeStats = modelSizeStats.build(); + return this; + } + + public Builder setModelSizeStats(ModelSizeStats modelSizeStats) { + 
this.modelSizeStats = modelSizeStats; + return this; + } + + public Builder setLatestRecordTimeStamp(Date latestRecordTimeStamp) { + this.latestRecordTimeStamp = latestRecordTimeStamp; + return this; + } + + public Builder setLatestResultTimeStamp(Date latestResultTimeStamp) { + this.latestResultTimeStamp = latestResultTimeStamp; + return this; + } + + public Builder setQuantiles(Quantiles quantiles) { + this.quantiles = quantiles; + return this; + } + + public Builder setRetain(boolean value) { + this.retain = value; + return this; + } + + public ModelSnapshot build() { + return new ModelSnapshot(jobId, minVersion, timestamp, description, snapshotId, snapshotDocCount, modelSizeStats, + latestRecordTimeStamp, latestResultTimeStamp, quantiles, retain); + } + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java new file mode 100644 index 00000000000..1c047d6c302 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; + +import java.io.IOException; +import java.util.Date; +import java.util.Objects; + +/** + * Quantiles Result POJO + */ +public class Quantiles implements ToXContentObject { + + /** + * Field Names + */ + public static final ParseField TIMESTAMP = new ParseField("timestamp"); + public static final ParseField QUANTILE_STATE = new ParseField("quantile_state"); + + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("quantiles", true, a -> new Quantiles((String) a[0], (Date) a[1], (String) a[2])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> new Date(p.longValue()), TIMESTAMP, ValueType.LONG); + PARSER.declareString(ConstructingObjectParser.constructorArg(), QUANTILE_STATE); + } + + private final String jobId; + private final Date timestamp; + private final String quantileState; + + public Quantiles(String jobId, Date timestamp, String quantileState) { + this.jobId = jobId; + this.timestamp = Objects.requireNonNull(timestamp); + this.quantileState = Objects.requireNonNull(quantileState); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + if (timestamp != null) { + builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime()); + } + if (quantileState != null) { + builder.field(QUANTILE_STATE.getPreferredName(), quantileState); + } + builder.endObject(); + 
return builder; + } + + public String getJobId() { + return jobId; + } + + public Date getTimestamp() { + return timestamp; + } + + public String getQuantileState() { + return quantileState; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, timestamp, quantileState); + } + + /** + * Compare all the fields. + */ + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + Quantiles that = (Quantiles) other; + + return Objects.equals(this.jobId, that.jobId) && Objects.equals(this.timestamp, that.timestamp) + && Objects.equals(this.quantileState, that.quantileState); + } +} + diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/TimeUtil.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/TimeUtil.java new file mode 100644 index 00000000000..a52b99d0af7 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/TimeUtil.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.time.format.DateTimeFormatter; +import java.util.Date; + +final class TimeUtil { + + /** + * Parse out a Date object given the current parser and field name. + * + * @param parser current XContentParser + * @param fieldName the field's preferred name (utilized in exception) + * @return parsed Date object + * @throws IOException from XContentParser + */ + static Date parseTimeField(XContentParser parser, String fieldName) throws IOException { + if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return new Date(parser.longValue()); + } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { + return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(parser.text())).toInstant().toEpochMilli()); + } + throw new IllegalArgumentException( + "unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]"); + } + +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyRecord.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyRecord.java index 8289032634e..4747f3a48bd 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyRecord.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyRecord.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; import java.io.IOException; import java.time.format.DateTimeFormatter; @@ -88,7 +89,7 @@ public class 
AnomalyRecord implements ToXContentObject { static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID); + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { if (p.currentToken() == Token.VALUE_NUMBER) { return new Date(p.longValue()); @@ -159,7 +160,7 @@ public class AnomalyRecord implements ToXContentObject { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(Result.JOB_ID.getPreferredName(), jobId); + builder.field(Job.ID.getPreferredName(), jobId); builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); builder.field(PROBABILITY.getPreferredName(), probability); builder.field(RECORD_SCORE.getPreferredName(), recordScore); diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Bucket.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Bucket.java index dc56c7bd262..cbaf83abbad 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Bucket.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Bucket.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; import java.io.IOException; import java.time.format.DateTimeFormatter; @@ -61,7 +62,7 @@ public class Bucket implements ToXContentObject { new ConstructingObjectParser<>(RESULT_TYPE_VALUE, true, a -> new Bucket((String) a[0], (Date) a[1], (long) a[2])); static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID); + 
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { if (p.currentToken() == Token.VALUE_NUMBER) { return new Date(p.longValue()); @@ -104,7 +105,7 @@ public class Bucket implements ToXContentObject { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(Result.JOB_ID.getPreferredName(), jobId); + builder.field(Job.ID.getPreferredName(), jobId); builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketInfluencer.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketInfluencer.java index c556737213e..29d8447cd6a 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketInfluencer.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketInfluencer.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; import java.io.IOException; import java.time.format.DateTimeFormatter; @@ -54,7 +55,7 @@ public class BucketInfluencer implements ToXContentObject { a -> new BucketInfluencer((String) a[0], (Date) a[1], (long) a[2])); static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID); + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); 
PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { if (p.currentToken() == Token.VALUE_NUMBER) { return new Date(p.longValue()); @@ -93,7 +94,7 @@ public class BucketInfluencer implements ToXContentObject { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(Result.JOB_ID.getPreferredName(), jobId); + builder.field(Job.ID.getPreferredName(), jobId); builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); if (influenceField != null) { builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField); diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/CategoryDefinition.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/CategoryDefinition.java index 2b452eeb828..59b59006b33 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/CategoryDefinition.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/CategoryDefinition.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; import java.io.IOException; import java.util.ArrayList; @@ -49,7 +50,7 @@ public class CategoryDefinition implements ToXContentObject { new ConstructingObjectParser<>(TYPE.getPreferredName(), true, a -> new CategoryDefinition((String) a[0])); static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID); + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); PARSER.declareLong(CategoryDefinition::setCategoryId, CATEGORY_ID); PARSER.declareString(CategoryDefinition::setTerms, TERMS); 
PARSER.declareString(CategoryDefinition::setRegex, REGEX); @@ -130,7 +131,7 @@ public class CategoryDefinition implements ToXContentObject { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(Result.JOB_ID.getPreferredName(), jobId); + builder.field(Job.ID.getPreferredName(), jobId); builder.field(CATEGORY_ID.getPreferredName(), categoryId); builder.field(TERMS.getPreferredName(), terms); builder.field(REGEX.getPreferredName(), regex); diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influencer.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influencer.java index ce3a032e54b..51c88883608 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influencer.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influencer.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; import java.io.IOException; import java.time.format.DateTimeFormatter; @@ -57,7 +58,7 @@ public class Influencer implements ToXContentObject { a -> new Influencer((String) a[0], (String) a[1], (String) a[2], (Date) a[3], (long) a[4])); static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID); + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME); PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE); PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { @@ -98,7 +99,7 @@ public class Influencer implements 
ToXContentObject { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(Result.JOB_ID.getPreferredName(), jobId); + builder.field(Job.ID.getPreferredName(), jobId); builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField); builder.field(INFLUENCER_FIELD_VALUE.getPreferredName(), influenceValue); diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/OverallBucket.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/OverallBucket.java index 217f0bf5e21..4f13b4b2664 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/OverallBucket.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/OverallBucket.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; import java.io.IOException; import java.time.format.DateTimeFormatter; @@ -158,7 +159,7 @@ public class OverallBucket implements ToXContentObject { new ConstructingObjectParser<>("job_info", true, a -> new JobInfo((String) a[0], (double) a[1])); static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID); + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX_ANOMALY_SCORE); } @@ -181,7 +182,7 @@ public class OverallBucket implements ToXContentObject { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(Result.JOB_ID.getPreferredName(), jobId); + 
builder.field(Job.ID.getPreferredName(), jobId); builder.field(MAX_ANOMALY_SCORE.getPreferredName(), maxAnomalyScore); builder.endObject(); return builder; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Result.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Result.java index 0cd8a09da95..cce5fa65ebb 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Result.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Result.java @@ -28,7 +28,6 @@ public final class Result { /** * Serialisation fields */ - public static final ParseField JOB_ID = new ParseField("job_id"); public static final ParseField TYPE = new ParseField("result"); public static final ParseField RESULT_TYPE = new ParseField("result_type"); public static final ParseField TIMESTAMP = new ParseField("timestamp"); diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCountsTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCountsTests.java new file mode 100644 index 00000000000..2232e8c88d9 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCountsTests.java @@ -0,0 +1,130 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.joda.time.DateTime; + +import java.util.Date; + +import static org.hamcrest.Matchers.greaterThan; + +public class DataCountsTests extends AbstractXContentTestCase { + + public static DataCounts createTestInstance(String jobId) { + return new DataCounts(jobId, randomIntBetween(1, 1_000_000), + randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), + randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), + randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), + new DateTime(randomDateTimeZone()).toDate(), new DateTime(randomDateTimeZone()).toDate(), + new DateTime(randomDateTimeZone()).toDate(), new DateTime(randomDateTimeZone()).toDate(), + new DateTime(randomDateTimeZone()).toDate()); + } + + @Override + public DataCounts createTestInstance() { + return createTestInstance(randomAlphaOfLength(10)); + } + + @Override + protected DataCounts doParseInstance(XContentParser parser) { + return DataCounts.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + public void testCountsEquals_GivenEqualCounts() { + DataCounts counts1 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15); + DataCounts counts2 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15); + 
+ assertTrue(counts1.equals(counts2)); + assertTrue(counts2.equals(counts1)); + } + + public void testCountsHashCode_GivenEqualCounts() { + DataCounts counts1 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15); + DataCounts counts2 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15); + assertEquals(counts1.hashCode(), counts2.hashCode()); + } + + public void testCountsCopyConstructor() { + DataCounts counts1 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15); + DataCounts counts2 = new DataCounts(counts1); + + assertEquals(counts1.hashCode(), counts2.hashCode()); + } + + public void testCountCreatedZero() throws Exception { + DataCounts counts = new DataCounts(randomAlphaOfLength(16)); + assertAllFieldsEqualZero(counts); + } + + public void testCountCopyCreatedFieldsNotZero() throws Exception { + DataCounts counts1 = createCounts(1, 200, 400, 3, 4, 5, 6, 7, 8, 9, 1479211200000L, 1479384000000L, 13, 14, 15); + assertAllFieldsGreaterThanZero(counts1); + + DataCounts counts2 = new DataCounts(counts1); + assertAllFieldsGreaterThanZero(counts2); + } + + private void assertAllFieldsEqualZero(DataCounts stats) throws Exception { + assertEquals(0L, stats.getProcessedRecordCount()); + assertEquals(0L, stats.getProcessedFieldCount()); + assertEquals(0L, stats.getInputBytes()); + assertEquals(0L, stats.getInputFieldCount()); + assertEquals(0L, stats.getInputRecordCount()); + assertEquals(0L, stats.getInvalidDateCount()); + assertEquals(0L, stats.getMissingFieldCount()); + assertEquals(0L, stats.getOutOfOrderTimeStampCount()); + } + + private void assertAllFieldsGreaterThanZero(DataCounts stats) throws Exception { + assertThat(stats.getProcessedRecordCount(), greaterThan(0L)); + assertThat(stats.getProcessedFieldCount(), greaterThan(0L)); + assertThat(stats.getInputBytes(), greaterThan(0L)); + assertThat(stats.getInputFieldCount(), greaterThan(0L)); + assertThat(stats.getInputRecordCount(), greaterThan(0L)); + 
assertThat(stats.getInputRecordCount(), greaterThan(0L)); + assertThat(stats.getInvalidDateCount(), greaterThan(0L)); + assertThat(stats.getMissingFieldCount(), greaterThan(0L)); + assertThat(stats.getOutOfOrderTimeStampCount(), greaterThan(0L)); + assertThat(stats.getLatestRecordTimeStamp().getTime(), greaterThan(0L)); + } + + private static DataCounts createCounts( + long processedRecordCount, long processedFieldCount, long inputBytes, long inputFieldCount, + long invalidDateCount, long missingFieldCount, long outOfOrderTimeStampCount, + long emptyBucketCount, long sparseBucketCount, long bucketCount, + long earliestRecordTime, long latestRecordTime, long lastDataTimeStamp, long latestEmptyBucketTimeStamp, + long latestSparseBucketTimeStamp) { + + DataCounts counts = new DataCounts("foo", processedRecordCount, processedFieldCount, inputBytes, + inputFieldCount, invalidDateCount, missingFieldCount, outOfOrderTimeStampCount, + emptyBucketCount, sparseBucketCount, bucketCount, + new Date(earliestRecordTime), new Date(latestRecordTime), + new Date(lastDataTimeStamp), new Date(latestEmptyBucketTimeStamp), new Date(latestSparseBucketTimeStamp)); + + return counts; + } + +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStatsTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStatsTests.java new file mode 100644 index 00000000000..e3341123fb0 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStatsTests.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.protocol.xpack.ml.job.process.ModelSizeStats.MemoryStatus; + +import java.util.Date; + +public class ModelSizeStatsTests extends AbstractXContentTestCase { + + public void testDefaultConstructor() { + ModelSizeStats stats = new ModelSizeStats.Builder("foo").build(); + assertEquals(0, stats.getModelBytes()); + assertEquals(0, stats.getTotalByFieldCount()); + assertEquals(0, stats.getTotalOverFieldCount()); + assertEquals(0, stats.getTotalPartitionFieldCount()); + assertEquals(0, stats.getBucketAllocationFailuresCount()); + assertEquals(MemoryStatus.OK, stats.getMemoryStatus()); + } + + public void testSetMemoryStatus_GivenNull() { + ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo"); + + NullPointerException ex = expectThrows(NullPointerException.class, () -> stats.setMemoryStatus(null)); + + assertEquals("[memory_status] must not be null", ex.getMessage()); + } + + public void testSetMemoryStatus_GivenSoftLimit() { + ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo"); + + stats.setMemoryStatus(MemoryStatus.SOFT_LIMIT); + + assertEquals(MemoryStatus.SOFT_LIMIT, stats.build().getMemoryStatus()); + } + + @Override + protected ModelSizeStats createTestInstance() { + return createRandomized(); + } + + public static ModelSizeStats createRandomized() { + 
ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo"); + if (randomBoolean()) { + stats.setBucketAllocationFailuresCount(randomNonNegativeLong()); + } + if (randomBoolean()) { + stats.setModelBytes(randomNonNegativeLong()); + } + if (randomBoolean()) { + stats.setTotalByFieldCount(randomNonNegativeLong()); + } + if (randomBoolean()) { + stats.setTotalOverFieldCount(randomNonNegativeLong()); + } + if (randomBoolean()) { + stats.setTotalPartitionFieldCount(randomNonNegativeLong()); + } + if (randomBoolean()) { + stats.setLogTime(new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis())); + } + if (randomBoolean()) { + stats.setTimestamp(new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis())); + } + if (randomBoolean()) { + stats.setMemoryStatus(randomFrom(MemoryStatus.values())); + } + return stats.build(); + } + + @Override + protected ModelSizeStats doParseInstance(XContentParser parser) { + return ModelSizeStats.PARSER.apply(parser, null).build(); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshotTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshotTests.java new file mode 100644 index 00000000000..8c6a9bd83c9 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshotTests.java @@ -0,0 +1,186 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.Version; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.util.Date; + +public class ModelSnapshotTests extends AbstractXContentTestCase { + + private static final Date DEFAULT_TIMESTAMP = new Date(); + private static final String DEFAULT_DESCRIPTION = "a snapshot"; + private static final String DEFAULT_ID = "my_id"; + private static final int DEFAULT_DOC_COUNT = 7; + private static final Date DEFAULT_LATEST_RESULT_TIMESTAMP = new Date(12345678901234L); + private static final Date DEFAULT_LATEST_RECORD_TIMESTAMP = new Date(12345678904321L); + private static final boolean DEFAULT_RETAIN = true; + + public void testCopyBuilder() { + ModelSnapshot modelSnapshot1 = createFullyPopulated().build(); + ModelSnapshot modelSnapshot2 = new ModelSnapshot.Builder(modelSnapshot1).build(); + assertEquals(modelSnapshot1, modelSnapshot2); + } + + public void testEquals_GivenSameObject() { + ModelSnapshot modelSnapshot = createFullyPopulated().build(); + assertTrue(modelSnapshot.equals(modelSnapshot)); + } + + public void testEquals_GivenObjectOfDifferentClass() { + ModelSnapshot modelSnapshot = createFullyPopulated().build(); + assertFalse(modelSnapshot.equals("a string")); + } + + public void testEquals_GivenEqualModelSnapshots() { + ModelSnapshot modelSnapshot1 = createFullyPopulated().build(); + ModelSnapshot modelSnapshot2 = 
createFullyPopulated().build(); + + assertEquals(modelSnapshot1, modelSnapshot2); + assertEquals(modelSnapshot2, modelSnapshot1); + assertEquals(modelSnapshot1.hashCode(), modelSnapshot2.hashCode()); + } + + public void testEquals_GivenDifferentTimestamp() { + ModelSnapshot modelSnapshot1 = createFullyPopulated().build(); + ModelSnapshot modelSnapshot2 = createFullyPopulated().setTimestamp( + new Date(modelSnapshot1.getTimestamp().getTime() + 1)).build(); + + assertFalse(modelSnapshot1.equals(modelSnapshot2)); + assertFalse(modelSnapshot2.equals(modelSnapshot1)); + } + + public void testEquals_GivenDifferentDescription() { + ModelSnapshot modelSnapshot1 = createFullyPopulated().build(); + ModelSnapshot modelSnapshot2 = createFullyPopulated() + .setDescription(modelSnapshot1.getDescription() + " blah").build(); + + assertFalse(modelSnapshot1.equals(modelSnapshot2)); + assertFalse(modelSnapshot2.equals(modelSnapshot1)); + } + + public void testEquals_GivenDifferentId() { + ModelSnapshot modelSnapshot1 = createFullyPopulated().build(); + ModelSnapshot modelSnapshot2 = createFullyPopulated() + .setSnapshotId(modelSnapshot1.getSnapshotId() + "_2").build(); + + assertFalse(modelSnapshot1.equals(modelSnapshot2)); + assertFalse(modelSnapshot2.equals(modelSnapshot1)); + } + + public void testEquals_GivenDifferentDocCount() { + ModelSnapshot modelSnapshot1 = createFullyPopulated().build(); + ModelSnapshot modelSnapshot2 = createFullyPopulated() + .setSnapshotDocCount(modelSnapshot1.getSnapshotDocCount() + 1).build(); + + assertFalse(modelSnapshot1.equals(modelSnapshot2)); + assertFalse(modelSnapshot2.equals(modelSnapshot1)); + } + + public void testEquals_GivenDifferentModelSizeStats() { + ModelSnapshot modelSnapshot1 = createFullyPopulated().build(); + ModelSizeStats.Builder modelSizeStats = new ModelSizeStats.Builder("foo"); + modelSizeStats.setModelBytes(42L); + ModelSnapshot modelSnapshot2 = createFullyPopulated().setModelSizeStats(modelSizeStats).build(); + + 
assertFalse(modelSnapshot1.equals(modelSnapshot2)); + assertFalse(modelSnapshot2.equals(modelSnapshot1)); + } + + public void testEquals_GivenDifferentQuantiles() { + ModelSnapshot modelSnapshot1 = createFullyPopulated().build(); + ModelSnapshot modelSnapshot2 = createFullyPopulated() + .setQuantiles(new Quantiles("foo", modelSnapshot1.getQuantiles().getTimestamp(), + "different state")).build(); + + assertFalse(modelSnapshot1.equals(modelSnapshot2)); + assertFalse(modelSnapshot2.equals(modelSnapshot1)); + } + + public void testEquals_GivenDifferentLatestResultTimestamp() { + ModelSnapshot modelSnapshot1 = createFullyPopulated().build(); + ModelSnapshot modelSnapshot2 = createFullyPopulated().setLatestResultTimeStamp( + new Date(modelSnapshot1.getLatestResultTimeStamp().getTime() + 1)).build(); + + assertFalse(modelSnapshot1.equals(modelSnapshot2)); + assertFalse(modelSnapshot2.equals(modelSnapshot1)); + } + + public void testEquals_GivenDifferentLatestRecordTimestamp() { + ModelSnapshot modelSnapshot1 = createFullyPopulated().build(); + ModelSnapshot modelSnapshot2 = createFullyPopulated().setLatestRecordTimeStamp( + new Date(modelSnapshot1.getLatestRecordTimeStamp().getTime() + 1)).build(); + + assertFalse(modelSnapshot1.equals(modelSnapshot2)); + assertFalse(modelSnapshot2.equals(modelSnapshot1)); + } + + private static ModelSnapshot.Builder createFullyPopulated() { + ModelSnapshot.Builder modelSnapshot = new ModelSnapshot.Builder(); + modelSnapshot.setJobId("foo"); + modelSnapshot.setMinVersion(Version.CURRENT); + modelSnapshot.setTimestamp(DEFAULT_TIMESTAMP); + modelSnapshot.setDescription(DEFAULT_DESCRIPTION); + modelSnapshot.setSnapshotId(DEFAULT_ID); + modelSnapshot.setSnapshotDocCount(DEFAULT_DOC_COUNT); + ModelSizeStats.Builder modelSizeStatsBuilder = new ModelSizeStats.Builder("foo"); + modelSizeStatsBuilder.setLogTime(null); + modelSnapshot.setModelSizeStats(modelSizeStatsBuilder); + 
modelSnapshot.setLatestResultTimeStamp(DEFAULT_LATEST_RESULT_TIMESTAMP); + modelSnapshot.setLatestRecordTimeStamp(DEFAULT_LATEST_RECORD_TIMESTAMP); + modelSnapshot.setQuantiles(new Quantiles("foo", DEFAULT_TIMESTAMP, "state")); + modelSnapshot.setRetain(DEFAULT_RETAIN); + return modelSnapshot; + } + + @Override + protected ModelSnapshot createTestInstance() { + return createRandomized(); + } + + public static ModelSnapshot createRandomized() { + ModelSnapshot.Builder modelSnapshot = new ModelSnapshot.Builder(randomAlphaOfLengthBetween(1, 20)); + modelSnapshot.setMinVersion(Version.CURRENT); + modelSnapshot.setTimestamp(new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis())); + modelSnapshot.setDescription(randomAlphaOfLengthBetween(1, 20)); + modelSnapshot.setSnapshotId(randomAlphaOfLengthBetween(1, 20)); + modelSnapshot.setSnapshotDocCount(randomInt()); + modelSnapshot.setModelSizeStats(ModelSizeStatsTests.createRandomized()); + modelSnapshot.setLatestResultTimeStamp( + new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis())); + modelSnapshot.setLatestRecordTimeStamp( + new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis())); + modelSnapshot.setQuantiles(QuantilesTests.createRandomized()); + modelSnapshot.setRetain(randomBoolean()); + return modelSnapshot.build(); + } + + @Override + protected ModelSnapshot doParseInstance(XContentParser parser){ + return ModelSnapshot.PARSER.apply(parser, null).build(); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/QuantilesTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/QuantilesTests.java new file mode 100644 index 00000000000..77ae21bc6f8 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/QuantilesTests.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under 
one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.util.Date; + + +public class QuantilesTests extends AbstractXContentTestCase { + + public void testEquals_GivenSameObject() { + Quantiles quantiles = new Quantiles("foo", new Date(0L), "foo"); + assertTrue(quantiles.equals(quantiles)); + } + + + public void testEquals_GivenDifferentClassObject() { + Quantiles quantiles = new Quantiles("foo", new Date(0L), "foo"); + assertFalse(quantiles.equals("not a quantiles object")); + } + + + public void testEquals_GivenEqualQuantilesObject() { + Quantiles quantiles1 = new Quantiles("foo", new Date(0L), "foo"); + + Quantiles quantiles2 = new Quantiles("foo", new Date(0L), "foo"); + + assertTrue(quantiles1.equals(quantiles2)); + assertTrue(quantiles2.equals(quantiles1)); + } + + + public void testEquals_GivenDifferentState() { + Quantiles quantiles1 = new Quantiles("foo", new Date(0L), "bar1"); + + Quantiles quantiles2 = new Quantiles("foo", new Date(0L), "bar2"); + + assertFalse(quantiles1.equals(quantiles2)); + 
assertFalse(quantiles2.equals(quantiles1)); + } + + + public void testHashCode_GivenEqualObject() { + Quantiles quantiles1 = new Quantiles("foo", new Date(0L), "foo"); + + Quantiles quantiles2 = new Quantiles("foo", new Date(0L), "foo"); + + assertEquals(quantiles1.hashCode(), quantiles2.hashCode()); + } + + + @Override + protected Quantiles createTestInstance() { + return createRandomized(); + } + + public static Quantiles createRandomized() { + return new Quantiles(randomAlphaOfLengthBetween(1, 20), + new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()), + randomAlphaOfLengthBetween(0, 1000)); + } + + @Override + protected Quantiles doParseInstance(XContentParser parser) { + return Quantiles.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +}