Merge branch 'master' into index-lifecycle
commit 0fe21136db

@@ -26,7 +26,7 @@ import org.elasticsearch.action.ingest.GetPipelineResponse;
 import org.elasticsearch.action.ingest.PutPipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineResponse;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;

 import java.io.IOException;

@@ -54,9 +54,9 @@ public final class IngestClient {
 * @return the response
 * @throws IOException in case there is a problem sending the request or parsing back the response
 */
-public WritePipelineResponse putPipeline(PutPipelineRequest request, RequestOptions options) throws IOException {
+public AcknowledgedResponse putPipeline(PutPipelineRequest request, RequestOptions options) throws IOException {
 return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::putPipeline, options,
-WritePipelineResponse::fromXContent, emptySet());
+AcknowledgedResponse::fromXContent, emptySet());
 }

 /**
@@ -67,9 +67,9 @@ public final class IngestClient {
 * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @param listener the listener to be notified upon request completion
 */
-public void putPipelineAsync(PutPipelineRequest request, RequestOptions options, ActionListener<WritePipelineResponse> listener) {
+public void putPipelineAsync(PutPipelineRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
 restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::putPipeline, options,
-WritePipelineResponse::fromXContent, listener, emptySet());
+AcknowledgedResponse::fromXContent, listener, emptySet());
 }

 /**
@@ -109,9 +109,9 @@ public final class IngestClient {
 * @return the response
 * @throws IOException in case there is a problem sending the request or parsing back the response
 */
-public WritePipelineResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) throws IOException {
+public AcknowledgedResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) throws IOException {
 return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::deletePipeline, options,
-WritePipelineResponse::fromXContent, emptySet());
+AcknowledgedResponse::fromXContent, emptySet());
 }

 /**
@@ -123,9 +123,9 @@ public final class IngestClient {
 * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @param listener the listener to be notified upon request completion
 */
-public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions options, ActionListener<WritePipelineResponse> listener) {
+public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
 restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::deletePipeline, options,
-WritePipelineResponse::fromXContent, listener, emptySet());
+AcknowledgedResponse::fromXContent, listener, emptySet());
 }

 /**
@@ -28,7 +28,7 @@ import org.elasticsearch.action.ingest.SimulateDocumentResult;
 import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult;
 import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineResponse;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -50,7 +50,7 @@ public class IngestClientIT extends ESRestHighLevelClientTestCase {
 BytesReference.bytes(pipelineBuilder),
 pipelineBuilder.contentType());

-WritePipelineResponse putPipelineResponse =
+AcknowledgedResponse putPipelineResponse =
 execute(request, highLevelClient().ingest()::putPipeline, highLevelClient().ingest()::putPipelineAsync);
 assertTrue(putPipelineResponse.isAcknowledged());
 }
@@ -86,7 +86,7 @@ public class IngestClientIT extends ESRestHighLevelClientTestCase {

 DeletePipelineRequest request = new DeletePipelineRequest(id);

-WritePipelineResponse response =
+AcknowledgedResponse response =
 execute(request, highLevelClient().ingest()::deletePipeline, highLevelClient().ingest()::deletePipelineAsync);
 assertTrue(response.isAcknowledged());
 }
@@ -31,7 +31,7 @@ import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult;
 import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineResponse;
 import org.elasticsearch.action.ingest.SimulateProcessorResult;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.ESRestHighLevelClientTestCase;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.RestHighLevelClient;
@@ -93,7 +93,7 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase {
 // end::put-pipeline-request-masterTimeout

 // tag::put-pipeline-execute
-WritePipelineResponse response = client.ingest().putPipeline(request, RequestOptions.DEFAULT); // <1>
+AcknowledgedResponse response = client.ingest().putPipeline(request, RequestOptions.DEFAULT); // <1>
 // end::put-pipeline-execute

 // tag::put-pipeline-response
@@ -117,10 +117,10 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase {
 );

 // tag::put-pipeline-execute-listener
-ActionListener<WritePipelineResponse> listener =
-new ActionListener<WritePipelineResponse>() {
+ActionListener<AcknowledgedResponse> listener =
+new ActionListener<AcknowledgedResponse>() {
 @Override
-public void onResponse(WritePipelineResponse response) {
+public void onResponse(AcknowledgedResponse response) {
 // <1>
 }

@@ -236,7 +236,7 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase {
 // end::delete-pipeline-request-masterTimeout

 // tag::delete-pipeline-execute
-WritePipelineResponse response = client.ingest().deletePipeline(request, RequestOptions.DEFAULT); // <1>
+AcknowledgedResponse response = client.ingest().deletePipeline(request, RequestOptions.DEFAULT); // <1>
 // end::delete-pipeline-execute

 // tag::delete-pipeline-response
@@ -257,10 +257,10 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase {
 DeletePipelineRequest request = new DeletePipelineRequest("my-pipeline-id");

 // tag::delete-pipeline-execute-listener
-ActionListener<WritePipelineResponse> listener =
-new ActionListener<WritePipelineResponse>() {
+ActionListener<AcknowledgedResponse> listener =
+new ActionListener<AcknowledgedResponse>() {
 @Override
-public void onResponse(WritePipelineResponse response) {
+public void onResponse(AcknowledgedResponse response) {
 // <1>
 }

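Note: the three client-side files above all swap WritePipelineResponse for the generic AcknowledgedResponse, so put/delete pipeline calls now return only an acknowledged flag. A minimal caller-side sketch of the resulting API surface, assuming an already-built RestHighLevelClient named `client` and a placeholder pipeline id (both assumptions for illustration, not part of this commit; imports are the ones shown in the hunks above plus BytesArray):

    // Hypothetical usage sketch; "client" and "my-pipeline-id" are assumed placeholders.
    BytesArray source = new BytesArray("{\"processors\":[{\"set\":{\"field\":\"foo\",\"value\":\"bar\"}}]}");
    PutPipelineRequest putRequest = new PutPipelineRequest("my-pipeline-id", source, XContentType.JSON);

    // Synchronous call: the response now only carries the acknowledged flag.
    AcknowledgedResponse putResponse = client.ingest().putPipeline(putRequest, RequestOptions.DEFAULT);
    assert putResponse.isAcknowledged();

    // Asynchronous call: listeners are typed on AcknowledgedResponse as well.
    client.ingest().deletePipelineAsync(new DeletePipelineRequest("my-pipeline-id"), RequestOptions.DEFAULT,
            new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    // react to the acknowledgement
                }

                @Override
                public void onFailure(Exception e) {
                    // react to the failure
                }
            });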
@@ -20,7 +20,6 @@
 package org.elasticsearch.painless;

 import org.elasticsearch.painless.Locals.LocalMethod;
-import org.elasticsearch.painless.lookup.PainlessClass;
 import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessLookupUtility;
 import org.elasticsearch.painless.lookup.PainlessMethod;
@@ -38,6 +37,8 @@ import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;

+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName;
+
 /**
 * Support for dynamic type (def).
 * <p>
@@ -167,52 +168,6 @@ public final class Def {
 }
 }

-/**
- * Looks up method entry for a dynamic method call.
- * <p>
- * A dynamic method call for variable {@code x} of type {@code def} looks like:
- * {@code x.method(args...)}
- * <p>
- * This method traverses {@code recieverClass}'s class hierarchy (including interfaces)
- * until it finds a matching whitelisted method. If one is not found, it throws an exception.
- * Otherwise it returns the matching method.
- * <p>
- * @params painlessLookup the whitelist
- * @param receiverClass Class of the object to invoke the method on.
- * @param name Name of the method.
- * @param arity arity of method
- * @return matching method to invoke. never returns null.
- * @throws IllegalArgumentException if no matching whitelisted method was found.
- */
-static PainlessMethod lookupMethodInternal(PainlessLookup painlessLookup, Class<?> receiverClass, String name, int arity) {
-String key = PainlessLookupUtility.buildPainlessMethodKey(name, arity);
-// check whitelist for matching method
-for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) {
-PainlessClass struct = painlessLookup.lookupPainlessClass(clazz);
-
-if (struct != null) {
-PainlessMethod method = struct.methods.get(key);
-if (method != null) {
-return method;
-}
-}
-
-for (Class<?> iface : clazz.getInterfaces()) {
-struct = painlessLookup.lookupPainlessClass(iface);
-
-if (struct != null) {
-PainlessMethod method = struct.methods.get(key);
-if (method != null) {
-return method;
-}
-}
-}
-}
-
-throw new IllegalArgumentException("Unable to find dynamic method [" + name + "] with [" + arity + "] arguments " +
-"for class [" + receiverClass.getCanonicalName() + "].");
-}
-
 /**
 * Looks up handle for a dynamic method call, with lambda replacement
 * <p>
@@ -241,7 +196,14 @@
 int numArguments = callSiteType.parameterCount();
 // simple case: no lambdas
 if (recipeString.isEmpty()) {
-return lookupMethodInternal(painlessLookup, receiverClass, name, numArguments - 1).methodHandle;
+PainlessMethod painlessMethod = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, numArguments - 1);
+
+if (painlessMethod == null) {
+throw new IllegalArgumentException("dynamic method " +
+"[" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + (numArguments - 1) + "] not found");
+}
+
+return painlessMethod.methodHandle;
 }

 // convert recipe string to a bitset for convenience (the code below should be refactored...)
@@ -264,7 +226,13 @@

 // lookup the method with the proper arity, then we know everything (e.g. interface types of parameters).
 // based on these we can finally link any remaining lambdas that were deferred.
-PainlessMethod method = lookupMethodInternal(painlessLookup, receiverClass, name, arity);
+PainlessMethod method = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, arity);
+
+if (method == null) {
+throw new IllegalArgumentException(
+"dynamic method [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + arity + "] not found");
+}
+
 MethodHandle handle = method.methodHandle;

 int replaced = 0;
@@ -332,15 +300,23 @@
 static MethodHandle lookupReference(PainlessLookup painlessLookup, Map<String, LocalMethod> localMethods,
 MethodHandles.Lookup methodHandlesLookup, String interfaceClass, Class<?> receiverClass, String name) throws Throwable {
 Class<?> interfaceType = painlessLookup.canonicalTypeNameToType(interfaceClass);
+if (interfaceType == null) {
+throw new IllegalArgumentException("type [" + interfaceClass + "] not found");
+}
 PainlessMethod interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(interfaceType);
 if (interfaceMethod == null) {
 throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface");
 }
 int arity = interfaceMethod.typeParameters.size();
-PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity);
+PainlessMethod implMethod = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, arity);
+if (implMethod == null) {
+throw new IllegalArgumentException(
+"dynamic method [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + arity + "] not found");
+}
+
 return lookupReferenceInternal(painlessLookup, localMethods, methodHandlesLookup,
 interfaceType, PainlessLookupUtility.typeToCanonicalTypeName(implMethod.targetClass),
 implMethod.javaMethod.getName(), 1);
 }

 /** Returns a method handle to an implementation of clazz, given method reference signature. */
@@ -389,27 +365,12 @@
 */
 static MethodHandle lookupGetter(PainlessLookup painlessLookup, Class<?> receiverClass, String name) {
 // first try whitelist
-for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) {
-PainlessClass struct = painlessLookup.lookupPainlessClass(clazz);
+MethodHandle getter = painlessLookup.lookupRuntimeGetterMethodHandle(receiverClass, name);

-if (struct != null) {
-MethodHandle handle = struct.getterMethodHandles.get(name);
-if (handle != null) {
-return handle;
-}
-}
-
-for (final Class<?> iface : clazz.getInterfaces()) {
-struct = painlessLookup.lookupPainlessClass(iface);
-
-if (struct != null) {
-MethodHandle handle = struct.getterMethodHandles.get(name);
-if (handle != null) {
-return handle;
-}
-}
-}
+if (getter != null) {
+return getter;
 }

 // special case: arrays, maps, and lists
 if (receiverClass.isArray() && "length".equals(name)) {
 // arrays expose .length as a read-only getter
@@ -426,12 +387,12 @@
 int index = Integer.parseInt(name);
 return MethodHandles.insertArguments(LIST_GET, 1, index);
 } catch (NumberFormatException exception) {
-throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "].");
+throw new IllegalArgumentException("Illegal list shortcut value [" + name + "].");
 }
 }

-throw new IllegalArgumentException("Unable to find dynamic field [" + name + "] " +
-"for class [" + receiverClass.getCanonicalName() + "].");
+throw new IllegalArgumentException(
+"dynamic getter [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "] not found");
 }

 /**
@@ -460,27 +421,12 @@
 */
 static MethodHandle lookupSetter(PainlessLookup painlessLookup, Class<?> receiverClass, String name) {
 // first try whitelist
-for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) {
-PainlessClass struct = painlessLookup.lookupPainlessClass(clazz);
+MethodHandle setter = painlessLookup.lookupRuntimeSetterMethodHandle(receiverClass, name);

-if (struct != null) {
-MethodHandle handle = struct.setterMethodHandles.get(name);
-if (handle != null) {
-return handle;
-}
-}
-
-for (final Class<?> iface : clazz.getInterfaces()) {
-struct = painlessLookup.lookupPainlessClass(iface);
-
-if (struct != null) {
-MethodHandle handle = struct.setterMethodHandles.get(name);
-if (handle != null) {
-return handle;
-}
-}
-}
+if (setter != null) {
+return setter;
 }

 // special case: maps, and lists
 if (Map.class.isAssignableFrom(receiverClass)) {
 // maps allow access like mymap.key
@@ -494,12 +440,12 @@
 int index = Integer.parseInt(name);
 return MethodHandles.insertArguments(LIST_SET, 1, index);
 } catch (final NumberFormatException exception) {
-throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "].");
+throw new IllegalArgumentException("Illegal list shortcut value [" + name + "].");
 }
 }

-throw new IllegalArgumentException("Unable to find dynamic field [" + name + "] " +
-"for class [" + receiverClass.getCanonicalName() + "].");
+throw new IllegalArgumentException(
+"dynamic getter [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "] not found");
 }

 /**
@@ -67,11 +67,11 @@ public class FunctionRef {
 PainlessMethod interfaceMethod;

 try {
-try {
-interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(targetClass);
-} catch (IllegalArgumentException iae) {
+interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(targetClass);
+
+if (interfaceMethod == null) {
 throw new IllegalArgumentException("cannot convert function reference [" + typeName + "::" + methodName + "] " +
-"to a non-functional interface [" + targetClassName + "]", iae);
+"to a non-functional interface [" + targetClassName + "]");
 }

 String interfaceMethodName = interfaceMethod.javaMethod.getName();
@@ -116,14 +116,12 @@ public class FunctionRef {
 throw new IllegalStateException("internal error");
 }

-PainlessConstructor painlessConstructor;
+PainlessConstructor painlessConstructor = painlessLookup.lookupPainlessConstructor(typeName, interfaceTypeParametersSize);

-try {
-painlessConstructor = painlessLookup.lookupPainlessConstructor(typeName, interfaceTypeParametersSize);
-} catch (IllegalArgumentException iae) {
+if (painlessConstructor == null) {
 throw new IllegalArgumentException("function reference [" + typeName + "::new/" + interfaceTypeParametersSize + "] " +
 "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " +
-"not found", iae);
+"not found");
 }

 delegateClassName = painlessConstructor.javaConstructor.getDeclaringClass().getName();
@@ -140,24 +138,21 @@ public class FunctionRef {
 }

 boolean captured = numberOfCaptures == 1;
-PainlessMethod painlessMethod;
+PainlessMethod painlessMethod =
+painlessLookup.lookupPainlessMethod(typeName, true, methodName, interfaceTypeParametersSize);

-try {
-painlessMethod = painlessLookup.lookupPainlessMethod(typeName, true, methodName, interfaceTypeParametersSize);
+if (painlessMethod == null) {
+painlessMethod = painlessLookup.lookupPainlessMethod(typeName, false, methodName,
+captured ? interfaceTypeParametersSize : interfaceTypeParametersSize - 1);

-if (captured) {
-throw new IllegalStateException("internal error");
-}
-} catch (IllegalArgumentException staticIAE) {
-try {
-painlessMethod = painlessLookup.lookupPainlessMethod(typeName, false, methodName,
-captured ? interfaceTypeParametersSize : interfaceTypeParametersSize - 1);
-} catch (IllegalArgumentException iae) {
+if (painlessMethod == null) {
 throw new IllegalArgumentException(
 "function reference " + "[" + typeName + "::" + methodName + "/" + interfaceTypeParametersSize + "] " +
 "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " +
-"not found", iae);
+"not found");
 }
+} else if (captured) {
+throw new IllegalStateException("internal error");
 }

 delegateClassName = painlessMethod.javaMethod.getDeclaringClass().getName();
@@ -19,15 +19,17 @@

 package org.elasticsearch.painless.lookup;

+import java.lang.invoke.MethodHandle;
 import java.util.Collections;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.function.Function;

 import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessConstructorKey;
 import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessFieldKey;
 import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey;
-import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName;
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToBoxedType;

 public final class PainlessLookup {

@@ -67,6 +69,10 @@ public final class PainlessLookup {

 Class<?> targetClass = canonicalTypeNameToType(targetClassName);

+if (targetClass == null) {
+return null;
+}
+
 return lookupPainlessConstructor(targetClass, constructorArity);
 }

@@ -77,15 +83,13 @@ public final class PainlessLookup {
 String painlessConstructorKey = buildPainlessConstructorKey(constructorArity);

 if (targetPainlessClass == null) {
-throw new IllegalArgumentException("target class [" + typeToCanonicalTypeName(targetClass) + "] " +
-"not found for constructor [" + painlessConstructorKey + "]");
+return null;
 }

 PainlessConstructor painlessConstructor = targetPainlessClass.constructors.get(painlessConstructorKey);

 if (painlessConstructor == null) {
-throw new IllegalArgumentException(
-"constructor [" + typeToCanonicalTypeName(targetClass) + ", " + painlessConstructorKey + "] not found");
+return null;
 }

 return painlessConstructor;
@@ -96,6 +100,10 @@ public final class PainlessLookup {

 Class<?> targetClass = canonicalTypeNameToType(targetClassName);

+if (targetClass == null) {
+return null;
+}
+
 return lookupPainlessMethod(targetClass, isStatic, methodName, methodArity);
 }

@@ -104,27 +112,19 @@ public final class PainlessLookup {
 Objects.requireNonNull(methodName);

 if (targetClass.isPrimitive()) {
-targetClass = PainlessLookupUtility.typeToBoxedType(targetClass);
+targetClass = typeToBoxedType(targetClass);
 }

 PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass);
 String painlessMethodKey = buildPainlessMethodKey(methodName, methodArity);

 if (targetPainlessClass == null) {
-throw new IllegalArgumentException(
-"target class [" + typeToCanonicalTypeName(targetClass) + "] not found for method [" + painlessMethodKey + "]");
+return null;
 }

-PainlessMethod painlessMethod = isStatic ?
+return isStatic ?
 targetPainlessClass.staticMethods.get(painlessMethodKey) :
 targetPainlessClass.methods.get(painlessMethodKey);
-
-if (painlessMethod == null) {
-throw new IllegalArgumentException(
-"method [" + typeToCanonicalTypeName(targetClass) + ", " + painlessMethodKey + "] not found");
-}
-
-return painlessMethod;
 }

 public PainlessField lookupPainlessField(String targetClassName, boolean isStatic, String fieldName) {
@@ -132,6 +132,10 @@ public final class PainlessLookup {

 Class<?> targetClass = canonicalTypeNameToType(targetClassName);

+if (targetClass == null) {
+return null;
+}
+
 return lookupPainlessField(targetClass, isStatic, fieldName);
 }

@@ -143,8 +147,7 @@ public final class PainlessLookup {
 String painlessFieldKey = buildPainlessFieldKey(fieldName);

 if (targetPainlessClass == null) {
-throw new IllegalArgumentException(
-"target class [" + typeToCanonicalTypeName(targetClass) + "] not found for field [" + painlessFieldKey + "]");
+return null;
 }

 PainlessField painlessField = isStatic ?
@@ -152,8 +155,7 @@ public final class PainlessLookup {
 targetPainlessClass.fields.get(painlessFieldKey);

 if (painlessField == null) {
-throw new IllegalArgumentException(
-"field [" + typeToCanonicalTypeName(targetClass) + ", " + painlessFieldKey + "] not found");
+return null;
 }

 return painlessField;
@@ -163,15 +165,77 @@ public final class PainlessLookup {
 PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass);

 if (targetPainlessClass == null) {
-throw new IllegalArgumentException("target class [" + typeToCanonicalTypeName(targetClass) + "] not found");
+return null;
 }

-PainlessMethod functionalInterfacePainlessMethod = targetPainlessClass.functionalInterfaceMethod;
+return targetPainlessClass.functionalInterfaceMethod;
+}

-if (functionalInterfacePainlessMethod == null) {
-throw new IllegalArgumentException("target class [" + typeToCanonicalTypeName(targetClass) + "] is not a functional interface");
+public PainlessMethod lookupRuntimePainlessMethod(Class<?> originalTargetClass, String methodName, int methodArity) {
+Objects.requireNonNull(originalTargetClass);
+Objects.requireNonNull(methodName);
+
+String painlessMethodKey = buildPainlessMethodKey(methodName, methodArity);
+Function<PainlessClass, PainlessMethod> objectLookup = targetPainlessClass -> targetPainlessClass.methods.get(painlessMethodKey);
+
+return lookupRuntimePainlessObject(originalTargetClass, objectLookup);
+}
+
+public MethodHandle lookupRuntimeGetterMethodHandle(Class<?> originalTargetClass, String getterName) {
+Objects.requireNonNull(originalTargetClass);
+Objects.requireNonNull(getterName);
+
+Function<PainlessClass, MethodHandle> objectLookup = targetPainlessClass -> targetPainlessClass.getterMethodHandles.get(getterName);
+
+return lookupRuntimePainlessObject(originalTargetClass, objectLookup);
+}
+
+public MethodHandle lookupRuntimeSetterMethodHandle(Class<?> originalTargetClass, String setterName) {
+Objects.requireNonNull(originalTargetClass);
+Objects.requireNonNull(setterName);
+
+Function<PainlessClass, MethodHandle> objectLookup = targetPainlessClass -> targetPainlessClass.setterMethodHandles.get(setterName);
+
+return lookupRuntimePainlessObject(originalTargetClass, objectLookup);
+}
+
+private <T> T lookupRuntimePainlessObject(
+Class<?> originalTargetClass, Function<PainlessClass, T> objectLookup) {
+
+Class<?> currentTargetClass = originalTargetClass;
+
+while (currentTargetClass != null) {
+PainlessClass targetPainlessClass = classesToPainlessClasses.get(currentTargetClass);
+
+if (targetPainlessClass != null) {
+T painlessObject = objectLookup.apply(targetPainlessClass);
+
+if (painlessObject != null) {
+return painlessObject;
+}
+}
+
+currentTargetClass = currentTargetClass.getSuperclass();
 }

-return functionalInterfacePainlessMethod;
+currentTargetClass = originalTargetClass;
+
+while (currentTargetClass != null) {
+for (Class<?> targetInterface : currentTargetClass.getInterfaces()) {
+PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetInterface);
+
+if (targetPainlessClass != null) {
+T painlessObject = objectLookup.apply(targetPainlessClass);
+
+if (painlessObject != null) {
+return painlessObject;
+}
+}
+}
+
+currentTargetClass = currentTargetClass.getSuperclass();
+}
+
+return null;
 }
 }
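Note: the PainlessLookup hunks above change the lookup contract rather than the lookup logic: lookupPainlessConstructor, lookupPainlessMethod, lookupPainlessField, lookupFunctionalInterfacePainlessMethod and the new lookupRuntime* helpers now return null for an unknown class or member instead of throwing, so each caller builds its own error message. A hedged sketch of the calling pattern this implies (the surrounding variables are illustrative, as in Def.lookupGetter above):

    // Sketch of the null-returning contract; receiverClass and name are assumed locals.
    MethodHandle getter = painlessLookup.lookupRuntimeGetterMethodHandle(receiverClass, name);

    if (getter == null) {
        // null now means "not whitelisted anywhere in the superclass/interface hierarchy";
        // the caller decides whether to fall back (arrays, maps, lists) or raise its own error.
        throw new IllegalArgumentException(
                "dynamic getter [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "] not found");
    }

    return getter;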
@@ -220,8 +220,12 @@ public final class PainlessLookupBuilder {
 return PainlessLookupUtility.canonicalTypeNameToType(canonicalTypeName, canonicalClassNamesToClasses);
 }

-private void validateType(Class<?> type) {
-PainlessLookupUtility.validateType(type, classesToPainlessClassBuilders.keySet());
+private boolean isValidType(Class<?> type) {
+while (type.getComponentType() != null) {
+type = type.getComponentType();
+}
+
+return classesToPainlessClassBuilders.containsKey(type);
 }

 public void addPainlessClass(ClassLoader classLoader, String javaClassName, boolean importClassName) {
@@ -325,13 +329,14 @@ public final class PainlessLookupBuilder {
 List<Class<?>> typeParameters = new ArrayList<>(typeNameParameters.size());

 for (String typeNameParameter : typeNameParameters) {
-try {
-Class<?> typeParameter = canonicalTypeNameToType(typeNameParameter);
-typeParameters.add(typeParameter);
-} catch (IllegalArgumentException iae) {
+Class<?> typeParameter = canonicalTypeNameToType(typeNameParameter);
+
+if (typeParameter == null) {
 throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " +
-"for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]", iae);
+"for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]");
 }
+
+typeParameters.add(typeParameter);
 }

 addPainlessConstructor(targetClass, typeParameters);
@@ -357,11 +362,9 @@ public final class PainlessLookupBuilder {
 List<Class<?>> javaTypeParameters = new ArrayList<>(typeParametersSize);

 for (Class<?> typeParameter : typeParameters) {
-try {
-validateType(typeParameter);
-} catch (IllegalArgumentException iae) {
+if (isValidType(typeParameter) == false) {
 throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " +
-"for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae);
+"for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
 }

 javaTypeParameters.add(typeToJavaType(typeParameter));
@@ -435,22 +438,21 @@ public final class PainlessLookupBuilder {
 List<Class<?>> typeParameters = new ArrayList<>(typeNameParameters.size());

 for (String typeNameParameter : typeNameParameters) {
-try {
-Class<?> typeParameter = canonicalTypeNameToType(typeNameParameter);
-typeParameters.add(typeParameter);
-} catch (IllegalArgumentException iae) {
+Class<?> typeParameter = canonicalTypeNameToType(typeNameParameter);
+
+if (typeParameter == null) {
 throw new IllegalArgumentException("parameter type [" + typeNameParameter + "] not found for method " +
-"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", iae);
+"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]");
 }
+
+typeParameters.add(typeParameter);
 }

-Class<?> returnType;
+Class<?> returnType = canonicalTypeNameToType(returnCanonicalTypeName);

-try {
-returnType = canonicalTypeNameToType(returnCanonicalTypeName);
-} catch (IllegalArgumentException iae) {
+if (returnType == null) {
 throw new IllegalArgumentException("parameter type [" + returnCanonicalTypeName + "] not found for method " +
-"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", iae);
+"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]");
 }

 addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters);
@@ -490,22 +492,18 @@ public final class PainlessLookupBuilder {
 }

 for (Class<?> typeParameter : typeParameters) {
-try {
-validateType(typeParameter);
-} catch (IllegalArgumentException iae) {
+if (isValidType(typeParameter) == false) {
 throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " +
 "not found for method [[" + targetCanonicalClassName + "], [" + methodName + "], " +
-typesToCanonicalTypeNames(typeParameters) + "]", iae);
+typesToCanonicalTypeNames(typeParameters) + "]");
 }

 javaTypeParameters.add(typeToJavaType(typeParameter));
 }

-try {
-validateType(returnType);
-} catch (IllegalArgumentException iae) {
+if (isValidType(returnType) == false) {
 throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for method " +
-"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae);
+"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
 }

 Method javaMethod;
@@ -620,11 +618,9 @@ public final class PainlessLookupBuilder {
 throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found");
 }

-Class<?> typeParameter;
+Class<?> typeParameter = canonicalTypeNameToType(typeNameParameter);

-try {
-typeParameter = canonicalTypeNameToType(typeNameParameter);
-} catch (IllegalArgumentException iae) {
+if (typeParameter == null) {
 throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " +
 "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]");
 }
@@ -656,11 +652,9 @@ public final class PainlessLookupBuilder {
 throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found");
 }

-try {
-validateType(typeParameter);
-} catch (IllegalArgumentException iae) {
+if (isValidType(typeParameter) == false) {
 throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " +
-"for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", iae);
+"for field [[" + targetCanonicalClassName + "], [" + fieldName + "]");
 }

 Field javaField;
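Note: in PainlessLookupBuilder the old validateType helper, which threw, becomes a boolean isValidType that strips array dimensions down to the component type before checking the builder's class map. A short sketch of the intended semantics (the concrete types are assumptions for illustration and depend on what has been whitelisted):

    // isValidType unwraps arrays first, so int[][] is valid whenever int is registered.
    assert isValidType(int.class);
    assert isValidType(int[][].class);
    // A type that was never added to classesToPainlessClassBuilders now yields false
    // instead of an IllegalArgumentException thrown from inside the helper.
    assert isValidType(ThreadGroup.class) == false;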
@@ -20,7 +20,6 @@
 package org.elasticsearch.painless.lookup;

 import java.util.Arrays;
-import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -101,45 +100,47 @@ public final class PainlessLookupUtility {
 canonicalTypeName.charAt(arrayIndex++) == ']') {
 ++arrayDimensions;
 } else {
-throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found");
+return null;
 }
 }

 canonicalTypeName = canonicalTypeName.substring(0, canonicalTypeName.indexOf('['));
 type = canonicalClassNamesToClasses.get(canonicalTypeName);

-char arrayBraces[] = new char[arrayDimensions];
-Arrays.fill(arrayBraces, '[');
-String javaTypeName = new String(arrayBraces);
+if (type != null) {
+char arrayBraces[] = new char[arrayDimensions];
+Arrays.fill(arrayBraces, '[');
+String javaTypeName = new String(arrayBraces);

 if (type == boolean.class) {
 javaTypeName += "Z";
 } else if (type == byte.class) {
 javaTypeName += "B";
 } else if (type == short.class) {
 javaTypeName += "S";
 } else if (type == char.class) {
 javaTypeName += "C";
 } else if (type == int.class) {
 javaTypeName += "I";
 } else if (type == long.class) {
 javaTypeName += "J";
 } else if (type == float.class) {
 javaTypeName += "F";
 } else if (type == double.class) {
 javaTypeName += "D";
 } else {
 javaTypeName += "L" + type.getName() + ";";
 }

 try {
 return Class.forName(javaTypeName);
 } catch (ClassNotFoundException cnfe) {
-throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found", cnfe);
+throw new IllegalStateException("internal error", cnfe);
+}
 }
 }

-throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found");
+return null;
 }

 /**
@@ -152,7 +153,9 @@ public final class PainlessLookupUtility {

 String canonicalTypeName = type.getCanonicalName();

-if (canonicalTypeName.startsWith(def.class.getCanonicalName())) {
+if (canonicalTypeName == null) {
+canonicalTypeName = ANONYMOUS_CLASS_NAME;
+} else if (canonicalTypeName.startsWith(def.class.getCanonicalName())) {
 canonicalTypeName = canonicalTypeName.replace(def.class.getCanonicalName(), DEF_CLASS_NAME);
 }

@@ -252,22 +255,6 @@ public final class PainlessLookupUtility {
 return type;
 }

-/**
- * Ensures a type exists based on the terminology specified as part of {@link PainlessLookupUtility}. Throws an
- * {@link IllegalArgumentException} if the type does not exist.
- */
-public static void validateType(Class<?> type, Collection<Class<?>> classes) {
-String canonicalTypeName = typeToCanonicalTypeName(type);
-
-while (type.getComponentType() != null) {
-type = type.getComponentType();
-}
-
-if (classes.contains(type) == false) {
-throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found");
-}
-}
-
 /**
 * Converts a type to its boxed type equivalent if one exists based on the terminology specified as part of
 * {@link PainlessLookupUtility}. Otherwise, this behaves as an identity function.
@@ -357,6 +344,11 @@ public final class PainlessLookupUtility {
 return fieldName;
 }

+/**
+ * The name for an anonymous class.
+ */
+public static final String ANONYMOUS_CLASS_NAME = "$anonymous";
+
 /**
 * The def type name as specified in the source for a script.
 */
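Note: PainlessLookupUtility.canonicalTypeNameToType now reports an unknown canonical name by returning null instead of throwing, and typeToCanonicalTypeName maps classes without a canonical name to the new ANONYMOUS_CLASS_NAME constant. The node classes below (EExplicit, EInstanceof, EListInit, EMapInit, ENewArray, ENewObj) are updated to do the null check themselves; a hedged sketch of that pattern, with an illustrative type name that is not taken from this commit:

    // "some.unknown.Type" is a placeholder; a whitelisted name such as "List" resolves as before.
    Class<?> clazz = locals.getPainlessLookup().canonicalTypeNameToType("some.unknown.Type");

    if (clazz == null) {
        // the AST node, not the lookup, now owns the "Not a type [...]." error
        throw createError(new IllegalArgumentException("Not a type [some.unknown.Type]."));
    }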
@@ -49,9 +49,9 @@ public final class EExplicit extends AExpression {

 @Override
 void analyze(Locals locals) {
-try {
-actual = locals.getPainlessLookup().canonicalTypeNameToType(type);
-} catch (IllegalArgumentException exception) {
+actual = locals.getPainlessLookup().canonicalTypeNameToType(type);
+
+if (actual == null) {
 throw createError(new IllegalArgumentException("Not a type [" + type + "]."));
 }

@@ -54,12 +54,11 @@ public final class EInstanceof extends AExpression {

 @Override
 void analyze(Locals locals) {
-Class<?> clazz;

 // ensure the specified type is part of the definition
-try {
-clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
-} catch (IllegalArgumentException exception) {
+Class<?> clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
+
+if (clazz == null) {
 throw createError(new IllegalArgumentException("Not a type [" + this.type + "]."));
 }

@@ -33,6 +33,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;

+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName;
+
 /**
 * Represents a list initialization shortcut.
 */
@@ -63,16 +65,17 @@ public final class EListInit extends AExpression {

 actual = ArrayList.class;

-try {
-constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, 0);
-} catch (IllegalArgumentException iae) {
-throw createError(iae);
+constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, 0);
+
+if (constructor == null) {
+throw createError(new IllegalArgumentException(
+"constructor [" + typeToCanonicalTypeName(actual) + ", <init>/0] not found"));
 }

-try {
-method = locals.getPainlessLookup().lookupPainlessMethod(actual, false, "add", 1);
-} catch (IllegalArgumentException iae) {
-throw createError(iae);
+method = locals.getPainlessLookup().lookupPainlessMethod(actual, false, "add", 1);
+
+if (method == null) {
+throw createError(new IllegalArgumentException("method [" + typeToCanonicalTypeName(actual) + ", add/1] not found"));
 }

 for (int index = 0; index < values.size(); ++index) {
@@ -33,6 +33,8 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Set;
 
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName;
+
 /**
  * Represents a map initialization shortcut.
  */
@@ -69,16 +71,17 @@ public final class EMapInit extends AExpression {
 
         actual = HashMap.class;
 
-        try {
-            constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, 0);
-        } catch (IllegalArgumentException iae) {
-            throw createError(iae);
+        constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, 0);
+
+        if (constructor == null) {
+            throw createError(new IllegalArgumentException(
+                    "constructor [" + typeToCanonicalTypeName(actual) + ", <init>/0] not found"));
         }
 
-        try {
-            method = locals.getPainlessLookup().lookupPainlessMethod(actual, false, "put", 2);
-        } catch (IllegalArgumentException iae) {
-            throw createError(iae);
+        method = locals.getPainlessLookup().lookupPainlessMethod(actual, false, "put", 2);
+
+        if (method == null) {
+            throw createError(new IllegalArgumentException("method [" + typeToCanonicalTypeName(actual) + ", put/2] not found"));
         }
 
         if (keys.size() != values.size()) {
@@ -54,15 +54,13 @@ public final class ENewArray extends AExpression {
 
     @Override
     void analyze(Locals locals) {
         if (!read) {
             throw createError(new IllegalArgumentException("A newly created array must be read from."));
         }
 
-        Class<?> clazz;
+        Class<?> clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
 
-        try {
-            clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
-        } catch (IllegalArgumentException exception) {
+        if (clazz == null) {
             throw createError(new IllegalArgumentException("Not a type [" + this.type + "]."));
         }
@@ -32,6 +32,8 @@ import java.util.List;
 import java.util.Objects;
 import java.util.Set;
 
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName;
+
 /**
  * Represents and object instantiation.
  */
@@ -58,16 +60,17 @@ public final class ENewObj extends AExpression {
 
     @Override
     void analyze(Locals locals) {
-        try {
-            actual = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
-        } catch (IllegalArgumentException exception) {
+        actual = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
+
+        if (actual == null) {
             throw createError(new IllegalArgumentException("Not a type [" + this.type + "]."));
         }
 
-        try {
-            constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, arguments.size());
-        } catch (IllegalArgumentException iae) {
-            throw createError(iae);
+        constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, arguments.size());
+
+        if (constructor == null) {
+            throw createError(new IllegalArgumentException(
+                    "constructor [" + typeToCanonicalTypeName(actual) + ", <init>/" + arguments.size() + "] not found"));
         }
 
         Class<?>[] types = new Class<?>[constructor.typeParameters.size()];
@@ -47,9 +47,9 @@ public final class EStatic extends AExpression {
 
     @Override
     void analyze(Locals locals) {
-        try {
-            actual = locals.getPainlessLookup().canonicalTypeNameToType(type);
-        } catch (IllegalArgumentException exception) {
+        actual = locals.getPainlessLookup().canonicalTypeNameToType(type);
+
+        if (actual == null) {
             throw createError(new IllegalArgumentException("Not a type [" + type + "]."));
         }
     }
@@ -30,6 +30,8 @@ import java.util.List;
 import java.util.Objects;
 import java.util.Set;
 
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName;
+
 /**
  * Represents a method call and defers to a child subnode.
  */
@@ -67,13 +69,15 @@ public final class PCallInvoke extends AExpression {
         if (prefix.actual == def.class) {
             sub = new PSubDefCall(location, name, arguments);
         } else {
-            try {
-                PainlessMethod method =
-                        locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, prefix instanceof EStatic, name, arguments.size());
-                sub = new PSubCallInvoke(location, method, prefix.actual, arguments);
-            } catch (IllegalArgumentException iae) {
-                throw createError(iae);
+            PainlessMethod method =
+                    locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, prefix instanceof EStatic, name, arguments.size());
+
+            if (method == null) {
+                throw createError(new IllegalArgumentException(
+                        "method [" + typeToCanonicalTypeName(prefix.actual) + ", " + name + "/" + arguments.size() + "] not found"));
             }
+
+            sub = new PSubCallInvoke(location, method, prefix.actual, arguments);
         }
 
         if (nullSafe) {
@@ -23,6 +23,7 @@ import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
 import org.elasticsearch.painless.MethodWriter;
+import org.elasticsearch.painless.lookup.PainlessField;
 import org.elasticsearch.painless.lookup.PainlessLookupUtility;
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.def;
@@ -32,6 +33,8 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName;
+
 /**
  * Represents a field load/store and defers to a child subnode.
  */
@@ -65,31 +68,22 @@ public final class PField extends AStoreable {
         } else if (prefix.actual == def.class) {
             sub = new PSubDefField(location, value);
         } else {
-            try {
-                sub = new PSubField(location,
-                        locals.getPainlessLookup().lookupPainlessField(prefix.actual, prefix instanceof EStatic, value));
-            } catch (IllegalArgumentException fieldIAE) {
+            PainlessField field = locals.getPainlessLookup().lookupPainlessField(prefix.actual, prefix instanceof EStatic, value);
+
+            if (field == null) {
                 PainlessMethod getter;
                 PainlessMethod setter;
 
-                try {
-                    getter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false,
-                            "get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0);
-                } catch (IllegalArgumentException getIAE) {
-                    try {
-                        getter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false,
-                                "is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0);
-                    } catch (IllegalArgumentException isIAE) {
-                        getter = null;
-                    }
+                getter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false,
+                        "get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0);
+
+                if (getter == null) {
+                    getter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false,
+                            "is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0);
                 }
 
-                try {
-                    setter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false,
-                            "set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0);
-                } catch (IllegalArgumentException setIAE) {
-                    setter = null;
-                }
+                setter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false,
+                        "set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0);
 
                 if (getter != null || setter != null) {
                     sub = new PSubShortcut(location, value, PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual), getter, setter);
@@ -107,8 +101,11 @@ public final class PField extends AStoreable {
                 }
 
                 if (sub == null) {
-                    throw createError(fieldIAE);
+                    throw createError(new IllegalArgumentException(
+                            "field [" + typeToCanonicalTypeName(prefix.actual) + ", " + value + "] not found"));
                 }
+            } else {
+                sub = new PSubField(location, field);
             }
         }
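The PField change above also keeps the getter/setter shortcut probing, now expressed with null checks: try a "get" accessor first, then fall back to "is". A rough, self-contained sketch of that fallback; the Map of method names is a hypothetical stand-in for the real PainlessLookup, used only to illustrate the control flow.

    import java.util.Map;

    class GetterShortcutSketch {
        // Mirrors the "get" then "is" probing in the hunk above; a null result
        // simply means no shortcut getter exists for the property.
        static String findGetter(Map<String, String> methodsByName, String property) {
            String suffix = Character.toUpperCase(property.charAt(0)) + property.substring(1);
            String getter = methodsByName.get("get" + suffix);
            if (getter == null) {
                getter = methodsByName.get("is" + suffix);
            }
            return getter;
        }
    }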
@@ -57,12 +57,8 @@ final class PSubListShortcut extends AStoreable {
     void analyze(Locals locals) {
         String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass);
 
-        try {
-            getter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1);
-            setter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "set", 2);
-        } catch (IllegalArgumentException iae) {
-            throw createError(iae);
-        }
+        getter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1);
+        setter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "set", 2);
 
         if (getter != null && (getter.returnType == void.class || getter.typeParameters.size() != 1 ||
             getter.typeParameters.get(0) != int.class)) {
@@ -56,12 +56,8 @@ final class PSubMapShortcut extends AStoreable {
     void analyze(Locals locals) {
         String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass);
 
-        try {
-            getter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1);
-            setter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "put", 2);
-        } catch (IllegalArgumentException iae) {
-            throw createError(iae);
-        }
+        getter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1);
+        setter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "put", 2);
 
         if (getter != null && (getter.returnType == void.class || getter.typeParameters.size() != 1)) {
             throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + canonicalClassName + "]."));
@@ -64,11 +64,9 @@ public final class SCatch extends AStatement {
 
     @Override
     void analyze(Locals locals) {
-        Class<?> clazz;
+        Class<?> clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
 
-        try {
-            clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
-        } catch (IllegalArgumentException exception) {
+        if (clazz == null) {
             throw createError(new IllegalArgumentException("Not a type [" + this.type + "]."));
         }
@@ -59,11 +59,9 @@ public final class SDeclaration extends AStatement {
 
     @Override
     void analyze(Locals locals) {
-        Class<?> clazz;
+        Class<?> clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
 
-        try {
-            clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
-        } catch (IllegalArgumentException exception) {
+        if (clazz == null) {
             throw createError(new IllegalArgumentException("Not a type [" + this.type + "]."));
         }
@@ -68,11 +68,9 @@ public class SEach extends AStatement {
         expression.expected = expression.actual;
         expression = expression.cast(locals);
 
-        Class<?> clazz;
+        Class<?> clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
 
-        try {
-            clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type);
-        } catch (IllegalArgumentException exception) {
+        if (clazz == null) {
             throw createError(new IllegalArgumentException("Not a type [" + this.type + "]."));
         }
@@ -115,9 +115,9 @@ public final class SFunction extends AStatement {
     }
 
     void generateSignature(PainlessLookup painlessLookup) {
-        try {
-            returnType = painlessLookup.canonicalTypeNameToType(rtnTypeStr);
-        } catch (IllegalArgumentException exception) {
+        returnType = painlessLookup.canonicalTypeNameToType(rtnTypeStr);
+
+        if (returnType == null) {
             throw createError(new IllegalArgumentException("Illegal return type [" + rtnTypeStr + "] for function [" + name + "]."));
         }
@@ -129,16 +129,16 @@ public final class SFunction extends AStatement {
         List<Class<?>> paramTypes = new ArrayList<>();
 
         for (int param = 0; param < this.paramTypeStrs.size(); ++param) {
-            try {
             Class<?> paramType = painlessLookup.canonicalTypeNameToType(this.paramTypeStrs.get(param));
 
-                paramClasses[param] = PainlessLookupUtility.typeToJavaType(paramType);
-                paramTypes.add(paramType);
-                parameters.add(new Parameter(location, paramNameStrs.get(param), paramType));
-            } catch (IllegalArgumentException exception) {
+            if (paramType == null) {
                 throw createError(new IllegalArgumentException(
                     "Illegal parameter type [" + this.paramTypeStrs.get(param) + "] for function [" + name + "]."));
             }
 
+            paramClasses[param] = PainlessLookupUtility.typeToJavaType(paramType);
+            paramTypes.add(paramType);
+            parameters.add(new Parameter(location, paramNameStrs.get(param), paramType));
         }
 
         typeParameters = paramTypes;
@@ -40,6 +40,7 @@ import java.util.Set;
 import static org.elasticsearch.painless.WriterConstants.ITERATOR_HASNEXT;
 import static org.elasticsearch.painless.WriterConstants.ITERATOR_NEXT;
 import static org.elasticsearch.painless.WriterConstants.ITERATOR_TYPE;
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName;
 
 /**
  * Represents a for-each loop for iterables.
@@ -76,10 +77,11 @@ final class SSubEachIterable extends AStatement {
         if (expression.actual == def.class) {
             method = null;
         } else {
-            try {
-                method = locals.getPainlessLookup().lookupPainlessMethod(expression.actual, false, "iterator", 0);
-            } catch (IllegalArgumentException iae) {
-                throw createError(iae);
+            method = locals.getPainlessLookup().lookupPainlessMethod(expression.actual, false, "iterator", 0);
+
+            if (method == null) {
+                throw createError(new IllegalArgumentException(
+                        "method [" + typeToCanonicalTypeName(expression.actual) + ", iterator/0] not found"));
             }
         }
@@ -134,7 +134,7 @@ public class DefBootstrapTests extends ESTestCase {
         final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> {
             Integer.toString((int)handle.invokeExact(new Object()));
         });
-        assertEquals("Unable to find dynamic method [size] with [0] arguments for class [java.lang.Object].", iae.getMessage());
+        assertEquals("dynamic method [java.lang.Object, size/0] not found", iae.getMessage());
         assertTrue("Does not fail inside ClassValue.computeValue()", Arrays.stream(iae.getStackTrace()).anyMatch(e -> {
             return e.getMethodName().equals("computeValue") &&
                 e.getClassName().startsWith("org.elasticsearch.painless.DefBootstrap$PIC$");
@@ -37,7 +37,7 @@ public class OverloadTests extends ScriptTestCase {
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("def x = 'abc123abc'; return x.indexOf('c', 3, 'bogus');");
         });
-        assertTrue(expected.getMessage().contains("dynamic method [indexOf]"));
+        assertTrue(expected.getMessage().contains("dynamic method [java.lang.String, indexOf/3] not found"));
     }
 
     public void testConstructor() {
@@ -219,7 +219,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase {
         IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
             exec("def x = 'test'; return x.getClass().toString()");
         });
-        assertTrue(expected.getMessage().contains("Unable to find dynamic method"));
+        assertTrue(expected.getMessage().contains("dynamic method [java.lang.String, getClass/0] not found"));
     }
 
     public void testDynamicNPE() {
@@ -7,7 +7,6 @@ setup:
           test:
             properties:
               numeric_group: { type: integer }
-              group_alias: { type: alias, path: numeric_group }
 
   - do:
       index:
@@ -354,6 +353,14 @@ setup:
   - skip:
       version: " - 6.3.99"
       reason: Field aliases were introduced in 6.4.0.
+  - do:
+      indices.put_mapping:
+        index: test
+        type: test
+        body:
+          test:
+            properties:
+              group_alias: { type: alias, path: numeric_group }
   - do:
       search:
         index: test
@@ -20,8 +20,9 @@
 package org.elasticsearch.action.ingest;
 
 import org.elasticsearch.action.Action;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 
-public class DeletePipelineAction extends Action<WritePipelineResponse> {
+public class DeletePipelineAction extends Action<AcknowledgedResponse> {
 
     public static final DeletePipelineAction INSTANCE = new DeletePipelineAction();
     public static final String NAME = "cluster:admin/ingest/pipeline/delete";
|
@ -31,7 +32,7 @@ public class DeletePipelineAction extends Action<WritePipelineResponse> {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public WritePipelineResponse newResponse() {
|
public AcknowledgedResponse newResponse() {
|
||||||
return new WritePipelineResponse();
|
return new AcknowledgedResponse();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -20,9 +20,10 @@
 package org.elasticsearch.action.ingest;
 
 import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.ElasticsearchClient;
 
-public class DeletePipelineRequestBuilder extends ActionRequestBuilder<DeletePipelineRequest, WritePipelineResponse> {
+public class DeletePipelineRequestBuilder extends ActionRequestBuilder<DeletePipelineRequest, AcknowledgedResponse> {
 
     public DeletePipelineRequestBuilder(ElasticsearchClient client, DeletePipelineAction action) {
         super(client, action, new DeletePipelineRequest());
@@ -21,6 +21,7 @@ package org.elasticsearch.action.ingest;
 
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
@@ -34,7 +35,7 @@ import org.elasticsearch.node.NodeService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
-public class DeletePipelineTransportAction extends TransportMasterNodeAction<DeletePipelineRequest, WritePipelineResponse> {
+public class DeletePipelineTransportAction extends TransportMasterNodeAction<DeletePipelineRequest, AcknowledgedResponse> {
 
     private final PipelineStore pipelineStore;
     private final ClusterService clusterService;
@@ -54,12 +55,12 @@ public class DeletePipelineTransportAction extends TransportMasterNodeAction<Del
     }
 
     @Override
-    protected WritePipelineResponse newResponse() {
-        return new WritePipelineResponse();
+    protected AcknowledgedResponse newResponse() {
+        return new AcknowledgedResponse();
     }
 
     @Override
-    protected void masterOperation(DeletePipelineRequest request, ClusterState state, ActionListener<WritePipelineResponse> listener) throws Exception {
+    protected void masterOperation(DeletePipelineRequest request, ClusterState state, ActionListener<AcknowledgedResponse> listener) throws Exception {
         pipelineStore.delete(clusterService, request, listener);
     }
@@ -20,8 +20,9 @@
 package org.elasticsearch.action.ingest;
 
 import org.elasticsearch.action.Action;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 
-public class PutPipelineAction extends Action<WritePipelineResponse> {
+public class PutPipelineAction extends Action<AcknowledgedResponse> {
 
     public static final PutPipelineAction INSTANCE = new PutPipelineAction();
     public static final String NAME = "cluster:admin/ingest/pipeline/put";
@@ -31,7 +32,7 @@ public class PutPipelineAction extends Action<WritePipelineResponse> {
     }
 
     @Override
-    public WritePipelineResponse newResponse() {
-        return new WritePipelineResponse();
+    public AcknowledgedResponse newResponse() {
+        return new AcknowledgedResponse();
     }
 }
@@ -20,11 +20,12 @@
 package org.elasticsearch.action.ingest;
 
 import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentType;
 
-public class PutPipelineRequestBuilder extends ActionRequestBuilder<PutPipelineRequest, WritePipelineResponse> {
+public class PutPipelineRequestBuilder extends ActionRequestBuilder<PutPipelineRequest, AcknowledgedResponse> {
 
     public PutPipelineRequestBuilder(ElasticsearchClient client, PutPipelineAction action) {
         super(client, action, new PutPipelineRequest());
@@ -24,6 +24,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
 import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.cluster.ClusterState;
@@ -43,7 +44,7 @@ import org.elasticsearch.transport.TransportService;
 import java.util.HashMap;
 import java.util.Map;
 
-public class PutPipelineTransportAction extends TransportMasterNodeAction<PutPipelineRequest, WritePipelineResponse> {
+public class PutPipelineTransportAction extends TransportMasterNodeAction<PutPipelineRequest, AcknowledgedResponse> {
 
     private final PipelineStore pipelineStore;
     private final ClusterService clusterService;
@@ -66,12 +67,12 @@ public class PutPipelineTransportAction extends TransportMasterNodeAction<PutPip
     }
 
     @Override
-    protected WritePipelineResponse newResponse() {
-        return new WritePipelineResponse();
+    protected AcknowledgedResponse newResponse() {
+        return new AcknowledgedResponse();
     }
 
     @Override
-    protected void masterOperation(PutPipelineRequest request, ClusterState state, ActionListener<WritePipelineResponse> listener) throws Exception {
+    protected void masterOperation(PutPipelineRequest request, ClusterState state, ActionListener<AcknowledgedResponse> listener) throws Exception {
         NodesInfoRequest nodesInfoRequest = new NodesInfoRequest();
         nodesInfoRequest.clear();
         nodesInfoRequest.ingest(true);
@@ -1,38 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.action.ingest;
-
-import org.elasticsearch.action.support.master.AcknowledgedResponse;
-import org.elasticsearch.common.xcontent.ToXContentObject;
-import org.elasticsearch.common.xcontent.XContentParser;
-
-public class WritePipelineResponse extends AcknowledgedResponse implements ToXContentObject {
-
-    WritePipelineResponse() {
-    }
-
-    public WritePipelineResponse(boolean acknowledged) {
-        super(acknowledged);
-    }
-
-    public static WritePipelineResponse fromXContent(XContentParser parser) {
-        return new WritePipelineResponse(parseAcknowledged(parser));
-    }
-}
@@ -34,10 +34,9 @@ import java.util.Objects;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 
 /**
- * Abstract class that allows to mark action responses that support acknowledgements.
- * Facilitates consistency across different api.
+ * A response that indicates that a request has been acknowledged
  */
-public abstract class AcknowledgedResponse extends ActionResponse implements ToXContentObject {
+public class AcknowledgedResponse extends ActionResponse implements ToXContentObject {
 
     private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
 
@@ -48,11 +47,10 @@ public abstract class AcknowledgedResponse extends ActionResponse implements ToX
 
     protected boolean acknowledged;
 
-    protected AcknowledgedResponse() {
+    public AcknowledgedResponse() {
 
     }
 
-    protected AcknowledgedResponse(boolean acknowledged) {
+    public AcknowledgedResponse(boolean acknowledged) {
         this.acknowledged = acknowledged;
     }
 
@@ -100,10 +98,15 @@ public abstract class AcknowledgedResponse extends ActionResponse implements ToX
             ObjectParser.ValueType.BOOLEAN);
     }
 
-    protected static boolean parseAcknowledged(XContentParser parser) {
+    @Deprecated
+    public static boolean parseAcknowledged(XContentParser parser) {
         return ACKNOWLEDGED_FLAG_PARSER.apply(parser, null);
     }
 
+    public static AcknowledgedResponse fromXContent(XContentParser parser) throws IOException {
+        return new AcknowledgedResponse(ACKNOWLEDGED_FLAG_PARSER.apply(parser, null));
+    }
+
     @Override
     public boolean equals(Object o) {
         if (this == o) {
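With AcknowledgedResponse now concrete, callers that previously depended on WritePipelineResponse can work against the shared type directly. A small hedged sketch of the consuming side; the helper class and method are hypothetical and rely only on the isAcknowledged() accessor that the surrounding diffs already use.

    import org.elasticsearch.action.support.master.AcknowledgedResponse;

    class AcknowledgedCheckSketch {
        // Fails loudly when a cluster-state change was not acknowledged; the
        // response is the concrete AcknowledgedResponse introduced above.
        static void requireAcknowledged(AcknowledgedResponse response, String description) {
            if (response.isAcknowledged() == false) {
                throw new IllegalStateException(description + " was not acknowledged");
            }
        }
    }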
@@ -113,7 +113,7 @@ import org.elasticsearch.action.ingest.PutPipelineRequestBuilder;
 import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder;
 import org.elasticsearch.action.ingest.SimulatePipelineResponse;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.tasks.TaskId;
@@ -574,12 +574,12 @@ public interface ClusterAdminClient extends ElasticsearchClient {
     /**
      * Stores an ingest pipeline
      */
-    void putPipeline(PutPipelineRequest request, ActionListener<WritePipelineResponse> listener);
+    void putPipeline(PutPipelineRequest request, ActionListener<AcknowledgedResponse> listener);
 
     /**
      * Stores an ingest pipeline
      */
-    ActionFuture<WritePipelineResponse> putPipeline(PutPipelineRequest request);
+    ActionFuture<AcknowledgedResponse> putPipeline(PutPipelineRequest request);
 
     /**
      * Stores an ingest pipeline
@@ -596,12 +596,12 @@ public interface ClusterAdminClient extends ElasticsearchClient {
     /**
      * Deletes a stored ingest pipeline
      */
-    void deletePipeline(DeletePipelineRequest request, ActionListener<WritePipelineResponse> listener);
+    void deletePipeline(DeletePipelineRequest request, ActionListener<AcknowledgedResponse> listener);
 
     /**
      * Deletes a stored ingest pipeline
      */
-    ActionFuture<WritePipelineResponse> deletePipeline(DeletePipelineRequest request);
+    ActionFuture<AcknowledgedResponse> deletePipeline(DeletePipelineRequest request);
 
     /**
      * Deletes a stored ingest pipeline
@@ -307,7 +307,6 @@ import org.elasticsearch.action.ingest.SimulatePipelineAction;
 import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder;
 import org.elasticsearch.action.ingest.SimulatePipelineResponse;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
 import org.elasticsearch.action.search.ClearScrollAction;
 import org.elasticsearch.action.search.ClearScrollRequest;
 import org.elasticsearch.action.search.ClearScrollRequestBuilder;
@@ -325,6 +324,7 @@ import org.elasticsearch.action.search.SearchScrollRequest;
 import org.elasticsearch.action.search.SearchScrollRequestBuilder;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.action.support.ThreadedActionListener;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.termvectors.MultiTermVectorsAction;
 import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
 import org.elasticsearch.action.termvectors.MultiTermVectorsRequestBuilder;
@@ -1082,12 +1082,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }
 
     @Override
-    public void putPipeline(PutPipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+    public void putPipeline(PutPipelineRequest request, ActionListener<AcknowledgedResponse> listener) {
         execute(PutPipelineAction.INSTANCE, request, listener);
     }
 
     @Override
-    public ActionFuture<WritePipelineResponse> putPipeline(PutPipelineRequest request) {
+    public ActionFuture<AcknowledgedResponse> putPipeline(PutPipelineRequest request) {
         return execute(PutPipelineAction.INSTANCE, request);
     }
 
@@ -1102,12 +1102,12 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }
 
     @Override
-    public void deletePipeline(DeletePipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+    public void deletePipeline(DeletePipelineRequest request, ActionListener<AcknowledgedResponse> listener) {
         execute(DeletePipelineAction.INSTANCE, request, listener);
     }
 
     @Override
-    public ActionFuture<WritePipelineResponse> deletePipeline(DeletePipelineRequest request) {
+    public ActionFuture<AcknowledgedResponse> deletePipeline(DeletePipelineRequest request) {
         return execute(DeletePipelineAction.INSTANCE, request);
     }
 
@@ -70,4 +70,5 @@ public class CompoundDateTimeFormatter {
     public String format(TemporalAccessor accessor) {
         return printer.format(accessor);
     }
+
 }
File diff suppressed because it is too large
@@ -25,7 +25,7 @@ import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ingest.DeletePipelineRequest;
 import org.elasticsearch.action.ingest.PutPipelineRequest;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
 import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.ClusterState;
@@ -121,13 +121,13 @@ public class PipelineStore extends AbstractComponent implements ClusterStateAppl
     /**
      * Deletes the pipeline specified by id in the request.
      */
-    public void delete(ClusterService clusterService, DeletePipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+    public void delete(ClusterService clusterService, DeletePipelineRequest request, ActionListener<AcknowledgedResponse> listener) {
         clusterService.submitStateUpdateTask("delete-pipeline-" + request.getId(),
-                new AckedClusterStateUpdateTask<WritePipelineResponse>(request, listener) {
+                new AckedClusterStateUpdateTask<AcknowledgedResponse>(request, listener) {
 
             @Override
-            protected WritePipelineResponse newResponse(boolean acknowledged) {
-                return new WritePipelineResponse(acknowledged);
+            protected AcknowledgedResponse newResponse(boolean acknowledged) {
+                return new AcknowledgedResponse(acknowledged);
             }
 
             @Override
@@ -169,15 +169,15 @@ public class PipelineStore extends AbstractComponent implements ClusterStateAppl
      * Stores the specified pipeline definition in the request.
      */
     public void put(ClusterService clusterService, Map<DiscoveryNode, IngestInfo> ingestInfos, PutPipelineRequest request,
-                    ActionListener<WritePipelineResponse> listener) throws Exception {
+                    ActionListener<AcknowledgedResponse> listener) throws Exception {
         // validates the pipeline and processor configuration before submitting a cluster update task:
         validatePipeline(ingestInfos, request);
         clusterService.submitStateUpdateTask("put-pipeline-" + request.getId(),
-                new AckedClusterStateUpdateTask<WritePipelineResponse>(request, listener) {
+                new AckedClusterStateUpdateTask<AcknowledgedResponse>(request, listener) {
 
             @Override
-            protected WritePipelineResponse newResponse(boolean acknowledged) {
-                return new WritePipelineResponse(acknowledged);
+            protected AcknowledgedResponse newResponse(boolean acknowledged) {
+                return new AcknowledgedResponse(acknowledged);
             }
 
             @Override
@@ -67,4 +67,9 @@ public class RestMainAction extends BaseRestHandler {
         response.toXContent(builder, request);
         return new BytesRestResponse(RestStatus.OK, builder);
     }
+
+    @Override
+    public boolean canTripCircuitBreaker() {
+        return false;
+    }
 }
@@ -1,83 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.action.ingest;
-
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.test.AbstractStreamableXContentTestCase;
-
-import java.io.IOException;
-
-import static org.hamcrest.CoreMatchers.equalTo;
-
-public class WritePipelineResponseTests extends AbstractStreamableXContentTestCase<WritePipelineResponse> {
-
-    public void testSerializationWithoutError() throws IOException {
-        boolean isAcknowledged = randomBoolean();
-        WritePipelineResponse response;
-        response = new WritePipelineResponse(isAcknowledged);
-        BytesStreamOutput out = new BytesStreamOutput();
-        response.writeTo(out);
-        StreamInput streamInput = out.bytes().streamInput();
-        WritePipelineResponse otherResponse = new WritePipelineResponse();
-        otherResponse.readFrom(streamInput);
-
-        assertThat(otherResponse.isAcknowledged(), equalTo(response.isAcknowledged()));
-    }
-
-    public void testSerializationWithError() throws IOException {
-        WritePipelineResponse response = new WritePipelineResponse();
-        BytesStreamOutput out = new BytesStreamOutput();
-        response.writeTo(out);
-        StreamInput streamInput = out.bytes().streamInput();
-        WritePipelineResponse otherResponse = new WritePipelineResponse();
-        otherResponse.readFrom(streamInput);
-
-        assertThat(otherResponse.isAcknowledged(), equalTo(response.isAcknowledged()));
-    }
-
-    public void testToXContent() {
-        WritePipelineResponse response = new WritePipelineResponse(true);
-        String output = Strings.toString(response);
-        assertEquals("{\"acknowledged\":true}", output);
-    }
-
-    @Override
-    protected WritePipelineResponse doParseInstance(XContentParser parser) {
-        return WritePipelineResponse.fromXContent(parser);
-    }
-
-    @Override
-    protected WritePipelineResponse createTestInstance() {
-        return new WritePipelineResponse(randomBoolean());
-    }
-
-    @Override
-    protected WritePipelineResponse createBlankInstance() {
-        return new WritePipelineResponse();
-    }
-
-    @Override
-    protected WritePipelineResponse mutateInstance(WritePipelineResponse response) {
-        return new WritePipelineResponse(response.isAcknowledged() == false);
-    }
-}
@@ -453,10 +453,12 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
     }
 
     private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, DateTime jodaDate) {
-        assertThat(jodaDate.getMillis(), is(javaDate.toEpochSecond() * 1000));
-        String javaTimeOut = DateFormatters.forPattern("dateOptionalTime").format(javaDate);
-        String jodaTimeOut = Joda.forPattern("dateOptionalTime").printer().print(jodaDate);
-        assertThat(javaTimeOut, is(jodaTimeOut));
+        assertThat(jodaDate.getMillis(), is(javaDate.toInstant().toEpochMilli()));
+        String javaTimeOut = DateFormatters.forPattern(format).format(javaDate);
+        String jodaTimeOut = Joda.forPattern(format).printer().print(jodaDate);
+        String message = String.format(Locale.ROOT, "expected string representation to be equal for format [%s]: joda [%s], java [%s]",
+            format, jodaTimeOut, javaTimeOut);
+        assertThat(message, javaTimeOut, is(jodaTimeOut));
     }
 
     private void assertSameDate(String input, String format) {
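The first assertion change matters because toEpochSecond() * 1000 silently drops the millisecond component, whereas toInstant().toEpochMilli() keeps it. A quick self-contained illustration of the difference; the timestamp value is arbitrary and only chosen to have a non-zero millisecond part.

    import java.time.Instant;
    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;

    class EpochMillisSketch {
        public static void main(String[] args) {
            ZonedDateTime date = Instant.ofEpochMilli(1_531_221_123_456L).atZone(ZoneOffset.UTC);
            System.out.println(date.toEpochSecond() * 1000);     // 1531221123000 - milliseconds lost
            System.out.println(date.toInstant().toEpochMilli()); // 1531221123456 - milliseconds preserved
        }
    }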
@@ -34,7 +34,7 @@ import org.elasticsearch.action.ingest.PutPipelineRequest;
 import org.elasticsearch.action.ingest.SimulateDocumentBaseResult;
 import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineResponse;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -126,7 +126,7 @@ public class IngestClientIT extends ESIntegTestCase {
         assertThat(simulateDocumentBaseResult.getFailure(), nullValue());
 
         // cleanup
-        WritePipelineResponse deletePipelineResponse = client().admin().cluster().prepareDeletePipeline("_id").get();
+        AcknowledgedResponse deletePipelineResponse = client().admin().cluster().prepareDeletePipeline("_id").get();
         assertTrue(deletePipelineResponse.isAcknowledged());
     }
 
@@ -172,7 +172,7 @@ public class IngestClientIT extends ESIntegTestCase {
         }
 
         // cleanup
-        WritePipelineResponse deletePipelineResponse = client().admin().cluster().prepareDeletePipeline("_id").get();
+        AcknowledgedResponse deletePipelineResponse = client().admin().cluster().prepareDeletePipeline("_id").get();
         assertTrue(deletePipelineResponse.isAcknowledged());
     }
 
@@ -246,7 +246,7 @@ public class IngestClientIT extends ESIntegTestCase {
         assertThat(doc.get("processed"), equalTo(true));
 
         DeletePipelineRequest deletePipelineRequest = new DeletePipelineRequest("_id");
-        WritePipelineResponse response = client().admin().cluster().deletePipeline(deletePipelineRequest).get();
+        AcknowledgedResponse response = client().admin().cluster().deletePipeline(deletePipelineRequest).get();
         assertThat(response.isAcknowledged(), is(true));
 
         getResponse = client().admin().cluster().prepareGetPipeline("_id").get();
@@ -20,7 +20,7 @@
 package org.elasticsearch.ingest;
 
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.node.NodeService;
@@ -95,7 +95,7 @@ public class IngestProcessorNotInstalledOnAllNodesIT extends ESIntegTestCase {
         installPlugin = true;
         String node1 = internalCluster().startNode();
 
-        WritePipelineResponse response = client().admin().cluster().preparePutPipeline("_id", pipelineSource, XContentType.JSON).get();
+        AcknowledgedResponse response = client().admin().cluster().preparePutPipeline("_id", pipelineSource, XContentType.JSON).get();
         assertThat(response.isAcknowledged(), is(true));
         Pipeline pipeline = internalCluster().getInstance(NodeService.class, node1).getIngestService().getPipelineStore().get("_id");
         assertThat(pipeline, notNullValue());
@ -50,6 +50,7 @@ import org.elasticsearch.index.query.QueryShardContext;
|
||||||
import org.elasticsearch.index.query.Rewriteable;
|
import org.elasticsearch.index.query.Rewriteable;
|
||||||
import org.elasticsearch.index.query.TermsQueryBuilder;
|
import org.elasticsearch.index.query.TermsQueryBuilder;
|
||||||
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
|
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
|
||||||
|
import org.elasticsearch.index.search.NestedHelper;
|
||||||
import org.elasticsearch.index.shard.IndexSearcherWrapper;
|
import org.elasticsearch.index.shard.IndexSearcherWrapper;
|
||||||
import org.elasticsearch.index.shard.ShardId;
|
import org.elasticsearch.index.shard.ShardId;
|
||||||
import org.elasticsearch.index.shard.ShardUtils;
|
import org.elasticsearch.index.shard.ShardUtils;
|
||||||
|
@ -73,6 +74,7 @@ import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.function.Function;
|
import java.util.function.Function;
|
||||||
|
|
||||||
|
import static org.apache.lucene.search.BooleanClause.Occur.FILTER;
|
||||||
import static org.apache.lucene.search.BooleanClause.Occur.SHOULD;
|
import static org.apache.lucene.search.BooleanClause.Occur.SHOULD;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -139,6 +141,13 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
|
||||||
Query roleQuery = queryShardContext.toFilter(queryBuilder).query();
|
Query roleQuery = queryShardContext.toFilter(queryBuilder).query();
|
||||||
filter.add(roleQuery, SHOULD);
|
filter.add(roleQuery, SHOULD);
|
||||||
if (queryShardContext.getMapperService().hasNested()) {
|
if (queryShardContext.getMapperService().hasNested()) {
|
||||||
|
NestedHelper nestedHelper = new NestedHelper(queryShardContext.getMapperService());
|
||||||
|
if (nestedHelper.mightMatchNestedDocs(roleQuery)) {
|
||||||
|
roleQuery = new BooleanQuery.Builder()
|
||||||
|
.add(roleQuery, FILTER)
|
||||||
|
.add(Queries.newNonNestedFilter(queryShardContext.indexVersionCreated()), FILTER)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
// If access is allowed on root doc then also access is allowed on all nested docs of that root document:
|
// If access is allowed on root doc then also access is allowed on all nested docs of that root document:
|
||||||
BitSetProducer rootDocs = queryShardContext.bitsetFilter(
|
BitSetProducer rootDocs = queryShardContext.bitsetFilter(
|
||||||
Queries.newNonNestedFilter(queryShardContext.indexVersionCreated()));
|
Queries.newNonNestedFilter(queryShardContext.indexVersionCreated()));
|
||||||
|
|
|
@ -15,7 +15,6 @@ import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
|
||||||
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
|
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
|
||||||
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
|
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
|
||||||
import org.elasticsearch.action.ingest.PutPipelineRequest;
|
import org.elasticsearch.action.ingest.PutPipelineRequest;
|
||||||
import org.elasticsearch.action.ingest.WritePipelineResponse;
|
|
||||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||||
import org.elasticsearch.client.Client;
|
import org.elasticsearch.client.Client;
|
||||||
import org.elasticsearch.cluster.ClusterChangedEvent;
|
import org.elasticsearch.cluster.ClusterChangedEvent;
|
||||||
|
@ -39,6 +38,7 @@ import org.elasticsearch.index.IndexNotFoundException;
|
||||||
import org.elasticsearch.ingest.IngestMetadata;
|
import org.elasticsearch.ingest.IngestMetadata;
|
||||||
import org.elasticsearch.ingest.PipelineConfiguration;
|
import org.elasticsearch.ingest.PipelineConfiguration;
|
||||||
import org.elasticsearch.license.XPackLicenseState;
|
import org.elasticsearch.license.XPackLicenseState;
|
||||||
|
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
|
||||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
|
import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
|
||||||
import org.elasticsearch.xpack.core.XPackClient;
|
import org.elasticsearch.xpack.core.XPackClient;
|
||||||
|
@ -46,7 +46,6 @@ import org.elasticsearch.xpack.core.XPackSettings;
|
||||||
import org.elasticsearch.xpack.core.monitoring.MonitoredSystem;
|
import org.elasticsearch.xpack.core.monitoring.MonitoredSystem;
|
||||||
import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils;
|
import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils;
|
||||||
import org.elasticsearch.xpack.core.watcher.client.WatcherClient;
|
import org.elasticsearch.xpack.core.watcher.client.WatcherClient;
|
||||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
|
|
||||||
import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequest;
|
import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequest;
|
||||||
import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse;
|
import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse;
|
||||||
import org.elasticsearch.xpack.core.watcher.watch.Watch;
|
import org.elasticsearch.xpack.core.watcher.watch.Watch;
|
||||||
|
@ -385,7 +384,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
|
||||||
* }
|
* }
|
||||||
* </code></pre>
|
* </code></pre>
|
||||||
*/
|
*/
|
||||||
private void putIngestPipeline(final String pipelineId, final ActionListener<WritePipelineResponse> listener) {
|
private void putIngestPipeline(final String pipelineId, final ActionListener<AcknowledgedResponse> listener) {
|
||||||
final String pipelineName = pipelineName(pipelineId);
|
final String pipelineName = pipelineName(pipelineId);
|
||||||
final BytesReference pipeline = BytesReference.bytes(loadPipeline(pipelineId, XContentType.JSON));
|
final BytesReference pipeline = BytesReference.bytes(loadPipeline(pipelineId, XContentType.JSON));
|
||||||
final PutPipelineRequest request = new PutPipelineRequest(pipelineName, pipeline, XContentType.JSON);
|
final PutPipelineRequest request = new PutPipelineRequest(pipelineName, pipeline, XContentType.JSON);
|
||||||
|
|
|
@ -97,7 +97,8 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
|
||||||
return super.configUsers() +
|
return super.configUsers() +
|
||||||
"user1:" + usersPasswdHashed + "\n" +
|
"user1:" + usersPasswdHashed + "\n" +
|
||||||
"user2:" + usersPasswdHashed + "\n" +
|
"user2:" + usersPasswdHashed + "\n" +
|
||||||
"user3:" + usersPasswdHashed + "\n";
|
"user3:" + usersPasswdHashed + "\n" +
|
||||||
|
"user4:" + usersPasswdHashed + "\n";
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -105,7 +106,8 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
|
||||||
return super.configUsersRoles() +
|
return super.configUsersRoles() +
|
||||||
"role1:user1,user2,user3\n" +
|
"role1:user1,user2,user3\n" +
|
||||||
"role2:user1,user3\n" +
|
"role2:user1,user3\n" +
|
||||||
"role3:user2,user3\n";
|
"role3:user2,user3\n" +
|
||||||
|
"role4:user4\n";
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -131,7 +133,14 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
|
||||||
" indices:\n" +
|
" indices:\n" +
|
||||||
" - names: '*'\n" +
|
" - names: '*'\n" +
|
||||||
" privileges: [ ALL ]\n" +
|
" privileges: [ ALL ]\n" +
|
||||||
" query: '{\"term\" : {\"field2\" : \"value2\"}}'"; // <-- query defined as json in a string
|
" query: '{\"term\" : {\"field2\" : \"value2\"}}'\n" + // <-- query defined as json in a string
|
||||||
|
"role4:\n" +
|
||||||
|
" cluster: [ all ]\n" +
|
||||||
|
" indices:\n" +
|
||||||
|
" - names: '*'\n" +
|
||||||
|
" privileges: [ ALL ]\n" +
|
||||||
|
// query that can match nested documents
|
||||||
|
" query: '{\"bool\": { \"must_not\": { \"term\" : {\"field1\" : \"value2\"}}}}'";
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -869,7 +878,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
|
||||||
refresh("test");
|
refresh("test");
|
||||||
|
|
||||||
SearchResponse response = client()
|
SearchResponse response = client()
|
||||||
.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
|
.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD)))
|
||||||
.prepareSearch("test")
|
.prepareSearch("test")
|
||||||
.setQuery(QueryBuilders.nestedQuery("nested_field", QueryBuilders.termQuery("nested_field.field2", "value2"),
|
.setQuery(QueryBuilders.nestedQuery("nested_field", QueryBuilders.termQuery("nested_field.field2", "value2"),
|
||||||
ScoreMode.None).innerHit(new InnerHitBuilder()))
|
ScoreMode.None).innerHit(new InnerHitBuilder()))
|
||||||
|
|
|
@@ -0,0 +1,400 @@
+/*
+* Licensed to Elasticsearch under one or more contributor
+* license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright
+* ownership. Elasticsearch licenses this file to you under
+* the Apache License, Version 2.0 (the "License"); you may
+* not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied. See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*/
+package org.elasticsearch.protocol.xpack.ml.job.config;

+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;

+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.Function;

+/**
+* Analysis configuration options that describe which fields are
+* analyzed and which functions are used to detect anomalies.
+* <p>
+* The configuration can contain multiple detectors, a new anomaly detector will
+* be created for each detector configuration. The fields
+* <code>bucketSpan, summaryCountFieldName and categorizationFieldName</code>
+* apply to all detectors.
+* <p>
+* If a value has not been set it will be <code>null</code>
+* Object wrappers are used around integral types & booleans so they can take
+* <code>null</code> values.
+*/
+public class AnalysisConfig implements ToXContentObject {
+/**
+* Serialisation names
+*/
+public static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config");
+public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
+public static final ParseField CATEGORIZATION_FIELD_NAME = new ParseField("categorization_field_name");
+public static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorization_filters");
+public static final ParseField CATEGORIZATION_ANALYZER = CategorizationAnalyzerConfig.CATEGORIZATION_ANALYZER;
+public static final ParseField LATENCY = new ParseField("latency");
+public static final ParseField SUMMARY_COUNT_FIELD_NAME = new ParseField("summary_count_field_name");
+public static final ParseField DETECTORS = new ParseField("detectors");
+public static final ParseField INFLUENCERS = new ParseField("influencers");
+public static final ParseField OVERLAPPING_BUCKETS = new ParseField("overlapping_buckets");
+public static final ParseField RESULT_FINALIZATION_WINDOW = new ParseField("result_finalization_window");
+public static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariate_by_fields");

+@SuppressWarnings("unchecked")
+public static final ConstructingObjectParser<Builder, Void> PARSER = new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(),
+true, a -> new AnalysisConfig.Builder((List<Detector>) a[0]));

+static {
+PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(),
+(p, c) -> (Detector.PARSER).apply(p, c).build(), DETECTORS);
+PARSER.declareString((builder, val) ->
+builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())), BUCKET_SPAN);
+PARSER.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME);
+PARSER.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS);
+// This one is nasty - the syntax for analyzers takes either names or objects at many levels, hence it's not
+// possible to simply declare whether the field is a string or object and a completely custom parser is required
+PARSER.declareField(Builder::setCategorizationAnalyzerConfig,
+(p, c) -> CategorizationAnalyzerConfig.buildFromXContentFragment(p),
+CATEGORIZATION_ANALYZER, ObjectParser.ValueType.OBJECT_OR_STRING);
+PARSER.declareString((builder, val) ->
+builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY);
+PARSER.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME);
+PARSER.declareStringArray(Builder::setInfluencers, INFLUENCERS);
+PARSER.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS);
+PARSER.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW);
+PARSER.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS);
+}

+/**
+* These values apply to all detectors
+*/
+private final TimeValue bucketSpan;
+private final String categorizationFieldName;
+private final List<String> categorizationFilters;
+private final CategorizationAnalyzerConfig categorizationAnalyzerConfig;
+private final TimeValue latency;
+private final String summaryCountFieldName;
+private final List<Detector> detectors;
+private final List<String> influencers;
+private final Boolean overlappingBuckets;
+private final Long resultFinalizationWindow;
+private final Boolean multivariateByFields;

+private AnalysisConfig(TimeValue bucketSpan, String categorizationFieldName, List<String> categorizationFilters,
+CategorizationAnalyzerConfig categorizationAnalyzerConfig, TimeValue latency, String summaryCountFieldName,
+List<Detector> detectors, List<String> influencers, Boolean overlappingBuckets, Long resultFinalizationWindow,
+Boolean multivariateByFields) {
+this.detectors = Collections.unmodifiableList(detectors);
+this.bucketSpan = bucketSpan;
+this.latency = latency;
+this.categorizationFieldName = categorizationFieldName;
+this.categorizationAnalyzerConfig = categorizationAnalyzerConfig;
+this.categorizationFilters = categorizationFilters == null ? null : Collections.unmodifiableList(categorizationFilters);
+this.summaryCountFieldName = summaryCountFieldName;
+this.influencers = Collections.unmodifiableList(influencers);
+this.overlappingBuckets = overlappingBuckets;
+this.resultFinalizationWindow = resultFinalizationWindow;
+this.multivariateByFields = multivariateByFields;
+}

+/**
+* The analysis bucket span
+*
+* @return The bucketspan or <code>null</code> if not set
+*/
+public TimeValue getBucketSpan() {
+return bucketSpan;
+}

+public String getCategorizationFieldName() {
+return categorizationFieldName;
+}

+public List<String> getCategorizationFilters() {
+return categorizationFilters;
+}

+public CategorizationAnalyzerConfig getCategorizationAnalyzerConfig() {
+return categorizationAnalyzerConfig;
+}

+/**
+* The latency interval during which out-of-order records should be handled.
+*
+* @return The latency interval or <code>null</code> if not set
+*/
+public TimeValue getLatency() {
+return latency;
+}

+/**
+* The name of the field that contains counts for pre-summarised input
+*
+* @return The field name or <code>null</code> if not set
+*/
+public String getSummaryCountFieldName() {
+return summaryCountFieldName;
+}

+/**
+* The list of analysis detectors. In a valid configuration the list should
+* contain at least 1 {@link Detector}
+*
+* @return The Detectors used in this job
+*/
+public List<Detector> getDetectors() {
+return detectors;
+}

+/**
+* The list of influence field names
+*/
+public List<String> getInfluencers() {
+return influencers;
+}

+public Boolean getOverlappingBuckets() {
+return overlappingBuckets;
+}

+public Long getResultFinalizationWindow() {
+return resultFinalizationWindow;
+}

+public Boolean getMultivariateByFields() {
+return multivariateByFields;
+}

+private static void addIfNotNull(Set<String> fields, String field) {
+if (field != null) {
+fields.add(field);
+}
+}

+public List<String> fields() {
+return collectNonNullAndNonEmptyDetectorFields(Detector::getFieldName);
+}

+private List<String> collectNonNullAndNonEmptyDetectorFields(
+Function<Detector, String> fieldGetter) {
+Set<String> fields = new HashSet<>();

+for (Detector d : getDetectors()) {
+addIfNotNull(fields, fieldGetter.apply(d));
+}

+// remove empty strings
+fields.remove("");

+return new ArrayList<>(fields);
+}

+public List<String> byFields() {
+return collectNonNullAndNonEmptyDetectorFields(Detector::getByFieldName);
+}

+public List<String> overFields() {
+return collectNonNullAndNonEmptyDetectorFields(Detector::getOverFieldName);
+}

+public List<String> partitionFields() {
+return collectNonNullAndNonEmptyDetectorFields(Detector::getPartitionFieldName);
+}

+@Override
+public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+builder.startObject();
+if (bucketSpan != null) {
+builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep());
+}
+if (categorizationFieldName != null) {
+builder.field(CATEGORIZATION_FIELD_NAME.getPreferredName(), categorizationFieldName);
+}
+if (categorizationFilters != null) {
+builder.field(CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters);
+}
+if (categorizationAnalyzerConfig != null) {
+// This cannot be builder.field(CATEGORIZATION_ANALYZER.getPreferredName(), categorizationAnalyzerConfig, params);
+// because that always writes categorizationAnalyzerConfig as an object, and in the case of a global analyzer it
+// gets written as a single string.
+categorizationAnalyzerConfig.toXContent(builder, params);
+}
+if (latency != null) {
+builder.field(LATENCY.getPreferredName(), latency.getStringRep());
+}
+if (summaryCountFieldName != null) {
+builder.field(SUMMARY_COUNT_FIELD_NAME.getPreferredName(), summaryCountFieldName);
+}
+builder.startArray(DETECTORS.getPreferredName());
+for (Detector detector : detectors) {
+detector.toXContent(builder, params);
+}
+builder.endArray();
+builder.field(INFLUENCERS.getPreferredName(), influencers);
+if (overlappingBuckets != null) {
+builder.field(OVERLAPPING_BUCKETS.getPreferredName(), overlappingBuckets);
+}
+if (resultFinalizationWindow != null) {
+builder.field(RESULT_FINALIZATION_WINDOW.getPreferredName(), resultFinalizationWindow);
+}
+if (multivariateByFields != null) {
+builder.field(MULTIVARIATE_BY_FIELDS.getPreferredName(), multivariateByFields);
+}
+builder.endObject();
+return builder;
+}

+@Override
+public boolean equals(Object object) {
+if (this == object) {
+return true;
+}

+if (object == null || getClass() != object.getClass()) {
+return false;
+}

+AnalysisConfig that = (AnalysisConfig) object;
+return Objects.equals(latency, that.latency) &&
+Objects.equals(bucketSpan, that.bucketSpan) &&
+Objects.equals(categorizationFieldName, that.categorizationFieldName) &&
+Objects.equals(categorizationFilters, that.categorizationFilters) &&
+Objects.equals(categorizationAnalyzerConfig, that.categorizationAnalyzerConfig) &&
+Objects.equals(summaryCountFieldName, that.summaryCountFieldName) &&
+Objects.equals(detectors, that.detectors) &&
+Objects.equals(influencers, that.influencers) &&
+Objects.equals(overlappingBuckets, that.overlappingBuckets) &&
+Objects.equals(resultFinalizationWindow, that.resultFinalizationWindow) &&
+Objects.equals(multivariateByFields, that.multivariateByFields);
+}

+@Override
+public int hashCode() {
+return Objects.hash(
+bucketSpan, categorizationFieldName, categorizationFilters, categorizationAnalyzerConfig, latency,
+summaryCountFieldName, detectors, influencers, overlappingBuckets, resultFinalizationWindow,
+multivariateByFields);
+}

+public static class Builder {

+private List<Detector> detectors;
+private TimeValue bucketSpan;
+private TimeValue latency;
+private String categorizationFieldName;
+private List<String> categorizationFilters;
+private CategorizationAnalyzerConfig categorizationAnalyzerConfig;
+private String summaryCountFieldName;
+private List<String> influencers = new ArrayList<>();
+private Boolean overlappingBuckets;
+private Long resultFinalizationWindow;
+private Boolean multivariateByFields;

+public Builder(List<Detector> detectors) {
+setDetectors(detectors);
+}

+public Builder(AnalysisConfig analysisConfig) {
+this.detectors = new ArrayList<>(analysisConfig.detectors);
+this.bucketSpan = analysisConfig.bucketSpan;
+this.latency = analysisConfig.latency;
+this.categorizationFieldName = analysisConfig.categorizationFieldName;
+this.categorizationFilters = analysisConfig.categorizationFilters == null ? null
+: new ArrayList<>(analysisConfig.categorizationFilters);
+this.categorizationAnalyzerConfig = analysisConfig.categorizationAnalyzerConfig;
+this.summaryCountFieldName = analysisConfig.summaryCountFieldName;
+this.influencers = new ArrayList<>(analysisConfig.influencers);
+this.overlappingBuckets = analysisConfig.overlappingBuckets;
+this.resultFinalizationWindow = analysisConfig.resultFinalizationWindow;
+this.multivariateByFields = analysisConfig.multivariateByFields;
+}

+public void setDetectors(List<Detector> detectors) {
+Objects.requireNonNull(detectors, "[" + DETECTORS.getPreferredName() + "] must not be null");
+// We always assign sequential IDs to the detectors that are correct for this analysis config
+int detectorIndex = 0;
+List<Detector> sequentialIndexDetectors = new ArrayList<>(detectors.size());
+for (Detector origDetector : detectors) {
+Detector.Builder builder = new Detector.Builder(origDetector);
+builder.setDetectorIndex(detectorIndex++);
+sequentialIndexDetectors.add(builder.build());
+}
+this.detectors = sequentialIndexDetectors;
+}

+public void setDetector(int detectorIndex, Detector detector) {
+detectors.set(detectorIndex, detector);
+}

+public void setBucketSpan(TimeValue bucketSpan) {
+this.bucketSpan = bucketSpan;
+}

+public void setLatency(TimeValue latency) {
+this.latency = latency;
+}

+public void setCategorizationFieldName(String categorizationFieldName) {
+this.categorizationFieldName = categorizationFieldName;
+}

+public void setCategorizationFilters(List<String> categorizationFilters) {
+this.categorizationFilters = categorizationFilters;
+}

+public void setCategorizationAnalyzerConfig(CategorizationAnalyzerConfig categorizationAnalyzerConfig) {
+this.categorizationAnalyzerConfig = categorizationAnalyzerConfig;
+}

+public void setSummaryCountFieldName(String summaryCountFieldName) {
+this.summaryCountFieldName = summaryCountFieldName;
+}

+public void setInfluencers(List<String> influencers) {
+this.influencers = Objects.requireNonNull(influencers, INFLUENCERS.getPreferredName());
+}

+public void setOverlappingBuckets(Boolean overlappingBuckets) {
+this.overlappingBuckets = overlappingBuckets;
+}

+public void setResultFinalizationWindow(Long resultFinalizationWindow) {
+this.resultFinalizationWindow = resultFinalizationWindow;
+}

+public void setMultivariateByFields(Boolean multivariateByFields) {
+this.multivariateByFields = multivariateByFields;
+}

+public AnalysisConfig build() {

+return new AnalysisConfig(bucketSpan, categorizationFieldName, categorizationFilters, categorizationAnalyzerConfig,
+latency, summaryCountFieldName, detectors, influencers, overlappingBuckets,
+resultFinalizationWindow, multivariateByFields);
+}
+}
+}
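The new AnalysisConfig above is consumed entirely through its Builder and the lenient PARSER. A minimal usage sketch, not part of the commit itself; it assumes a Detector.Builder(String function, String fieldName) constructor as in the existing ML config classes, and the function and field names are purely illustrative:

// Illustrative sketch: one detector computing the mean of "responsetime" per 10-minute bucket,
// with "user" as an influencer. setDetectors() reassigns detector indices sequentially.
Detector detector = new Detector.Builder("mean", "responsetime").build();
AnalysisConfig.Builder configBuilder = new AnalysisConfig.Builder(Arrays.asList(detector));
configBuilder.setBucketSpan(TimeValue.timeValueMinutes(10));
configBuilder.setInfluencers(Arrays.asList("user"));
AnalysisConfig analysisConfig = configBuilder.build();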
@@ -19,7 +19,572 @@
package org.elasticsearch.protocol.xpack.ml.job.config;

import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil;

-public class Job {
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;

+/**
+* This class represents a configured and created Job. The creation time is set
+* to the time the object was constructed and the finished time and last
+* data time fields are {@code null} until the job has seen some data or it is
+* finished respectively.
+*/
+public class Job implements ToXContentObject {

+public static final String ANOMALY_DETECTOR_JOB_TYPE = "anomaly_detector";

+/*
+* Field names used in serialization
+*/
public static final ParseField ID = new ParseField("job_id");
+public static final ParseField JOB_TYPE = new ParseField("job_type");
+public static final ParseField GROUPS = new ParseField("groups");
+public static final ParseField ANALYSIS_CONFIG = AnalysisConfig.ANALYSIS_CONFIG;
+public static final ParseField ANALYSIS_LIMITS = new ParseField("analysis_limits");
+public static final ParseField CREATE_TIME = new ParseField("create_time");
+public static final ParseField CUSTOM_SETTINGS = new ParseField("custom_settings");
+public static final ParseField DATA_DESCRIPTION = new ParseField("data_description");
+public static final ParseField DESCRIPTION = new ParseField("description");
+public static final ParseField FINISHED_TIME = new ParseField("finished_time");
+public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time");
+public static final ParseField ESTABLISHED_MODEL_MEMORY = new ParseField("established_model_memory");
+public static final ParseField MODEL_PLOT_CONFIG = new ParseField("model_plot_config");
+public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days");
+public static final ParseField BACKGROUND_PERSIST_INTERVAL = new ParseField("background_persist_interval");
+public static final ParseField MODEL_SNAPSHOT_RETENTION_DAYS = new ParseField("model_snapshot_retention_days");
+public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days");
+public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id");
+public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name");
+public static final ParseField DELETED = new ParseField("deleted");

+public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("job_details", true, Builder::new);

+static {
+PARSER.declareString(Builder::setId, ID);
+PARSER.declareString(Builder::setJobType, JOB_TYPE);
+PARSER.declareStringArray(Builder::setGroups, GROUPS);
+PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION);
+PARSER.declareField(Builder::setCreateTime,
+(p) -> TimeUtil.parseTimeField(p, CREATE_TIME.getPreferredName()),
+CREATE_TIME,
+ValueType.VALUE);
+PARSER.declareField(Builder::setFinishedTime,
+(p) -> TimeUtil.parseTimeField(p, FINISHED_TIME.getPreferredName()),
+FINISHED_TIME,
+ValueType.VALUE);
+PARSER.declareField(Builder::setLastDataTime,
+(p) -> TimeUtil.parseTimeField(p, LAST_DATA_TIME.getPreferredName()),
+LAST_DATA_TIME,
+ValueType.VALUE);
+PARSER.declareLong(Builder::setEstablishedModelMemory, ESTABLISHED_MODEL_MEMORY);
+PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG);
+PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS);
+PARSER.declareObject(Builder::setDataDescription, DataDescription.PARSER, DATA_DESCRIPTION);
+PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, MODEL_PLOT_CONFIG);
+PARSER.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS);
+PARSER.declareString((builder, val) -> builder.setBackgroundPersistInterval(
+TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName())), BACKGROUND_PERSIST_INTERVAL);
+PARSER.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS);
+PARSER.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS);
+PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT);
+PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID);
+PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME);
+PARSER.declareBoolean(Builder::setDeleted, DELETED);
+}

+private final String jobId;
+private final String jobType;

+private final List<String> groups;
+private final String description;
+private final Date createTime;
+private final Date finishedTime;
+private final Date lastDataTime;
+private final Long establishedModelMemory;
+private final AnalysisConfig analysisConfig;
+private final AnalysisLimits analysisLimits;
+private final DataDescription dataDescription;
+private final ModelPlotConfig modelPlotConfig;
+private final Long renormalizationWindowDays;
+private final TimeValue backgroundPersistInterval;
+private final Long modelSnapshotRetentionDays;
+private final Long resultsRetentionDays;
+private final Map<String, Object> customSettings;
+private final String modelSnapshotId;
+private final String resultsIndexName;
+private final boolean deleted;

+private Job(String jobId, String jobType, List<String> groups, String description, Date createTime,
+Date finishedTime, Date lastDataTime, Long establishedModelMemory,
+AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription,
+ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval,
+Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map<String, Object> customSettings,
+String modelSnapshotId, String resultsIndexName, boolean deleted) {

+this.jobId = jobId;
+this.jobType = jobType;
+this.groups = Collections.unmodifiableList(groups);
+this.description = description;
+this.createTime = createTime;
+this.finishedTime = finishedTime;
+this.lastDataTime = lastDataTime;
+this.establishedModelMemory = establishedModelMemory;
+this.analysisConfig = analysisConfig;
+this.analysisLimits = analysisLimits;
+this.dataDescription = dataDescription;
+this.modelPlotConfig = modelPlotConfig;
+this.renormalizationWindowDays = renormalizationWindowDays;
+this.backgroundPersistInterval = backgroundPersistInterval;
+this.modelSnapshotRetentionDays = modelSnapshotRetentionDays;
+this.resultsRetentionDays = resultsRetentionDays;
+this.customSettings = customSettings == null ? null : Collections.unmodifiableMap(customSettings);
+this.modelSnapshotId = modelSnapshotId;
+this.resultsIndexName = resultsIndexName;
+this.deleted = deleted;
+}

+/**
+* Return the Job Id.
+*
+* @return The job Id string
+*/
+public String getId() {
+return jobId;
+}

+public String getJobType() {
+return jobType;
+}

+public List<String> getGroups() {
+return groups;
+}

+/**
+* Private version of getResultsIndexName so that a job can be built from another
+* job and pass index name validation
+*
+* @return The job's index name, minus prefix
+*/
+private String getResultsIndexNameNoPrefix() {
+return resultsIndexName;
+}

+/**
+* The job description
+*
+* @return job description
+*/
+public String getDescription() {
+return description;
+}

+/**
+* The Job creation time. This name is preferred when serialising to the
+* REST API.
+*
+* @return The date the job was created
+*/
+public Date getCreateTime() {
+return createTime;
+}

+/**
+* The time the job was finished or <code>null</code> if not finished.
+*
+* @return The date the job was last retired or <code>null</code>
+*/
+public Date getFinishedTime() {
+return finishedTime;
+}

+/**
+* The last time data was uploaded to the job or <code>null</code> if no
+* data has been seen.
+*
+* @return The date at which the last data was processed
+*/
+public Date getLastDataTime() {
+return lastDataTime;
+}

+/**
+* The established model memory of the job, or <code>null</code> if model
+* memory has not reached equilibrium yet.
+*
+* @return The established model memory of the job
+*/
+public Long getEstablishedModelMemory() {
+return establishedModelMemory;
+}

+/**
+* The analysis configuration object
+*
+* @return The AnalysisConfig
+*/
+public AnalysisConfig getAnalysisConfig() {
+return analysisConfig;
+}

+/**
+* The analysis options object
+*
+* @return The AnalysisLimits
+*/
+public AnalysisLimits getAnalysisLimits() {
+return analysisLimits;
+}

+public ModelPlotConfig getModelPlotConfig() {
+return modelPlotConfig;
+}

+/**
+* If not set the input data is assumed to be csv with a '_time' field in
+* epoch format.
+*
+* @return A DataDescription or <code>null</code>
+* @see DataDescription
+*/
+public DataDescription getDataDescription() {
+return dataDescription;
+}

+/**
+* The duration of the renormalization window in days
+*
+* @return renormalization window in days
+*/
+public Long getRenormalizationWindowDays() {
+return renormalizationWindowDays;
+}

+/**
+* The background persistence interval
+*
+* @return background persistence interval
+*/
+public TimeValue getBackgroundPersistInterval() {
+return backgroundPersistInterval;
+}

+public Long getModelSnapshotRetentionDays() {
+return modelSnapshotRetentionDays;
+}

+public Long getResultsRetentionDays() {
+return resultsRetentionDays;
+}

+public Map<String, Object> getCustomSettings() {
+return customSettings;
+}

+public String getModelSnapshotId() {
+return modelSnapshotId;
+}

+public boolean isDeleted() {
+return deleted;
+}

+@Override
+public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+builder.startObject();
+final String humanReadableSuffix = "_string";

+builder.field(ID.getPreferredName(), jobId);
+builder.field(JOB_TYPE.getPreferredName(), jobType);

+if (groups.isEmpty() == false) {
+builder.field(GROUPS.getPreferredName(), groups);
+}
+if (description != null) {
+builder.field(DESCRIPTION.getPreferredName(), description);
+}
+builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + humanReadableSuffix, createTime.getTime());
+if (finishedTime != null) {
+builder.timeField(FINISHED_TIME.getPreferredName(), FINISHED_TIME.getPreferredName() + humanReadableSuffix,
+finishedTime.getTime());
+}
+if (lastDataTime != null) {
+builder.timeField(LAST_DATA_TIME.getPreferredName(), LAST_DATA_TIME.getPreferredName() + humanReadableSuffix,
+lastDataTime.getTime());
+}
+if (establishedModelMemory != null) {
+builder.field(ESTABLISHED_MODEL_MEMORY.getPreferredName(), establishedModelMemory);
+}
+builder.field(ANALYSIS_CONFIG.getPreferredName(), analysisConfig, params);
+if (analysisLimits != null) {
+builder.field(ANALYSIS_LIMITS.getPreferredName(), analysisLimits, params);
+}
+if (dataDescription != null) {
+builder.field(DATA_DESCRIPTION.getPreferredName(), dataDescription, params);
+}
+if (modelPlotConfig != null) {
+builder.field(MODEL_PLOT_CONFIG.getPreferredName(), modelPlotConfig, params);
+}
+if (renormalizationWindowDays != null) {
+builder.field(RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays);
+}
+if (backgroundPersistInterval != null) {
+builder.field(BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval.getStringRep());
+}
+if (modelSnapshotRetentionDays != null) {
+builder.field(MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays);
+}
+if (resultsRetentionDays != null) {
+builder.field(RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays);
+}
+if (customSettings != null) {
+builder.field(CUSTOM_SETTINGS.getPreferredName(), customSettings);
+}
+if (modelSnapshotId != null) {
+builder.field(MODEL_SNAPSHOT_ID.getPreferredName(), modelSnapshotId);
+}
+if (resultsIndexName != null) {
+builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName);
+}
+if (params.paramAsBoolean("all", false)) {
+builder.field(DELETED.getPreferredName(), deleted);
+}
+builder.endObject();
+return builder;
+}

+@Override
+public boolean equals(Object other) {
+if (this == other) {
+return true;
+}

+if (other == null || getClass() != other.getClass()) {
+return false;
+}

+Job that = (Job) other;
+return Objects.equals(this.jobId, that.jobId)
+&& Objects.equals(this.jobType, that.jobType)
+&& Objects.equals(this.groups, that.groups)
+&& Objects.equals(this.description, that.description)
+&& Objects.equals(this.createTime, that.createTime)
+&& Objects.equals(this.finishedTime, that.finishedTime)
+&& Objects.equals(this.lastDataTime, that.lastDataTime)
+&& Objects.equals(this.establishedModelMemory, that.establishedModelMemory)
+&& Objects.equals(this.analysisConfig, that.analysisConfig)
+&& Objects.equals(this.analysisLimits, that.analysisLimits)
+&& Objects.equals(this.dataDescription, that.dataDescription)
+&& Objects.equals(this.modelPlotConfig, that.modelPlotConfig)
+&& Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays)
+&& Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval)
+&& Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays)
+&& Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays)
+&& Objects.equals(this.customSettings, that.customSettings)
+&& Objects.equals(this.modelSnapshotId, that.modelSnapshotId)
+&& Objects.equals(this.resultsIndexName, that.resultsIndexName)
+&& Objects.equals(this.deleted, that.deleted);
+}

+@Override
+public int hashCode() {
+return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory,
+analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
+backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
+modelSnapshotId, resultsIndexName, deleted);
+}

+@Override
+public final String toString() {
+return Strings.toString(this);
+}

+public static class Builder {

+private String id;
+private String jobType = ANOMALY_DETECTOR_JOB_TYPE;
+private List<String> groups = Collections.emptyList();
+private String description;
+private AnalysisConfig analysisConfig;
+private AnalysisLimits analysisLimits;
+private DataDescription dataDescription;
+private Date createTime;
+private Date finishedTime;
+private Date lastDataTime;
+private Long establishedModelMemory;
+private ModelPlotConfig modelPlotConfig;
+private Long renormalizationWindowDays;
+private TimeValue backgroundPersistInterval;
+private Long modelSnapshotRetentionDays;
+private Long resultsRetentionDays;
+private Map<String, Object> customSettings;
+private String modelSnapshotId;
+private String resultsIndexName;
+private boolean deleted;

+public Builder() {
+}

+public Builder(String id) {
+this.id = id;
+}

+public Builder(Job job) {
+this.id = job.getId();
+this.jobType = job.getJobType();
+this.groups = job.getGroups();
+this.description = job.getDescription();
+this.analysisConfig = job.getAnalysisConfig();
+this.analysisLimits = job.getAnalysisLimits();
+this.dataDescription = job.getDataDescription();
+this.createTime = job.getCreateTime();
+this.finishedTime = job.getFinishedTime();
+this.lastDataTime = job.getLastDataTime();
+this.establishedModelMemory = job.getEstablishedModelMemory();
+this.modelPlotConfig = job.getModelPlotConfig();
+this.renormalizationWindowDays = job.getRenormalizationWindowDays();
+this.backgroundPersistInterval = job.getBackgroundPersistInterval();
+this.modelSnapshotRetentionDays = job.getModelSnapshotRetentionDays();
+this.resultsRetentionDays = job.getResultsRetentionDays();
+this.customSettings = job.getCustomSettings();
+this.modelSnapshotId = job.getModelSnapshotId();
+this.resultsIndexName = job.getResultsIndexNameNoPrefix();
+this.deleted = job.isDeleted();
+}

+public Builder setId(String id) {
+this.id = id;
+return this;
+}

+public String getId() {
+return id;
+}

+public Builder setJobType(String jobType) {
+this.jobType = jobType;
+return this;
+}

+public Builder setGroups(List<String> groups) {
+this.groups = groups == null ? Collections.emptyList() : groups;
+return this;
+}

+public Builder setCustomSettings(Map<String, Object> customSettings) {
+this.customSettings = customSettings;
+return this;
+}

+public Builder setDescription(String description) {
+this.description = description;
+return this;
+}

+public Builder setAnalysisConfig(AnalysisConfig.Builder configBuilder) {
+analysisConfig = Objects.requireNonNull(configBuilder, ANALYSIS_CONFIG.getPreferredName()).build();
+return this;
+}

+public Builder setAnalysisLimits(AnalysisLimits analysisLimits) {
+this.analysisLimits = Objects.requireNonNull(analysisLimits, ANALYSIS_LIMITS.getPreferredName());
+return this;
+}

+Builder setCreateTime(Date createTime) {
+this.createTime = createTime;
+return this;
+}

+Builder setFinishedTime(Date finishedTime) {
+this.finishedTime = finishedTime;
+return this;
+}

+/**
+* Set the wall clock time of the last data upload
+*
+* @param lastDataTime Wall clock time
+*/
+public Builder setLastDataTime(Date lastDataTime) {
+this.lastDataTime = lastDataTime;
+return this;
+}

+public Builder setEstablishedModelMemory(Long establishedModelMemory) {
+this.establishedModelMemory = establishedModelMemory;
+return this;
+}

+public Builder setDataDescription(DataDescription.Builder description) {
+dataDescription = Objects.requireNonNull(description, DATA_DESCRIPTION.getPreferredName()).build();
+return this;
+}

+public Builder setModelPlotConfig(ModelPlotConfig modelPlotConfig) {
+this.modelPlotConfig = modelPlotConfig;
+return this;
+}

+public Builder setBackgroundPersistInterval(TimeValue backgroundPersistInterval) {
+this.backgroundPersistInterval = backgroundPersistInterval;
+return this;
+}

+public Builder setRenormalizationWindowDays(Long renormalizationWindowDays) {
+this.renormalizationWindowDays = renormalizationWindowDays;
+return this;
+}

+public Builder setModelSnapshotRetentionDays(Long modelSnapshotRetentionDays) {
+this.modelSnapshotRetentionDays = modelSnapshotRetentionDays;
+return this;
+}

+public Builder setResultsRetentionDays(Long resultsRetentionDays) {
+this.resultsRetentionDays = resultsRetentionDays;
+return this;
+}

+public Builder setModelSnapshotId(String modelSnapshotId) {
+this.modelSnapshotId = modelSnapshotId;
+return this;
+}

+public Builder setResultsIndexName(String resultsIndexName) {
+this.resultsIndexName = resultsIndexName;
+return this;
+}

+public Builder setDeleted(boolean deleted) {
+this.deleted = deleted;
+return this;
+}

+/**
+* Builds a job.
+*
+* @return The job
+*/
+public Job build() {
+Objects.requireNonNull(id, "[" + ID.getPreferredName() + "] must not be null");
+Objects.requireNonNull(jobType, "[" + JOB_TYPE.getPreferredName() + "] must not be null");
+return new Job(
+id, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory,
+analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
+backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
+modelSnapshotId, resultsIndexName, deleted);
+}
+}
}
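The Job class above is likewise assembled through its Builder; job_type defaults to "anomaly_detector" and build() rejects a null job_id. A minimal sketch, not part of the commit, reusing the AnalysisConfig.Builder from the earlier sketch and assuming DataDescription.Builder has a no-argument constructor as in the existing ML config classes:

// Illustrative sketch: the job id and description are hypothetical.
Job.Builder jobBuilder = new Job.Builder("example-job");
jobBuilder.setAnalysisConfig(configBuilder);            // an AnalysisConfig.Builder; build() is called internally
jobBuilder.setDataDescription(new DataDescription.Builder());
jobBuilder.setDescription("example anomaly detection job");
Job job = jobBuilder.build();                           // throws if job_id or job_type is null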
@ -24,6 +24,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
|
||||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
|
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
|
||||||
|
import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Date;
|
import java.util.Date;
|
||||||
|
|
|
@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
|
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
|
||||||
import org.elasticsearch.protocol.xpack.ml.job.results.Result;
|
import org.elasticsearch.protocol.xpack.ml.job.results.Result;
|
||||||
|
import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Date;
|
import java.util.Date;
|
||||||
|
|
|
@@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
+import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil;

import java.io.IOException;
import java.util.Date;
@@ -16,7 +16,7 @@
 * specific language governing permissions and limitations
 * under the License.
 */
-package org.elasticsearch.protocol.xpack.ml.job.process;
+package org.elasticsearch.protocol.xpack.ml.job.util;

import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -25,7 +25,7 @@ import java.io.IOException;
import java.time.format.DateTimeFormatter;
import java.util.Date;

-final class TimeUtil {
+public final class TimeUtil {

    /**
     * Parse out a Date object given the current parser and field name.
@@ -35,7 +35,7 @@ final class TimeUtil {
     * @return parsed Date object
     * @throws IOException from XContentParser
     */
-    static Date parseTimeField(XContentParser parser, String fieldName) throws IOException {
+    public static Date parseTimeField(XContentParser parser, String fieldName) throws IOException {
        if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
            return new Date(parser.longValue());
        } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
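
The visibility change above exists so that parsers outside the util package can delegate date parsing to TimeUtil; the import hunks earlier in this commit are the consumers. A sketch of the kind of declaration they use (the field name and setter are illustrative, not taken from this diff):

    // Illustrative only: wire a date field into an ObjectParser using the now-public helper.
    // TIMESTAMP and Builder::setTimestamp stand in for whatever field the calling class parses.
    PARSER.declareField(Builder::setTimestamp,
            p -> TimeUtil.parseTimeField(p, TIMESTAMP.getPreferredName()),
            TIMESTAMP, ObjectParser.ValueType.VALUE);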
@@ -0,0 +1,268 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.protocol.xpack.ml.job.config;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class AnalysisConfigTests extends AbstractXContentTestCase<AnalysisConfig> {

    public static AnalysisConfig.Builder createRandomized() {
        boolean isCategorization = randomBoolean();
        List<Detector> detectors = new ArrayList<>();
        int numDetectors = randomIntBetween(1, 10);
        for (int i = 0; i < numDetectors; i++) {
            Detector.Builder builder = new Detector.Builder("count", null);
            builder.setPartitionFieldName(isCategorization ? "mlcategory" : "part");
            detectors.add(builder.build());
        }
        AnalysisConfig.Builder builder = new AnalysisConfig.Builder(detectors);
        if (randomBoolean()) {
            TimeValue bucketSpan = TimeValue.timeValueSeconds(randomIntBetween(1, 1_000_000));
            builder.setBucketSpan(bucketSpan);
        }
        if (isCategorization) {
            builder.setCategorizationFieldName(randomAlphaOfLength(10));
            if (randomBoolean()) {
                builder.setCategorizationFilters(Arrays.asList(generateRandomStringArray(10, 10, false)));
            } else {
                CategorizationAnalyzerConfig.Builder analyzerBuilder = new CategorizationAnalyzerConfig.Builder();
                if (rarely()) {
                    analyzerBuilder.setAnalyzer(randomAlphaOfLength(10));
                } else {
                    if (randomBoolean()) {
                        for (String pattern : generateRandomStringArray(3, 40, false)) {
                            Map<String, Object> charFilter = new HashMap<>();
                            charFilter.put("type", "pattern_replace");
                            charFilter.put("pattern", pattern);
                            analyzerBuilder.addCharFilter(charFilter);
                        }
                    }

                    Map<String, Object> tokenizer = new HashMap<>();
                    tokenizer.put("type", "pattern");
                    tokenizer.put("pattern", randomAlphaOfLength(10));
                    analyzerBuilder.setTokenizer(tokenizer);

                    if (randomBoolean()) {
                        for (String pattern : generateRandomStringArray(4, 40, false)) {
                            Map<String, Object> tokenFilter = new HashMap<>();
                            tokenFilter.put("type", "pattern_replace");
                            tokenFilter.put("pattern", pattern);
                            analyzerBuilder.addTokenFilter(tokenFilter);
                        }
                    }
                }
                builder.setCategorizationAnalyzerConfig(analyzerBuilder.build());
            }
        }
        if (randomBoolean()) {
            builder.setLatency(TimeValue.timeValueSeconds(randomIntBetween(1, 1_000_000)));
        }
        if (randomBoolean()) {
            builder.setMultivariateByFields(randomBoolean());
        }
        if (randomBoolean()) {
            builder.setOverlappingBuckets(randomBoolean());
        }
        if (randomBoolean()) {
            builder.setResultFinalizationWindow(randomNonNegativeLong());
        }

        builder.setInfluencers(Arrays.asList(generateRandomStringArray(10, 10, false)));
        return builder;
    }

    @Override
    protected AnalysisConfig createTestInstance() {
        return createRandomized().build();
    }

    @Override
    protected AnalysisConfig doParseInstance(XContentParser parser) {
        return AnalysisConfig.PARSER.apply(parser, null).build();
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }

    public void testBuilder_WithNullDetectors() {
        AnalysisConfig.Builder builder = new AnalysisConfig.Builder(new ArrayList<>());
        NullPointerException ex = expectThrows(NullPointerException.class, () -> builder.setDetectors(null));
        assertEquals("[detectors] must not be null", ex.getMessage());
    }

    public void testEquals_GivenSameReference() {
        AnalysisConfig config = createRandomized().build();
        assertTrue(config.equals(config));
    }

    public void testEquals_GivenDifferentClass() {
        assertFalse(createRandomized().build().equals("a string"));
    }

    public void testEquals_GivenNull() {
        assertFalse(createRandomized().build().equals(null));
    }

    public void testEquals_GivenEqualConfig() {
        AnalysisConfig config1 = createValidCategorizationConfig().build();
        AnalysisConfig config2 = createValidCategorizationConfig().build();

        assertTrue(config1.equals(config2));
        assertTrue(config2.equals(config1));
        assertEquals(config1.hashCode(), config2.hashCode());
    }

    public void testEquals_GivenDifferentBucketSpan() {
        AnalysisConfig.Builder builder = createConfigBuilder();
        builder.setBucketSpan(TimeValue.timeValueSeconds(1800));
        AnalysisConfig config1 = builder.build();

        builder = createConfigBuilder();
        builder.setBucketSpan(TimeValue.timeValueHours(1));
        AnalysisConfig config2 = builder.build();

        assertFalse(config1.equals(config2));
        assertFalse(config2.equals(config1));
    }

    public void testEquals_GivenCategorizationField() {
        AnalysisConfig.Builder builder = createValidCategorizationConfig();
        builder.setCategorizationFieldName("foo");
        AnalysisConfig config1 = builder.build();

        builder = createValidCategorizationConfig();
        builder.setCategorizationFieldName("bar");
        AnalysisConfig config2 = builder.build();

        assertFalse(config1.equals(config2));
        assertFalse(config2.equals(config1));
    }

    public void testEquals_GivenDifferentDetector() {
        AnalysisConfig config1 = createConfigWithDetectors(Collections.singletonList(new Detector.Builder("min", "low_count").build()));

        AnalysisConfig config2 = createConfigWithDetectors(Collections.singletonList(new Detector.Builder("min", "high_count").build()));

        assertFalse(config1.equals(config2));
        assertFalse(config2.equals(config1));
    }

    public void testEquals_GivenDifferentInfluencers() {
        AnalysisConfig.Builder builder = createConfigBuilder();
        builder.setInfluencers(Collections.singletonList("foo"));
        AnalysisConfig config1 = builder.build();

        builder = createConfigBuilder();
        builder.setInfluencers(Collections.singletonList("bar"));
        AnalysisConfig config2 = builder.build();

        assertFalse(config1.equals(config2));
        assertFalse(config2.equals(config1));
    }

    public void testEquals_GivenDifferentLatency() {
        AnalysisConfig.Builder builder = createConfigBuilder();
        builder.setLatency(TimeValue.timeValueSeconds(1800));
        AnalysisConfig config1 = builder.build();

        builder = createConfigBuilder();
        builder.setLatency(TimeValue.timeValueSeconds(1801));
        AnalysisConfig config2 = builder.build();

        assertFalse(config1.equals(config2));
        assertFalse(config2.equals(config1));
    }

    public void testEquals_GivenSummaryCountField() {
        AnalysisConfig.Builder builder = createConfigBuilder();
        builder.setSummaryCountFieldName("foo");
        AnalysisConfig config1 = builder.build();

        builder = createConfigBuilder();
        builder.setSummaryCountFieldName("bar");
        AnalysisConfig config2 = builder.build();

        assertFalse(config1.equals(config2));
        assertFalse(config2.equals(config1));
    }

    public void testEquals_GivenMultivariateByField() {
        AnalysisConfig.Builder builder = createConfigBuilder();
        builder.setMultivariateByFields(true);
        AnalysisConfig config1 = builder.build();

        builder = createConfigBuilder();
        builder.setMultivariateByFields(false);
        AnalysisConfig config2 = builder.build();

        assertFalse(config1.equals(config2));
        assertFalse(config2.equals(config1));
    }

    public void testEquals_GivenDifferentCategorizationFilters() {
        AnalysisConfig.Builder configBuilder1 = createValidCategorizationConfig();
        AnalysisConfig.Builder configBuilder2 = createValidCategorizationConfig();
        configBuilder1.setCategorizationFilters(Arrays.asList("foo", "bar"));
        configBuilder2.setCategorizationFilters(Arrays.asList("foo", "foobar"));
        AnalysisConfig config1 = configBuilder1.build();
        AnalysisConfig config2 = configBuilder2.build();

        assertFalse(config1.equals(config2));
        assertFalse(config2.equals(config1));
    }

    private static AnalysisConfig createConfigWithDetectors(List<Detector> detectors) {
        return new AnalysisConfig.Builder(detectors).build();
    }

    private static AnalysisConfig.Builder createConfigBuilder() {
        return new AnalysisConfig.Builder(Collections.singletonList(new Detector.Builder("min", "count").build()));
    }

    private static AnalysisConfig.Builder createValidCategorizationConfig() {
        Detector.Builder detector = new Detector.Builder("count", null);
        detector.setByFieldName("mlcategory");
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(TimeValue.timeValueHours(1));
        analysisConfig.setLatency(TimeValue.ZERO);
        analysisConfig.setCategorizationFieldName("msg");
        return analysisConfig;
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
        return new NamedXContentRegistry(searchModule.getNamedXContents());
    }
}
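
For readers who find the randomised branches above hard to follow, here is a deterministic sketch of what createRandomized() assembles when it takes the categorization path; the field names and regex patterns are illustrative, not taken from this commit:

    // Illustrative, fixed-value equivalent of the categorization branch in createRandomized().
    Detector.Builder detector = new Detector.Builder("count", null);
    detector.setPartitionFieldName("mlcategory");
    AnalysisConfig.Builder config = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
    config.setBucketSpan(TimeValue.timeValueMinutes(15));
    config.setCategorizationFieldName("message");

    CategorizationAnalyzerConfig.Builder analyzer = new CategorizationAnalyzerConfig.Builder();
    Map<String, Object> charFilter = new HashMap<>();   // strip bracketed prefixes before categorizing
    charFilter.put("type", "pattern_replace");
    charFilter.put("pattern", "\\[.*?\\]");
    analyzer.addCharFilter(charFilter);

    Map<String, Object> tokenizer = new HashMap<>();     // split the categorization field on whitespace
    tokenizer.put("type", "pattern");
    tokenizer.put("pattern", "\\s+");
    analyzer.setTokenizer(tokenizer);

    config.setCategorizationAnalyzerConfig(analyzer.build());
    config.setInfluencers(Collections.singletonList("clientip"));
    AnalysisConfig analysisConfig = config.build();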
@@ -0,0 +1,276 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.protocol.xpack.ml.job.config;

import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class JobTests extends AbstractXContentTestCase<Job> {

    private static final String FUTURE_JOB = "{\n" +
            "    \"job_id\": \"farequote\",\n" +
            "    \"create_time\": 1234567890000,\n" +
            "    \"tomorrows_technology_today\": \"wow\",\n" +
            "    \"analysis_config\": {\n" +
            "        \"bucket_span\": \"1h\",\n" +
            "        \"something_new\": \"gasp\",\n" +
            "        \"detectors\": [{\"function\": \"metric\", \"field_name\": \"responsetime\", \"by_field_name\": \"airline\"}]\n" +
            "    },\n" +
            "    \"data_description\": {\n" +
            "        \"time_field\": \"time\",\n" +
            "        \"the_future\": 123\n" +
            "    }\n" +
            "}";

    @Override
    protected Job createTestInstance() {
        return createRandomizedJob();
    }

    @Override
    protected Job doParseInstance(XContentParser parser) {
        return Job.PARSER.apply(parser, null).build();
    }

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }

    public void testFutureMetadataParse() throws IOException {
        XContentParser parser = XContentFactory.xContent(XContentType.JSON)
                .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, FUTURE_JOB);
        // The parser should tolerate unknown fields
        assertNotNull(Job.PARSER.apply(parser, null).build());
    }

    public void testEquals_GivenDifferentClass() {
        Job job = buildJobBuilder("foo").build();
        assertFalse(job.equals("a string"));
    }

    public void testEquals_GivenDifferentIds() {
        Date createTime = new Date();
        Job.Builder builder = buildJobBuilder("foo");
        builder.setCreateTime(createTime);
        Job job1 = builder.build();
        builder.setId("bar");
        Job job2 = builder.build();
        assertFalse(job1.equals(job2));
    }

    public void testEquals_GivenDifferentRenormalizationWindowDays() {
        Date date = new Date();
        Job.Builder jobDetails1 = new Job.Builder("foo");
        jobDetails1.setDataDescription(new DataDescription.Builder());
        jobDetails1.setAnalysisConfig(createAnalysisConfig());
        jobDetails1.setRenormalizationWindowDays(3L);
        jobDetails1.setCreateTime(date);
        Job.Builder jobDetails2 = new Job.Builder("foo");
        jobDetails2.setDataDescription(new DataDescription.Builder());
        jobDetails2.setRenormalizationWindowDays(4L);
        jobDetails2.setAnalysisConfig(createAnalysisConfig());
        jobDetails2.setCreateTime(date);
        assertFalse(jobDetails1.build().equals(jobDetails2.build()));
    }

    public void testEquals_GivenDifferentBackgroundPersistInterval() {
        Date date = new Date();
        Job.Builder jobDetails1 = new Job.Builder("foo");
        jobDetails1.setDataDescription(new DataDescription.Builder());
        jobDetails1.setAnalysisConfig(createAnalysisConfig());
        jobDetails1.setBackgroundPersistInterval(TimeValue.timeValueSeconds(10000L));
        jobDetails1.setCreateTime(date);
        Job.Builder jobDetails2 = new Job.Builder("foo");
        jobDetails2.setDataDescription(new DataDescription.Builder());
        jobDetails2.setBackgroundPersistInterval(TimeValue.timeValueSeconds(8000L));
        jobDetails2.setAnalysisConfig(createAnalysisConfig());
        jobDetails2.setCreateTime(date);
        assertFalse(jobDetails1.build().equals(jobDetails2.build()));
    }

    public void testEquals_GivenDifferentModelSnapshotRetentionDays() {
        Date date = new Date();
        Job.Builder jobDetails1 = new Job.Builder("foo");
        jobDetails1.setDataDescription(new DataDescription.Builder());
        jobDetails1.setAnalysisConfig(createAnalysisConfig());
        jobDetails1.setModelSnapshotRetentionDays(10L);
        jobDetails1.setCreateTime(date);
        Job.Builder jobDetails2 = new Job.Builder("foo");
        jobDetails2.setDataDescription(new DataDescription.Builder());
        jobDetails2.setModelSnapshotRetentionDays(8L);
        jobDetails2.setAnalysisConfig(createAnalysisConfig());
        jobDetails2.setCreateTime(date);
        assertFalse(jobDetails1.build().equals(jobDetails2.build()));
    }

    public void testEquals_GivenDifferentResultsRetentionDays() {
        Date date = new Date();
        Job.Builder jobDetails1 = new Job.Builder("foo");
        jobDetails1.setDataDescription(new DataDescription.Builder());
        jobDetails1.setAnalysisConfig(createAnalysisConfig());
        jobDetails1.setCreateTime(date);
        jobDetails1.setResultsRetentionDays(30L);
        Job.Builder jobDetails2 = new Job.Builder("foo");
        jobDetails2.setDataDescription(new DataDescription.Builder());
        jobDetails2.setResultsRetentionDays(4L);
        jobDetails2.setAnalysisConfig(createAnalysisConfig());
        jobDetails2.setCreateTime(date);
        assertFalse(jobDetails1.build().equals(jobDetails2.build()));
    }

    public void testEquals_GivenDifferentCustomSettings() {
        Job.Builder jobDetails1 = buildJobBuilder("foo");
        Map<String, Object> customSettings1 = new HashMap<>();
        customSettings1.put("key1", "value1");
        jobDetails1.setCustomSettings(customSettings1);
        Job.Builder jobDetails2 = buildJobBuilder("foo");
        Map<String, Object> customSettings2 = new HashMap<>();
        customSettings2.put("key2", "value2");
        jobDetails2.setCustomSettings(customSettings2);
        assertFalse(jobDetails1.build().equals(jobDetails2.build()));
    }

    public void testCopyConstructor() {
        for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) {
            Job job = createTestInstance();
            Job copy = new Job.Builder(job).build();
            assertEquals(job, copy);
        }
    }

    public void testBuilder_WithNullID() {
        Job.Builder builder = new Job.Builder("anything").setId(null);
        NullPointerException ex = expectThrows(NullPointerException.class, builder::build);
        assertEquals("[job_id] must not be null", ex.getMessage());
    }

    public void testBuilder_WithNullJobType() {
        Job.Builder builder = new Job.Builder("anything").setJobType(null);
        NullPointerException ex = expectThrows(NullPointerException.class, builder::build);
        assertEquals("[job_type] must not be null", ex.getMessage());
    }

    public static Job.Builder buildJobBuilder(String id, Date date) {
        Job.Builder builder = new Job.Builder(id);
        builder.setCreateTime(date);
        AnalysisConfig.Builder ac = createAnalysisConfig();
        DataDescription.Builder dc = new DataDescription.Builder();
        builder.setAnalysisConfig(ac);
        builder.setDataDescription(dc);
        return builder;
    }

    public static Job.Builder buildJobBuilder(String id) {
        return buildJobBuilder(id, new Date());
    }

    public static String randomValidJobId() {
        CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
        return generator.ofCodePointsLength(random(), 10, 10);
    }

    public static AnalysisConfig.Builder createAnalysisConfig() {
        Detector.Builder d1 = new Detector.Builder("info_content", "domain");
        d1.setOverFieldName("client");
        Detector.Builder d2 = new Detector.Builder("min", "field");
        return new AnalysisConfig.Builder(Arrays.asList(d1.build(), d2.build()));
    }

    public static Job createRandomizedJob() {
        String jobId = randomValidJobId();
        Job.Builder builder = new Job.Builder(jobId);
        if (randomBoolean()) {
            builder.setDescription(randomAlphaOfLength(10));
        }
        if (randomBoolean()) {
            int groupsNum = randomIntBetween(0, 10);
            List<String> groups = new ArrayList<>(groupsNum);
            for (int i = 0; i < groupsNum; i++) {
                groups.add(randomValidJobId());
            }
            builder.setGroups(groups);
        }
        builder.setCreateTime(new Date(randomNonNegativeLong()));
        if (randomBoolean()) {
            builder.setFinishedTime(new Date(randomNonNegativeLong()));
        }
        if (randomBoolean()) {
            builder.setLastDataTime(new Date(randomNonNegativeLong()));
        }
        if (randomBoolean()) {
            builder.setEstablishedModelMemory(randomNonNegativeLong());
        }
        builder.setAnalysisConfig(AnalysisConfigTests.createRandomized());
        builder.setAnalysisLimits(AnalysisLimitsTests.createRandomized());

        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setFormat(randomFrom(DataDescription.DataFormat.values()));
        builder.setDataDescription(dataDescription);

        if (randomBoolean()) {
            builder.setModelPlotConfig(new ModelPlotConfig(randomBoolean(), randomAlphaOfLength(10)));
        }
        if (randomBoolean()) {
            builder.setRenormalizationWindowDays(randomNonNegativeLong());
        }
        if (randomBoolean()) {
            builder.setBackgroundPersistInterval(TimeValue.timeValueHours(randomIntBetween(1, 24)));
        }
        if (randomBoolean()) {
            builder.setModelSnapshotRetentionDays(randomNonNegativeLong());
        }
        if (randomBoolean()) {
            builder.setResultsRetentionDays(randomNonNegativeLong());
        }
        if (randomBoolean()) {
            builder.setCustomSettings(Collections.singletonMap(randomAlphaOfLength(10), randomAlphaOfLength(10)));
        }
        if (randomBoolean()) {
            builder.setModelSnapshotId(randomAlphaOfLength(10));
        }
        if (randomBoolean()) {
            builder.setResultsIndexName(randomValidJobId());
        }
        return builder.build();
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
        return new NamedXContentRegistry(searchModule.getNamedXContents());
    }
}
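
As a usage note (not part of the commit), a sketch of serialising one of the jobs these helpers build; it assumes Job implements ToXContentObject, which AbstractXContentTestCase already relies on:

    // Illustrative fragment: build a minimal job with the test helper and print its JSON form.
    Job job = JobTests.buildJobBuilder("my-job").build();
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder();
    job.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
    System.out.println(Strings.toString(xContentBuilder));   // org.elasticsearch.common.Strings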