Merge branch 'master' into index-lifecycle

Tal Levy 2018-08-08 07:21:01 -07:00
commit 2d925c9a9a
153 changed files with 4745 additions and 2406 deletions

View File

@@ -4,7 +4,7 @@ lucene = 7.5.0-snapshot-608f0277b0
# optional dependencies
spatial4j = 0.7
jts = 1.15.0
jackson = 2.8.10
jackson = 2.8.11
snakeyaml = 1.17
# when updating log4j, please update also docs/java-api/index.asciidoc
log4j = 2.11.1

View File

@@ -163,8 +163,11 @@ import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipel
import org.elasticsearch.search.aggregations.pipeline.derivative.ParsedDerivative;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestion;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
import org.elasticsearch.search.suggest.term.TermSuggestion;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
import java.io.Closeable;
import java.io.IOException;
@@ -1151,11 +1154,11 @@ public class RestHighLevelClient implements Closeable {
List<NamedXContentRegistry.Entry> entries = map.entrySet().stream()
.map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue()))
.collect(Collectors.toList());
entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(TermSuggestion.NAME),
entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(TermSuggestionBuilder.SUGGESTION_NAME),
(parser, context) -> TermSuggestion.fromXContent(parser, (String)context)));
entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(PhraseSuggestion.NAME),
entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(PhraseSuggestionBuilder.SUGGESTION_NAME),
(parser, context) -> PhraseSuggestion.fromXContent(parser, (String)context)));
entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(CompletionSuggestion.NAME),
entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(CompletionSuggestionBuilder.SUGGESTION_NAME),
(parser, context) -> CompletionSuggestion.fromXContent(parser, (String)context)));
return entries;
}

View File

@@ -1 +0,0 @@
eb21a035c66ad307e66ec8fce37f5d50fd62d039

View File

@@ -0,0 +1 @@
876ead1db19f0c9e79c9789273a3ef8c6fd6c29b

View File

@@ -6,6 +6,7 @@ After=network-online.target
[Service]
RuntimeDirectory=elasticsearch
PrivateTmp=true
Environment=ES_HOME=/usr/share/elasticsearch
Environment=ES_PATH_CONF=${path.conf}
Environment=PID_DIR=/var/run/elasticsearch

View File

@@ -21,4 +21,10 @@ Aggregations::
* The Percentiles and PercentileRanks aggregations now return `null` in the REST response,
instead of `NaN`. This makes it consistent with the rest of the aggregations. Note:
this only applies to the REST response; the Java objects continue to return `NaN` (also
consistent with other aggregations)
consistent with other aggregations)
Suggesters::
* Plugins that register suggesters can now define their own types of suggestions and must
explicitly indicate the type of suggestion that they produce. Existing plugins will
require changes to their plugin registration. See the `custom-suggester` example
plugin {pull}30284[#30284]

View File

@@ -14,6 +14,7 @@ The following settings *must* be considered before going to production:
* <<heap-size,Heap size>>
* <<heap-dump-path,Heap dump path>>
* <<gc-logging,GC logging>>
* <<es-tmpdir,Temp directory>>
include::important-settings/path-settings.asciidoc[]
@@ -31,4 +32,6 @@ include::important-settings/heap-dump-path.asciidoc[]
include::important-settings/gc-logging.asciidoc[]
include::important-settings/es-tmpdir.asciidoc[]
include::important-settings/error-file.asciidoc[]

View File

@@ -0,0 +1,23 @@
[[es-tmpdir]]
=== Temp directory
By default, Elasticsearch uses a private temporary directory that the startup
script creates immediately below the system temporary directory.
On some Linux distributions a system utility will clean files and directories
from `/tmp` if they have not been recently accessed. This can lead to the
private temporary directory being removed while Elasticsearch is running if
features that require the temporary directory are not used for a long time.
This causes problems if a feature that requires the temporary directory is
subsequently used.
If you install Elasticsearch using the `.deb` or `.rpm` packages and run it
under `systemd` then the private temporary directory that Elasticsearch uses
is excluded from periodic cleanup.
However, if you intend to run the `.tar.gz` distribution on Linux for an
extended period, then you should consider creating a dedicated temporary
directory for Elasticsearch that is not under a path that will have old files
and directories cleaned from it. This directory should have permissions set
so that only the user that Elasticsearch runs as can access it. Then set the
`$ES_TMPDIR` environment variable to point to it before starting Elasticsearch.
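For example, a minimal sketch of such a setup, assuming the `.tar.gz`
distribution runs as an `elasticsearch` user and using the illustrative path
`/opt/es-tmp`:

[source,sh]
--------------------------------------------------
# create a dedicated temporary directory that only the
# elasticsearch user can access
sudo mkdir -m 0700 /opt/es-tmp
sudo chown elasticsearch:elasticsearch /opt/es-tmp

# point Elasticsearch at it before starting
export ES_TMPDIR=/opt/es-tmp
./bin/elasticsearch
--------------------------------------------------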

View File

@@ -1 +0,0 @@
eb21a035c66ad307e66ec8fce37f5d50fd62d039

View File

@@ -0,0 +1 @@
876ead1db19f0c9e79c9789273a3ef8c6fd6c29b

View File

@@ -1 +0,0 @@
1c58cc9313ddf19f0900cd61ed044874278ce320

View File

@@ -0,0 +1 @@
8b9826e16c3366764bfb7ad7362554f0471046c3

View File

@@ -1 +0,0 @@
e853081fadaad3e98ed801937acc3d8f77580686

View File

@@ -0,0 +1 @@
d9d1c49c5d9d5e46e2aee55f3cdd119286fe0fc1

View File

@@ -1 +0,0 @@
1e08caf1d787c825307d8cc6362452086020d853

View File

@@ -0,0 +1 @@
2e77c6ff7342cd61ab1ae7cb14ed16aebfc8a72a

View File

@@ -279,10 +279,6 @@ public final class Def {
String type = signature.substring(1, separator);
String call = signature.substring(separator+1, separator2);
int numCaptures = Integer.parseInt(signature.substring(separator2+1));
Class<?> captures[] = new Class<?>[numCaptures];
for (int capture = 0; capture < captures.length; capture++) {
captures[capture] = callSiteType.parameterType(i + 1 + capture);
}
MethodHandle filter;
Class<?> interfaceType = method.typeParameters.get(i - 1 - replaced);
if (signature.charAt(0) == 'S') {
@@ -294,11 +290,15 @@
interfaceType,
type,
call,
captures);
numCaptures);
} else if (signature.charAt(0) == 'D') {
// the interface type is now known, but we need to get the implementation.
// this is dynamically based on the receiver type (and cached separately, underneath
// this cache). It won't blow up since we never nest here (just references)
Class<?> captures[] = new Class<?>[numCaptures];
for (int capture = 0; capture < captures.length; capture++) {
captures[capture] = callSiteType.parameterType(i + 1 + capture);
}
MethodType nestedType = MethodType.methodType(interfaceType, captures);
CallSite nested = DefBootstrap.bootstrap(painlessLookup,
localMethods,
@@ -331,57 +331,34 @@
*/
static MethodHandle lookupReference(PainlessLookup painlessLookup, Map<String, LocalMethod> localMethods,
MethodHandles.Lookup methodHandlesLookup, String interfaceClass, Class<?> receiverClass, String name) throws Throwable {
Class<?> interfaceType = painlessLookup.canonicalTypeNameToType(interfaceClass);
PainlessMethod interfaceMethod = painlessLookup.lookupPainlessClass(interfaceType).functionalMethod;
if (interfaceMethod == null) {
throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface");
}
int arity = interfaceMethod.typeParameters.size();
PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity);
Class<?> interfaceType = painlessLookup.canonicalTypeNameToType(interfaceClass);
PainlessMethod interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(interfaceType);
if (interfaceMethod == null) {
throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface");
}
int arity = interfaceMethod.typeParameters.size();
PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity);
return lookupReferenceInternal(painlessLookup, localMethods, methodHandlesLookup,
interfaceType, PainlessLookupUtility.typeToCanonicalTypeName(implMethod.targetClass),
implMethod.javaMethod.getName(), receiverClass);
implMethod.javaMethod.getName(), 1);
}
/** Returns a method handle to an implementation of clazz, given method reference signature. */
private static MethodHandle lookupReferenceInternal(PainlessLookup painlessLookup, Map<String, LocalMethod> localMethods,
MethodHandles.Lookup methodHandlesLookup, Class<?> clazz, String type, String call, Class<?>... captures) throws Throwable {
final FunctionRef ref;
if ("this".equals(type)) {
// user written method
PainlessMethod interfaceMethod = painlessLookup.lookupPainlessClass(clazz).functionalMethod;
if (interfaceMethod == null) {
throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
"to [" + PainlessLookupUtility.typeToCanonicalTypeName(clazz) + "], not a functional interface");
}
int arity = interfaceMethod.typeParameters.size() + captures.length;
LocalMethod localMethod = localMethods.get(Locals.buildLocalMethodKey(call, arity));
if (localMethod == null) {
// is it a synthetic method? If we generated the method ourselves, be more helpful. It can only fail
// because the arity does not match the expected interface type.
if (call.contains("$")) {
throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.javaMethod.getName() +
"] in [" + clazz + "]");
}
throw new IllegalArgumentException("Unknown call [" + call + "] with [" + arity + "] arguments.");
}
ref = new FunctionRef(clazz, interfaceMethod, call, localMethod.methodType, captures.length);
} else {
// whitelist lookup
ref = FunctionRef.resolveFromLookup(painlessLookup, clazz, type, call, captures.length);
}
final CallSite callSite = LambdaBootstrap.lambdaBootstrap(
methodHandlesLookup,
ref.interfaceMethodName,
ref.factoryMethodType,
ref.interfaceMethodType,
ref.delegateClassName,
ref.delegateInvokeType,
ref.delegateMethodName,
ref.delegateMethodType,
ref.isDelegateInterface ? 1 : 0
);
return callSite.dynamicInvoker().asType(MethodType.methodType(clazz, captures));
MethodHandles.Lookup methodHandlesLookup, Class<?> clazz, String type, String call, int captures) throws Throwable {
final FunctionRef ref = FunctionRef.create(painlessLookup, localMethods, null, clazz, type, call, captures);
final CallSite callSite = LambdaBootstrap.lambdaBootstrap(
methodHandlesLookup,
ref.interfaceMethodName,
ref.factoryMethodType,
ref.interfaceMethodType,
ref.delegateClassName,
ref.delegateInvokeType,
ref.delegateMethodName,
ref.delegateMethodType,
ref.isDelegateInterface ? 1 : 0
);
return callSite.dynamicInvoker().asType(MethodType.methodType(clazz, ref.factoryMethodType.parameterArray()));
}
/**

View File

@@ -20,17 +20,17 @@
package org.elasticsearch.painless;
import org.elasticsearch.painless.Locals.LocalMethod;
import org.elasticsearch.painless.lookup.PainlessClass;
import org.elasticsearch.painless.lookup.PainlessConstructor;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupUtility;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.objectweb.asm.Type;
import java.lang.invoke.MethodType;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.painless.WriterConstants.CLASS_NAME;
import static org.objectweb.asm.Opcodes.H_INVOKEINTERFACE;
@@ -39,251 +39,210 @@ import static org.objectweb.asm.Opcodes.H_INVOKEVIRTUAL;
import static org.objectweb.asm.Opcodes.H_NEWINVOKESPECIAL;
/**
* Reference to a function or lambda.
* <p>
* Once you have created one of these, you have "everything you need" to call {@link LambdaBootstrap}
* either statically from bytecode with invokedynamic, or at runtime from Java.
* Contains all the values necessary to write the instruction to initiate a
* {@link LambdaBootstrap} for either a function reference or a user-defined
* lambda function.
*/
public class FunctionRef {
/**
* Creates a new FunctionRef which will resolve {@code type::call} from the whitelist.
* @param painlessLookup the whitelist against which this script is being compiled
* @param localMethods user-defined and synthetic methods generated directly on the script class
* @param location the character number within the script at compile-time
* @param targetClass functional interface type to implement.
* @param typeName the left hand side of a method reference expression
* @param methodName the right hand side of a method reference expression
* @param numberOfCaptures number of captured arguments
*/
public static FunctionRef create(PainlessLookup painlessLookup, Map<String, LocalMethod> localMethods, Location location,
Class<?> targetClass, String typeName, String methodName, int numberOfCaptures) {
Objects.requireNonNull(painlessLookup);
Objects.requireNonNull(targetClass);
Objects.requireNonNull(typeName);
Objects.requireNonNull(methodName);
String targetClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass);
PainlessMethod interfaceMethod;
try {
try {
interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(targetClass);
} catch (IllegalArgumentException iae) {
throw new IllegalArgumentException("cannot convert function reference [" + typeName + "::" + methodName + "] " +
"to a non-functional interface [" + targetClassName + "]", iae);
}
String interfaceMethodName = interfaceMethod.javaMethod.getName();
MethodType interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
String delegateClassName;
boolean isDelegateInterface;
int delegateInvokeType;
String delegateMethodName;
MethodType delegateMethodType;
Class<?> delegateMethodReturnType;
List<Class<?>> delegateMethodParameters;
int interfaceTypeParametersSize = interfaceMethod.typeParameters.size();
if ("this".equals(typeName)) {
Objects.requireNonNull(localMethods);
if (numberOfCaptures < 0) {
throw new IllegalStateException("internal error");
}
String localMethodKey = Locals.buildLocalMethodKey(methodName, numberOfCaptures + interfaceTypeParametersSize);
LocalMethod localMethod = localMethods.get(localMethodKey);
if (localMethod == null) {
throw new IllegalArgumentException("function reference [this::" + localMethodKey + "] " +
"matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " +
"not found" + (localMethodKey.contains("$") ? " due to an incorrect number of arguments" : "")
);
}
delegateClassName = CLASS_NAME;
isDelegateInterface = false;
delegateInvokeType = H_INVOKESTATIC;
delegateMethodName = localMethod.name;
delegateMethodType = localMethod.methodType;
delegateMethodReturnType = localMethod.returnType;
delegateMethodParameters = localMethod.typeParameters;
} else if ("new".equals(methodName)) {
if (numberOfCaptures != 0) {
throw new IllegalStateException("internal error");
}
PainlessConstructor painlessConstructor;
try {
painlessConstructor = painlessLookup.lookupPainlessConstructor(typeName, interfaceTypeParametersSize);
} catch (IllegalArgumentException iae) {
throw new IllegalArgumentException("function reference [" + typeName + "::new/" + interfaceTypeParametersSize + "] " +
"matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " +
"not found", iae);
}
delegateClassName = painlessConstructor.javaConstructor.getDeclaringClass().getName();
isDelegateInterface = false;
delegateInvokeType = H_NEWINVOKESPECIAL;
delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME;
delegateMethodType = painlessConstructor.methodType;
delegateMethodReturnType = painlessConstructor.javaConstructor.getDeclaringClass();
delegateMethodParameters = painlessConstructor.typeParameters;
} else {
if (numberOfCaptures != 0 && numberOfCaptures != 1) {
throw new IllegalStateException("internal error");
}
boolean captured = numberOfCaptures == 1;
PainlessMethod painlessMethod;
try {
painlessMethod = painlessLookup.lookupPainlessMethod(typeName, true, methodName, interfaceTypeParametersSize);
if (captured) {
throw new IllegalStateException("internal error");
}
} catch (IllegalArgumentException staticIAE) {
try {
painlessMethod = painlessLookup.lookupPainlessMethod(typeName, false, methodName,
captured ? interfaceTypeParametersSize : interfaceTypeParametersSize - 1);
} catch (IllegalArgumentException iae) {
throw new IllegalArgumentException(
"function reference " + "[" + typeName + "::" + methodName + "/" + interfaceTypeParametersSize + "] " +
"matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " +
"not found", iae);
}
}
delegateClassName = painlessMethod.javaMethod.getDeclaringClass().getName();
isDelegateInterface = painlessMethod.javaMethod.getDeclaringClass().isInterface();
if (Modifier.isStatic(painlessMethod.javaMethod.getModifiers())) {
delegateInvokeType = H_INVOKESTATIC;
} else if (isDelegateInterface) {
delegateInvokeType = H_INVOKEINTERFACE;
} else {
delegateInvokeType = H_INVOKEVIRTUAL;
}
delegateMethodName = painlessMethod.javaMethod.getName();
delegateMethodType = painlessMethod.methodType;
delegateMethodReturnType = painlessMethod.returnType;
if (delegateMethodType.parameterList().size() > painlessMethod.typeParameters.size()) {
delegateMethodParameters = new ArrayList<>(painlessMethod.typeParameters);
delegateMethodParameters.add(0, delegateMethodType.parameterType(0));
} else {
delegateMethodParameters = painlessMethod.typeParameters;
}
}
if (location != null) {
for (int typeParameter = 0; typeParameter < interfaceTypeParametersSize; ++typeParameter) {
Class<?> from = interfaceMethod.typeParameters.get(typeParameter);
Class<?> to = delegateMethodParameters.get(numberOfCaptures + typeParameter);
AnalyzerCaster.getLegalCast(location, from, to, false, true);
}
if (interfaceMethod.returnType != void.class) {
AnalyzerCaster.getLegalCast(location, delegateMethodReturnType, interfaceMethod.returnType, false, true);
}
}
MethodType factoryMethodType = MethodType.methodType(targetClass,
delegateMethodType.dropParameterTypes(numberOfCaptures, delegateMethodType.parameterCount()));
delegateMethodType = delegateMethodType.dropParameterTypes(0, numberOfCaptures);
return new FunctionRef(interfaceMethodName, interfaceMethodType,
delegateClassName, isDelegateInterface, delegateInvokeType, delegateMethodName, delegateMethodType,
factoryMethodType
);
} catch (IllegalArgumentException iae) {
if (location != null) {
throw location.createError(iae);
}
throw iae;
}
}
/** functional interface method name */
public final String interfaceMethodName;
/** factory (CallSite) method signature */
public final MethodType factoryMethodType;
/** functional interface method signature */
public final MethodType interfaceMethodType;
/** class of the delegate method to be called */
public final String delegateClassName;
/** whether a call is made on a delegate interface */
public final boolean isDelegateInterface;
/** the invocation type of the delegate method */
public final int delegateInvokeType;
/** the name of the delegate method */
public final String delegateMethodName;
/** delegate method signature */
public final MethodType delegateMethodType;
/** factory (CallSite) method signature */
public final MethodType factoryMethodType;
/** interface method */
public final PainlessMethod interfaceMethod;
/** delegate method type parameters */
public final List<Class<?>> delegateTypeParameters;
/** delegate method return type */
public final Class<?> delegateReturnType;
private FunctionRef(
String interfaceMethodName, MethodType interfaceMethodType,
String delegateClassName, boolean isDelegateInterface,
int delegateInvokeType, String delegateMethodName, MethodType delegateMethodType,
MethodType factoryMethodType) {
/** factory method type descriptor */
public final String factoryDescriptor;
/** functional interface method as type */
public final Type interfaceType;
/** delegate method type method as type */
public final Type delegateType;
/** whether a call is made on a delegate interface */
public final boolean isDelegateInterface;
/**
* Creates a new FunctionRef, which will resolve {@code type::call} from the whitelist.
* @param painlessLookup the whitelist against which this script is being compiled
* @param expected functional interface type to implement.
* @param type the left hand side of a method reference expression
* @param call the right hand side of a method reference expression
* @param numCaptures number of captured arguments
*/
public static FunctionRef resolveFromLookup(
PainlessLookup painlessLookup, Class<?> expected, String type, String call, int numCaptures) {
if ("new".equals(call)) {
return new FunctionRef(expected, painlessLookup.lookupPainlessClass(expected).functionalMethod,
lookup(painlessLookup, expected, type), numCaptures);
} else {
return new FunctionRef(expected, painlessLookup.lookupPainlessClass(expected).functionalMethod,
lookup(painlessLookup, expected, type, call, numCaptures > 0), numCaptures);
}
}
/**
* Creates a new FunctionRef (already resolved)
* @param expected functional interface type to implement
* @param interfaceMethod functional interface method
* @param delegateConstructor implementation constructor
* @param numCaptures number of captured arguments
*/
public FunctionRef(Class<?> expected, PainlessMethod interfaceMethod, PainlessConstructor delegateConstructor, int numCaptures) {
Constructor<?> javaConstructor = delegateConstructor.javaConstructor;
MethodType delegateMethodType = delegateConstructor.methodType;
this.interfaceMethodName = interfaceMethod.javaMethod.getName();
this.factoryMethodType = MethodType.methodType(expected,
delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount()));
this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
this.delegateClassName = javaConstructor.getDeclaringClass().getName();
this.isDelegateInterface = false;
this.delegateInvokeType = H_NEWINVOKESPECIAL;
this.delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME;
this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures);
this.interfaceMethod = interfaceMethod;
this.delegateTypeParameters = delegateConstructor.typeParameters;
this.delegateReturnType = void.class;
this.factoryDescriptor = factoryMethodType.toMethodDescriptorString();
this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString());
this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString());
}
/**
* Creates a new FunctionRef (already resolved)
* @param expected functional interface type to implement
* @param interfaceMethod functional interface method
* @param delegateMethod implementation method
* @param numCaptures number of captured arguments
*/
public FunctionRef(Class<?> expected, PainlessMethod interfaceMethod, PainlessMethod delegateMethod, int numCaptures) {
MethodType delegateMethodType = delegateMethod.methodType;
this.interfaceMethodName = interfaceMethod.javaMethod.getName();
this.factoryMethodType = MethodType.methodType(expected,
delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount()));
this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
this.delegateClassName = delegateMethod.javaMethod.getDeclaringClass().getName();
this.isDelegateInterface = delegateMethod.javaMethod.getDeclaringClass().isInterface();
if (Modifier.isStatic(delegateMethod.javaMethod.getModifiers())) {
this.delegateInvokeType = H_INVOKESTATIC;
} else if (delegateMethod.javaMethod.getDeclaringClass().isInterface()) {
this.delegateInvokeType = H_INVOKEINTERFACE;
} else {
this.delegateInvokeType = H_INVOKEVIRTUAL;
}
this.delegateMethodName = delegateMethod.javaMethod.getName();
this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures);
this.interfaceMethod = interfaceMethod;
this.delegateTypeParameters = delegateMethod.typeParameters;
this.delegateReturnType = delegateMethod.returnType;
this.factoryDescriptor = factoryMethodType.toMethodDescriptorString();
this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString());
this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString());
}
/**
* Creates a new FunctionRef (already resolved)
* @param expected functional interface type to implement
* @param interfaceMethod functional interface method
* @param delegateMethod implementation method
* @param numCaptures number of captured arguments
*/
public FunctionRef(Class<?> expected, PainlessMethod interfaceMethod, LocalMethod delegateMethod, int numCaptures) {
MethodType delegateMethodType = delegateMethod.methodType;
this.interfaceMethodName = interfaceMethod.javaMethod.getName();
this.factoryMethodType = MethodType.methodType(expected,
delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount()));
this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
this.delegateClassName = CLASS_NAME;
this.isDelegateInterface = false;
this.delegateInvokeType = H_INVOKESTATIC;
this.delegateMethodName = delegateMethod.name;
this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures);
this.interfaceMethod = interfaceMethod;
this.delegateTypeParameters = delegateMethod.typeParameters;
this.delegateReturnType = delegateMethod.returnType;
this.factoryDescriptor = factoryMethodType.toMethodDescriptorString();
this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString());
this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString());
}
/**
* Creates a new FunctionRef (low level).
* It is for runtime use only.
*/
public FunctionRef(Class<?> expected,
PainlessMethod interfaceMethod, String delegateMethodName, MethodType delegateMethodType, int numCaptures) {
this.interfaceMethodName = interfaceMethod.javaMethod.getName();
this.factoryMethodType = MethodType.methodType(expected,
delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount()));
this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
this.delegateClassName = CLASS_NAME;
this.delegateInvokeType = H_INVOKESTATIC;
this.interfaceMethodName = interfaceMethodName;
this.interfaceMethodType = interfaceMethodType;
this.delegateClassName = delegateClassName;
this.isDelegateInterface = isDelegateInterface;
this.delegateInvokeType = delegateInvokeType;
this.delegateMethodName = delegateMethodName;
this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures);
this.isDelegateInterface = false;
this.interfaceMethod = null;
this.delegateTypeParameters = null;
this.delegateReturnType = null;
this.factoryDescriptor = null;
this.interfaceType = null;
this.delegateType = null;
}
/**
* Looks up {@code type} from the whitelist, and returns a matching constructor.
*/
private static PainlessConstructor lookup(PainlessLookup painlessLookup, Class<?> expected, String type) {
// check its really a functional interface
// for e.g. Comparable
PainlessMethod method = painlessLookup.lookupPainlessClass(expected).functionalMethod;
if (method == null) {
throw new IllegalArgumentException("Cannot convert function reference [" + type + "::new] " +
"to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface");
}
// lookup requested constructor
PainlessClass struct = painlessLookup.lookupPainlessClass(painlessLookup.canonicalTypeNameToType(type));
PainlessConstructor impl = struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(method.typeParameters.size()));
if (impl == null) {
throw new IllegalArgumentException("Unknown reference [" + type + "::new] matching [" + expected + "]");
}
return impl;
}
/**
* Looks up {@code type::call} from the whitelist, and returns a matching method.
*/
private static PainlessMethod lookup(PainlessLookup painlessLookup, Class<?> expected,
String type, String call, boolean receiverCaptured) {
// check its really a functional interface
// for e.g. Comparable
PainlessMethod method = painlessLookup.lookupPainlessClass(expected).functionalMethod;
if (method == null) {
throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
"to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface");
}
// lookup requested method
PainlessClass struct = painlessLookup.lookupPainlessClass(painlessLookup.canonicalTypeNameToType(type));
final PainlessMethod impl;
// look for a static impl first
PainlessMethod staticImpl =
struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.typeParameters.size()));
if (staticImpl == null) {
// otherwise a virtual impl
final int arity;
if (receiverCaptured) {
// receiver captured
arity = method.typeParameters.size();
} else {
// receiver passed
arity = method.typeParameters.size() - 1;
}
impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity));
} else {
impl = staticImpl;
}
if (impl == null) {
throw new IllegalArgumentException("Unknown reference [" + type + "::" + call + "] matching " +
"[" + expected + "]");
}
return impl;
this.delegateMethodType = delegateMethodType;
this.factoryMethodType = factoryMethodType;
}
}

View File

@@ -56,6 +56,7 @@ import static org.elasticsearch.painless.WriterConstants.DEF_TO_SHORT_EXPLICIT;
import static org.elasticsearch.painless.WriterConstants.DEF_TO_SHORT_IMPLICIT;
import static org.elasticsearch.painless.WriterConstants.DEF_UTIL_TYPE;
import static org.elasticsearch.painless.WriterConstants.INDY_STRING_CONCAT_BOOTSTRAP_HANDLE;
import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE;
import static org.elasticsearch.painless.WriterConstants.MAX_INDY_STRING_CONCAT_ARGS;
import static org.elasticsearch.painless.WriterConstants.PAINLESS_ERROR_TYPE;
import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_BOOLEAN;
@@ -439,4 +440,18 @@ public final class MethodWriter extends GeneratorAdapter {
invokeVirtual(type, method);
}
}
public void invokeLambdaCall(FunctionRef functionRef) {
invokeDynamic(
functionRef.interfaceMethodName,
functionRef.factoryMethodType.toMethodDescriptorString(),
LAMBDA_BOOTSTRAP_HANDLE,
Type.getMethodType(functionRef.interfaceMethodType.toMethodDescriptorString()),
functionRef.delegateClassName,
functionRef.delegateInvokeType,
functionRef.delegateMethodName,
Type.getMethodType(functionRef.delegateMethodType.toMethodDescriptorString()),
functionRef.isDelegateInterface ? 1 : 0
);
}
}

View File

@@ -35,13 +35,13 @@ public final class PainlessClass {
public final Map<String, MethodHandle> getterMethodHandles;
public final Map<String, MethodHandle> setterMethodHandles;
public final PainlessMethod functionalMethod;
public final PainlessMethod functionalInterfaceMethod;
PainlessClass(Map<String, PainlessConstructor> constructors,
Map<String, PainlessMethod> staticMethods, Map<String, PainlessMethod> methods,
Map<String, PainlessField> staticFields, Map<String, PainlessField> fields,
Map<String, MethodHandle> getterMethodHandles, Map<String, MethodHandle> setterMethodHandles,
PainlessMethod functionalMethod) {
PainlessMethod functionalInterfaceMethod) {
this.constructors = Collections.unmodifiableMap(constructors);
@@ -54,6 +54,6 @@
this.getterMethodHandles = Collections.unmodifiableMap(getterMethodHandles);
this.setterMethodHandles = Collections.unmodifiableMap(setterMethodHandles);
this.functionalMethod = functionalMethod;
this.functionalInterfaceMethod = functionalInterfaceMethod;
}
}

View File

@@ -35,7 +35,7 @@ final class PainlessClassBuilder {
final Map<String, MethodHandle> getterMethodHandles;
final Map<String, MethodHandle> setterMethodHandles;
PainlessMethod functionalMethod;
PainlessMethod functionalInterfaceMethod;
PainlessClassBuilder() {
constructors = new HashMap<>();
@@ -49,11 +49,11 @@
getterMethodHandles = new HashMap<>();
setterMethodHandles = new HashMap<>();
functionalMethod = null;
functionalInterfaceMethod = null;
}
PainlessClass build() {
return new PainlessClass(constructors, staticMethods, methods, staticFields, fields,
getterMethodHandles, setterMethodHandles, functionalMethod);
getterMethodHandles, setterMethodHandles, functionalInterfaceMethod);
}
}

View File

@@ -62,6 +62,14 @@ public final class PainlessLookup {
return classesToPainlessClasses.get(targetClass);
}
public PainlessConstructor lookupPainlessConstructor(String targetClassName, int constructorArity) {
Objects.requireNonNull(targetClassName);
Class<?> targetClass = canonicalTypeNameToType(targetClassName);
return lookupPainlessConstructor(targetClass, constructorArity);
}
public PainlessConstructor lookupPainlessConstructor(Class<?> targetClass, int constructorArity) {
Objects.requireNonNull(targetClass);
@@ -83,6 +91,14 @@
return painlessConstructor;
}
public PainlessMethod lookupPainlessMethod(String targetClassName, boolean isStatic, String methodName, int methodArity) {
Objects.requireNonNull(targetClassName);
Class<?> targetClass = canonicalTypeNameToType(targetClassName);
return lookupPainlessMethod(targetClass, isStatic, methodName, methodArity);
}
public PainlessMethod lookupPainlessMethod(Class<?> targetClass, boolean isStatic, String methodName, int methodArity) {
Objects.requireNonNull(targetClass);
Objects.requireNonNull(methodName);
@@ -111,6 +127,14 @@
return painlessMethod;
}
public PainlessField lookupPainlessField(String targetClassName, boolean isStatic, String fieldName) {
Objects.requireNonNull(targetClassName);
Class<?> targetClass = canonicalTypeNameToType(targetClassName);
return lookupPainlessField(targetClass, isStatic, fieldName);
}
public PainlessField lookupPainlessField(Class<?> targetClass, boolean isStatic, String fieldName) {
Objects.requireNonNull(targetClass);
Objects.requireNonNull(fieldName);
@@ -134,4 +158,20 @@
return painlessField;
}
public PainlessMethod lookupFunctionalInterfacePainlessMethod(Class<?> targetClass) {
PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass);
if (targetPainlessClass == null) {
throw new IllegalArgumentException("target class [" + typeToCanonicalTypeName(targetClass) + "] not found");
}
PainlessMethod functionalInterfacePainlessMethod = targetPainlessClass.functionalInterfaceMethod;
if (functionalInterfacePainlessMethod == null) {
throw new IllegalArgumentException("target class [" + typeToCanonicalTypeName(targetClass) + "] is not a functional interface");
}
return functionalInterfacePainlessMethod;
}
}

View File

@@ -875,7 +875,7 @@ public final class PainlessLookupBuilder {
} else if (javaMethods.size() == 1) {
java.lang.reflect.Method javaMethod = javaMethods.get(0);
String painlessMethodKey = buildPainlessMethodKey(javaMethod.getName(), javaMethod.getParameterCount());
painlessClassBuilder.functionalMethod = painlessClassBuilder.methods.get(painlessMethodKey);
painlessClassBuilder.functionalInterfaceMethod = painlessClassBuilder.methods.get(painlessMethodKey);
}
}
}

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.FunctionRef;
import org.elasticsearch.painless.Globals;
@@ -35,8 +34,6 @@ import org.objectweb.asm.Type;
import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE;
/**
* Represents a capturing function reference.
*/
@@ -76,23 +73,8 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda
defPointer = null;
// static case
if (captured.clazz != def.class) {
try {
ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected,
PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1);
// check casts between the interface method and the delegate method are legal
for (int i = 0; i < ref.interfaceMethod.typeParameters.size(); ++i) {
Class<?> from = ref.interfaceMethod.typeParameters.get(i);
Class<?> to = ref.delegateTypeParameters.get(i);
AnalyzerCaster.getLegalCast(location, from, to, false, true);
}
if (ref.interfaceMethod.returnType != void.class) {
AnalyzerCaster.getLegalCast(location, ref.delegateReturnType, ref.interfaceMethod.returnType, false, true);
}
} catch (IllegalArgumentException e) {
throw createError(e);
}
ref = FunctionRef.create(locals.getPainlessLookup(), locals.getMethods(), location,
expected, PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1);
}
actual = expected;
}
@@ -114,17 +96,7 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda
} else {
// typed interface, typed implementation
writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot());
writer.invokeDynamic(
ref.interfaceMethodName,
ref.factoryDescriptor,
LAMBDA_BOOTSTRAP_HANDLE,
ref.interfaceType,
ref.delegateClassName,
ref.delegateInvokeType,
ref.delegateMethodName,
ref.delegateType,
ref.isDelegateInterface ? 1 : 0
);
writer.invokeLambdaCall(ref);
}
}

View File

@@ -19,22 +19,16 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.FunctionRef;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.LocalMethod;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.lookup.PainlessLookupUtility;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.objectweb.asm.Type;
import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE;
/**
* Represents a function reference.
*/
@@ -63,39 +57,7 @@ public final class EFunctionRef extends AExpression implements ILambda {
defPointer = "S" + type + "." + call + ",0";
} else {
defPointer = null;
try {
if ("this".equals(type)) {
// user's own function
PainlessMethod interfaceMethod = locals.getPainlessLookup().lookupPainlessClass(expected).functionalMethod;
if (interfaceMethod == null) {
throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
"to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface");
}
LocalMethod delegateMethod = locals.getMethod(call, interfaceMethod.typeParameters.size());
if (delegateMethod == null) {
throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
"to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], function not found");
}
ref = new FunctionRef(expected, interfaceMethod, delegateMethod, 0);
// check casts between the interface method and the delegate method are legal
for (int i = 0; i < interfaceMethod.typeParameters.size(); ++i) {
Class<?> from = interfaceMethod.typeParameters.get(i);
Class<?> to = delegateMethod.typeParameters.get(i);
AnalyzerCaster.getLegalCast(location, from, to, false, true);
}
if (interfaceMethod.returnType != void.class) {
AnalyzerCaster.getLegalCast(location, delegateMethod.returnType, interfaceMethod.returnType, false, true);
}
} else {
// whitelist lookup
ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected, type, call, 0);
}
} catch (IllegalArgumentException e) {
throw createError(e);
}
ref = FunctionRef.create(locals.getPainlessLookup(), locals.getMethods(), location, expected, type, call, 0);
actual = expected;
}
}
@@ -104,17 +66,7 @@ public final class EFunctionRef extends AExpression implements ILambda {
void write(MethodWriter writer, Globals globals) {
if (ref != null) {
writer.writeDebugInfo(location);
writer.invokeDynamic(
ref.interfaceMethodName,
ref.factoryDescriptor,
LAMBDA_BOOTSTRAP_HANDLE,
ref.interfaceType,
ref.delegateClassName,
ref.delegateInvokeType,
ref.delegateMethodName,
ref.delegateType,
ref.isDelegateInterface ? 1 : 0
);
writer.invokeLambdaCall(ref);
} else {
// TODO: don't do this: its just to cutover :)
writer.push((String)null);

View File

@@ -19,11 +19,9 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.FunctionRef;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.LocalMethod;
import org.elasticsearch.painless.Locals.Variable;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
@@ -40,8 +38,6 @@ import java.util.List;
import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE;
/**
* Lambda expression node.
* <p>
@@ -122,7 +118,7 @@ public final class ELambda extends AExpression implements ILambda {
} else {
// we know the method statically, infer return type and any unknown/def types
interfaceMethod = locals.getPainlessLookup().lookupPainlessClass(expected).functionalMethod;
interfaceMethod = locals.getPainlessLookup().lookupFunctionalInterfacePainlessMethod(expected);
if (interfaceMethod == null) {
throw createError(new IllegalArgumentException("Cannot pass lambda to " +
"[" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"));
@@ -184,25 +180,8 @@
defPointer = "Sthis." + name + "," + captures.size();
} else {
defPointer = null;
try {
LocalMethod localMethod =
new LocalMethod(desugared.name, desugared.returnType, desugared.typeParameters, desugared.methodType);
ref = new FunctionRef(expected, interfaceMethod, localMethod, captures.size());
} catch (IllegalArgumentException e) {
throw createError(e);
}
// check casts between the interface method and the delegate method are legal
for (int i = 0; i < interfaceMethod.typeParameters.size(); ++i) {
Class<?> from = interfaceMethod.typeParameters.get(i);
Class<?> to = desugared.parameters.get(i + captures.size()).clazz;
AnalyzerCaster.getLegalCast(location, from, to, false, true);
}
if (interfaceMethod.returnType != void.class) {
AnalyzerCaster.getLegalCast(location, desugared.returnType, interfaceMethod.returnType, false, true);
}
ref = FunctionRef.create(
locals.getPainlessLookup(), locals.getMethods(), location, expected, "this", desugared.name, captures.size());
actual = expected;
}
}
@@ -218,17 +197,7 @@
writer.visitVarInsn(MethodWriter.getType(capture.clazz).getOpcode(Opcodes.ILOAD), capture.getSlot());
}
writer.invokeDynamic(
ref.interfaceMethodName,
ref.factoryDescriptor,
LAMBDA_BOOTSTRAP_HANDLE,
ref.interfaceType,
ref.delegateClassName,
ref.delegateInvokeType,
ref.delegateMethodName,
ref.delegateType,
ref.isDelegateInterface ? 1 : 0
);
writer.invokeLambdaCall(ref);
} else {
// placeholder
writer.push((String)null);

View File

@@ -27,7 +27,6 @@ import java.lang.invoke.LambdaConversionException;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.startsWith;
public class FunctionRefTests extends ScriptTestCase {
@@ -193,14 +192,15 @@
Exception e = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("List l = [2, 1]; l.sort(Integer::bogus); return l.get(0);");
});
assertThat(e.getMessage(), startsWith("Unknown reference"));
assertThat(e.getMessage(), containsString("function reference [Integer::bogus/2] matching [java.util.Comparator"));
}
public void testQualifiedMethodMissing() {
Exception e = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("List l = [2, 1]; l.sort(org.joda.time.ReadableDateTime::bogus); return l.get(0);", false);
});
assertThat(e.getMessage(), startsWith("Unknown reference"));
assertThat(e.getMessage(),
containsString("function reference [org.joda.time.ReadableDateTime::bogus/2] matching [java.util.Comparator"));
}
public void testClassMissing() {
@@ -223,11 +223,12 @@
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("List l = new ArrayList(); l.add(2); l.add(1); l.add(Integer::bogus); return l.get(0);");
});
assertThat(expected.getMessage(), containsString("Cannot convert function reference"));
assertThat(expected.getMessage(),
containsString("cannot convert function reference [Integer::bogus] to a non-functional interface [def]"));
}
public void testIncompatible() {
expectScriptThrows(BootstrapMethodError.class, () -> {
expectScriptThrows(ClassCastException.class, () -> {
exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::startsWith); return l.get(0);");
});
}
@@ -236,28 +237,32 @@
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("Optional.empty().orElseGet(String::startsWith);");
});
assertThat(expected.getMessage(), containsString("Unknown reference"));
assertThat(expected.getMessage(),
containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier"));
}
public void testWrongArityNotEnough() {
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);");
});
assertTrue(expected.getMessage().contains("Unknown reference"));
assertThat(expected.getMessage(), containsString(
"function reference [String::isEmpty/2] matching [java.util.Comparator"));
}
public void testWrongArityDef() {
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("def y = Optional.empty(); return y.orElseGet(String::startsWith);");
});
assertThat(expected.getMessage(), containsString("Unknown reference"));
assertThat(expected.getMessage(),
containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier"));
}
public void testWrongArityNotEnoughDef() {
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);");
});
assertThat(expected.getMessage(), containsString("Unknown reference"));
assertThat(expected.getMessage(),
containsString("function reference [String::isEmpty/2] matching [java.util.Comparator"));
}
public void testReturnVoid() {

View File

@@ -184,7 +184,7 @@ public class LambdaTests extends ScriptTestCase {
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("def y = Optional.empty(); return y.orElseGet(x -> x);");
});
assertTrue(expected.getMessage(), expected.getMessage().contains("Incorrect number of parameters"));
assertTrue(expected.getMessage(), expected.getMessage().contains("due to an incorrect number of arguments"));
}
public void testWrongArityNotEnough() {
@@ -200,7 +200,7 @@
exec("def l = new ArrayList(); l.add(1); l.add(1); "
+ "return l.stream().mapToInt(() -> 5).sum();");
});
assertTrue(expected.getMessage().contains("Incorrect number of parameters"));
assertTrue(expected.getMessage(), expected.getMessage().contains("due to an incorrect number of arguments"));
}
public void testLambdaInFunction() {

View File

@@ -756,7 +756,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
@Override
@SuppressWarnings("unchecked")
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
IndexFieldData.Builder builder = fieldType.fielddataBuilder(shardContext.getFullyQualifiedIndexName());
IndexFieldData.Builder builder = fieldType.fielddataBuilder(shardContext.getFullyQualifiedIndex().getName());
IndexFieldDataCache cache = new IndexFieldDataCache.None();
CircuitBreakerService circuitBreaker = new NoneCircuitBreakerService();
return (IFD) builder.build(shardContext.getIndexSettings(), fieldType, cache, circuitBreaker,
@@ -764,5 +764,4 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
}
};
}
}

View File

@@ -58,6 +58,7 @@ import java.util.concurrent.TimeUnit;
import static io.netty.handler.codec.http.HttpHeaderNames.HOST;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
import static org.junit.Assert.fail;
/**
* Tiny helper to send http requests over netty.
@@ -145,7 +146,9 @@ class Netty4HttpClient implements Closeable {
for (HttpRequest request : requests) {
channelFuture.channel().writeAndFlush(request);
}
latch.await(30, TimeUnit.SECONDS);
if (latch.await(30L, TimeUnit.SECONDS) == false) {
fail("Failed to get all expected responses.");
}
} finally {
if (channelFuture != null) {

View File

@@ -0,0 +1,33 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
apply plugin: 'elasticsearch.esplugin'
esplugin {
name 'custom-suggester'
description 'An example plugin showing how to write and register a custom suggester'
classname 'org.elasticsearch.example.customsuggester.CustomSuggesterPlugin'
}
integTestCluster {
numNodes = 2
}
// this plugin has no unit tests, only rest tests
tasks.test.enabled = false

View File

@@ -0,0 +1,62 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.example.customsuggester;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggester;
import java.util.Locale;
public class CustomSuggester extends Suggester<CustomSuggestionContext> {
// This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123
@Override
public Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(
String name,
CustomSuggestionContext suggestion,
IndexSearcher searcher,
CharsRefBuilder spare) {
// get the original text from the suggestion context
String text = suggestion.getText().utf8ToString();
// build one suggestion entry with two options, appending 12 and 123 to the text
CustomSuggestion response = new CustomSuggestion(name, suggestion.getSize(), "suggestion-dummy-value");
CustomSuggestion.Entry entry = new CustomSuggestion.Entry(new Text(text), 0, text.length(), "entry-dummy-value");
String firstOption =
String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "12");
CustomSuggestion.Entry.Option option12 = new CustomSuggestion.Entry.Option(new Text(firstOption), 0.9f, "option-dummy-value-1");
entry.addOption(option12);
String secondOption =
String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "123");
CustomSuggestion.Entry.Option option123 = new CustomSuggestion.Entry.Option(new Text(secondOption), 0.8f, "option-dummy-value-2");
entry.addOption(option123);
response.addTerm(entry);
return response;
}
}

View File

@@ -0,0 +1,40 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.example.customsuggester;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import java.util.Collections;
import java.util.List;
public class CustomSuggesterPlugin extends Plugin implements SearchPlugin {
@Override
public List<SearchPlugin.SuggesterSpec<?>> getSuggesters() {
return Collections.singletonList(
new SearchPlugin.SuggesterSpec<>(
CustomSuggestionBuilder.SUGGESTION_NAME,
CustomSuggestionBuilder::new,
CustomSuggestionBuilder::fromXContent,
CustomSuggestion::new
)
);
}
}

View File

@@ -0,0 +1,227 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.example.customsuggester;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.suggest.Suggest;
import java.io.IOException;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
public class CustomSuggestion extends Suggest.Suggestion<CustomSuggestion.Entry> {
public static final int TYPE = 999;
public static final ParseField DUMMY = new ParseField("dummy");
private String dummy;
public CustomSuggestion(String name, int size, String dummy) {
super(name, size);
this.dummy = dummy;
}
public CustomSuggestion(StreamInput in) throws IOException {
super(in);
dummy = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(dummy);
}
@Override
public String getWriteableName() {
return CustomSuggestionBuilder.SUGGESTION_NAME;
}
@Override
public int getWriteableType() {
return TYPE;
}
/**
* A meaningless value used to test that plugin suggesters can add fields to their Suggestion types
*
* This can't be serialized to xcontent because Suggestions appear in xcontent as an array of entries, so there is no place
* to add a custom field. But we can still use a custom field internally and use it to define a Suggestion's behavior
*/
public String getDummy() {
return dummy;
}
@Override
protected Entry newEntry() {
return new Entry();
}
@Override
protected Entry newEntry(StreamInput in) throws IOException {
return new Entry(in);
}
public static CustomSuggestion fromXContent(XContentParser parser, String name) throws IOException {
CustomSuggestion suggestion = new CustomSuggestion(name, -1, null);
parseEntries(parser, suggestion, Entry::fromXContent);
return suggestion;
}
public static class Entry extends Suggest.Suggestion.Entry<CustomSuggestion.Entry.Option> {
private static final ObjectParser<Entry, Void> PARSER = new ObjectParser<>("CustomSuggestionEntryParser", true, Entry::new);
static {
declareCommonFields(PARSER);
PARSER.declareString((entry, dummy) -> entry.dummy = dummy, DUMMY);
PARSER.declareObjectArray(Entry::addOptions, (p, c) -> Option.fromXContent(p), new ParseField(OPTIONS));
}
private String dummy;
public Entry() {}
public Entry(Text text, int offset, int length, String dummy) {
super(text, offset, length);
this.dummy = dummy;
}
public Entry(StreamInput in) throws IOException {
super(in);
dummy = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(dummy);
}
@Override
protected Option newOption() {
return new Option();
}
@Override
protected Option newOption(StreamInput in) throws IOException {
return new Option(in);
}
/*
* the value of dummy will always be the same, so this just tests that we can merge entries with custom fields
*/
@Override
protected void merge(Suggest.Suggestion.Entry<Option> otherEntry) {
dummy = ((Entry) otherEntry).getDummy();
}
/**
* Meaningless field used to test that plugin suggesters can add fields to their entries
*/
public String getDummy() {
return dummy;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder = super.toXContent(builder, params);
builder.field(DUMMY.getPreferredName(), getDummy());
return builder;
}
public static Entry fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
public static class Option extends Suggest.Suggestion.Entry.Option {
private static final ConstructingObjectParser<Option, Void> PARSER = new ConstructingObjectParser<>(
"CustomSuggestionObjectParser", true,
args -> {
Text text = new Text((String) args[0]);
float score = (float) args[1];
String dummy = (String) args[2];
return new Option(text, score, dummy);
});
static {
PARSER.declareString(constructorArg(), TEXT);
PARSER.declareFloat(constructorArg(), SCORE);
PARSER.declareString(constructorArg(), DUMMY);
}
private String dummy;
public Option() {}
public Option(Text text, float score, String dummy) {
super(text, score);
this.dummy = dummy;
}
public Option(StreamInput in) throws IOException {
super(in);
dummy = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(dummy);
}
/**
* A meaningless value used to test that plugin suggesters can add fields to their options
*/
public String getDummy() {
return dummy;
}
/*
* the value of dummy will always be the same, so this just tests that we can merge options with custom fields
*/
@Override
protected void mergeInto(Suggest.Suggestion.Entry.Option otherOption) {
super.mergeInto(otherOption);
dummy = ((Option) otherOption).getDummy();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder = super.toXContent(builder, params);
builder.field(DUMMY.getPreferredName(), dummy);
return builder;
}
public static Option fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
}
}
}
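
Because the Suggestion is a registered named writeable, custom fields such as dummy survive the node-to-node round trip during reduce. A minimal round-trip sketch, assuming BytesStreamOutput from the common io.stream package:

    CustomSuggestion original = new CustomSuggestion("test", 5, "entry-dummy-value");
    try (BytesStreamOutput out = new BytesStreamOutput()) {
        original.writeTo(out);
        CustomSuggestion copy = new CustomSuggestion(out.bytes().streamInput());
        assert "entry-dummy-value".equals(copy.getDummy()); // dummy came back intact
    }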

View File

@ -0,0 +1,143 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.example.customsuggester;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
public class CustomSuggestionBuilder extends SuggestionBuilder<CustomSuggestionBuilder> {
public static final String SUGGESTION_NAME = "custom";
protected static final ParseField RANDOM_SUFFIX_FIELD = new ParseField("suffix");
private String randomSuffix;
public CustomSuggestionBuilder(String randomField, String randomSuffix) {
super(randomField);
this.randomSuffix = randomSuffix;
}
/**
* Read from a stream.
*/
public CustomSuggestionBuilder(StreamInput in) throws IOException {
super(in);
this.randomSuffix = in.readString();
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeString(randomSuffix);
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(RANDOM_SUFFIX_FIELD.getPreferredName(), randomSuffix);
return builder;
}
@Override
public String getWriteableName() {
return SUGGESTION_NAME;
}
@Override
protected boolean doEquals(CustomSuggestionBuilder other) {
return Objects.equals(randomSuffix, other.randomSuffix);
}
@Override
protected int doHashCode() {
return Objects.hash(randomSuffix);
}
public static CustomSuggestionBuilder fromXContent(XContentParser parser) throws IOException {
XContentParser.Token token;
String currentFieldName = null;
String fieldname = null;
String suffix = null;
String analyzer = null;
int sizeField = -1;
int shardSize = -1;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (SuggestionBuilder.ANALYZER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
analyzer = parser.text();
} else if (SuggestionBuilder.FIELDNAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
fieldname = parser.text();
} else if (SuggestionBuilder.SIZE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
sizeField = parser.intValue();
} else if (SuggestionBuilder.SHARDSIZE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
shardSize = parser.intValue();
} else if (RANDOM_SUFFIX_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
suffix = parser.text();
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"suggester[custom] doesn't support field [" + currentFieldName + "]");
}
}
// now we should have field name, check and copy fields over to the suggestion builder we return
if (fieldname == null) {
throw new ParsingException(parser.getTokenLocation(), "the required field option is missing");
}
CustomSuggestionBuilder builder = new CustomSuggestionBuilder(fieldname, suffix);
if (analyzer != null) {
builder.analyzer(analyzer);
}
if (sizeField != -1) {
builder.size(sizeField);
}
if (shardSize != -1) {
builder.shardSize(shardSize);
}
return builder;
}
@Override
public SuggestionSearchContext.SuggestionContext build(QueryShardContext context) throws IOException {
Map<String, Object> options = new HashMap<>();
options.put(FIELDNAME_FIELD.getPreferredName(), field());
options.put(RANDOM_SUFFIX_FIELD.getPreferredName(), randomSuffix);
CustomSuggestionContext customSuggestionsContext = new CustomSuggestionContext(context, options);
customSuggestionsContext.setField(field());
assert text != null;
customSuggestionsContext.setText(BytesRefs.toBytesRef(text));
return customSuggestionsContext;
}
}

View File

@ -0,0 +1,35 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.example.customsuggester;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.Map;
public class CustomSuggestionContext extends SuggestionSearchContext.SuggestionContext {
public Map<String, Object> options;
public CustomSuggestionContext(QueryShardContext context, Map<String, Object> options) {
super(new CustomSuggester(), context);
this.options = options;
}
}

View File

@ -0,0 +1,37 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.example.customsuggester;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
public class CustomSuggesterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
public CustomSuggesterClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
}

View File

@ -0,0 +1,13 @@
# tests that the custom suggester plugin is installed
---
"plugin loaded":
- do:
cluster.state: {}
# Get master node id
- set: { master_node: master }
- do:
nodes.info: {}
- contains: { nodes.$master.plugins: { name: custom-suggester } }

View File

@ -0,0 +1,55 @@
# tests that the custom suggester works
# the issue that prompted serializing Suggestion as a registered named writeable was not revealed until
# a user found that it would fail when reducing suggestions in a multi-node environment
# https://github.com/elastic/elasticsearch/issues/26585
"test custom suggester":
- do:
cluster.health:
wait_for_nodes: 2
- is_true: cluster_name
- is_false: timed_out
- gte: { number_of_nodes: 2 }
- gte: { number_of_data_nodes: 2 }
- do:
indices.create:
index: test
body:
settings:
number_of_shards: 2
number_of_replicas: 0
- do:
bulk:
index: test
type: test
refresh: true
body: |
{ "index": {} }
{ "content": "these" }
{ "index": {} }
{ "content": "aren't" }
{ "index": {} }
{ "content": "actually" }
{ "index": {} }
{ "content": "used" }
- do:
search:
size: 0
index: test
body:
suggest:
test:
text: my suggestion text
custom:
field: arbitraryField
suffix: arbitrarySuffix
- match: { suggest.test.0.dummy: entry-dummy-value }
- match: { suggest.test.0.options.0.text: my suggestion text-arbitraryField-arbitrarySuffix-12 }
- match: { suggest.test.0.options.0.dummy: option-dummy-value-1 }
- match: { suggest.test.0.options.1.text: my suggestion text-arbitraryField-arbitrarySuffix-123 }
- match: { suggest.test.0.options.1.dummy: option-dummy-value-2 }
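
The same request expressed through the plugin's builder from Java might look like the sketch below; the SuggestBuilder and SearchSourceBuilder usage is assumed from the core search API:

    CustomSuggestionBuilder custom = new CustomSuggestionBuilder("arbitraryField", "arbitrarySuffix");
    SearchSourceBuilder source = new SearchSourceBuilder()
        .size(0)
        .suggest(new SuggestBuilder()
            .setGlobalText("my suggestion text")
            .addSuggestion("test", custom));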

View File

@ -1 +0,0 @@
5e924646d6f893bc9036939c5f2b4ecaee85e5da

View File

@ -0,0 +1 @@
391de20b4e29cb3fb07d2454ace64be2c82ac91f

View File

@ -1 +0,0 @@
f7b83cb2bc4b88d53961e749e1ad32f49ef017b7

View File

@ -0,0 +1 @@
0569a9f220273024523799dba9dd358121b0ee09

View File

@ -1,201 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.http.nio;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpObject;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpRequestEncoder;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpResponseDecoder;
import io.netty.handler.codec.http.HttpVersion;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.tasks.Task;
import java.io.Closeable;
import java.net.SocketAddress;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static io.netty.handler.codec.http.HttpHeaderNames.HOST;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
/**
* Tiny helper to send http requests over netty.
*/
class Netty4HttpClient implements Closeable {
static Collection<String> returnHttpResponseBodies(Collection<FullHttpResponse> responses) {
List<String> list = new ArrayList<>(responses.size());
for (FullHttpResponse response : responses) {
list.add(response.content().toString(StandardCharsets.UTF_8));
}
return list;
}
static Collection<String> returnOpaqueIds(Collection<FullHttpResponse> responses) {
List<String> list = new ArrayList<>(responses.size());
for (HttpResponse response : responses) {
list.add(response.headers().get(Task.X_OPAQUE_ID));
}
return list;
}
private final Bootstrap clientBootstrap;
Netty4HttpClient() {
clientBootstrap = new Bootstrap().channel(NioSocketChannel.class).group(new NioEventLoopGroup());
}
public Collection<FullHttpResponse> get(SocketAddress remoteAddress, String... uris) throws InterruptedException {
Collection<HttpRequest> requests = new ArrayList<>(uris.length);
for (int i = 0; i < uris.length; i++) {
final HttpRequest httpRequest = new DefaultFullHttpRequest(HTTP_1_1, HttpMethod.GET, uris[i]);
httpRequest.headers().add(HOST, "localhost");
httpRequest.headers().add(Task.X_OPAQUE_ID, String.valueOf(i));
requests.add(httpRequest);
}
return sendRequests(remoteAddress, requests);
}
@SafeVarargs // Safe not because it doesn't do anything with the type parameters but because it won't leak them into other methods.
public final Collection<FullHttpResponse> post(SocketAddress remoteAddress, Tuple<String, CharSequence>... urisAndBodies)
throws InterruptedException {
return processRequestsWithBody(HttpMethod.POST, remoteAddress, urisAndBodies);
}
public final FullHttpResponse post(SocketAddress remoteAddress, FullHttpRequest httpRequest) throws InterruptedException {
Collection<FullHttpResponse> responses = sendRequests(remoteAddress, Collections.singleton(httpRequest));
assert responses.size() == 1 : "expected 1 and only 1 http response";
return responses.iterator().next();
}
@SafeVarargs // Safe not because it doesn't do anything with the type parameters but because it won't leak them into other methods.
public final Collection<FullHttpResponse> put(SocketAddress remoteAddress, Tuple<String, CharSequence>... urisAndBodies)
throws InterruptedException {
return processRequestsWithBody(HttpMethod.PUT, remoteAddress, urisAndBodies);
}
private Collection<FullHttpResponse> processRequestsWithBody(HttpMethod method, SocketAddress remoteAddress, Tuple<String,
CharSequence>... urisAndBodies) throws InterruptedException {
Collection<HttpRequest> requests = new ArrayList<>(urisAndBodies.length);
for (Tuple<String, CharSequence> uriAndBody : urisAndBodies) {
ByteBuf content = Unpooled.copiedBuffer(uriAndBody.v2(), StandardCharsets.UTF_8);
HttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, method, uriAndBody.v1(), content);
request.headers().add(HttpHeaderNames.HOST, "localhost");
request.headers().add(HttpHeaderNames.CONTENT_LENGTH, content.readableBytes());
request.headers().add(HttpHeaderNames.CONTENT_TYPE, "application/json");
requests.add(request);
}
return sendRequests(remoteAddress, requests);
}
private synchronized Collection<FullHttpResponse> sendRequests(
final SocketAddress remoteAddress,
final Collection<HttpRequest> requests) throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(requests.size());
final Collection<FullHttpResponse> content = Collections.synchronizedList(new ArrayList<>(requests.size()));
clientBootstrap.handler(new CountDownLatchHandler(latch, content));
ChannelFuture channelFuture = null;
try {
channelFuture = clientBootstrap.connect(remoteAddress);
channelFuture.sync();
for (HttpRequest request : requests) {
channelFuture.channel().writeAndFlush(request);
}
latch.await(30, TimeUnit.SECONDS);
} finally {
if (channelFuture != null) {
channelFuture.channel().close().sync();
}
}
return content;
}
@Override
public void close() {
clientBootstrap.config().group().shutdownGracefully().awaitUninterruptibly();
}
/**
* helper factory which adds returned data to a list and uses a count down latch to decide when done
*/
private static class CountDownLatchHandler extends ChannelInitializer<SocketChannel> {
private final CountDownLatch latch;
private final Collection<FullHttpResponse> content;
CountDownLatchHandler(final CountDownLatch latch, final Collection<FullHttpResponse> content) {
this.latch = latch;
this.content = content;
}
@Override
protected void initChannel(SocketChannel ch) throws Exception {
final int maxContentLength = new ByteSizeValue(100, ByteSizeUnit.MB).bytesAsInt();
ch.pipeline().addLast(new HttpResponseDecoder());
ch.pipeline().addLast(new HttpRequestEncoder());
ch.pipeline().addLast(new HttpObjectAggregator(maxContentLength));
ch.pipeline().addLast(new SimpleChannelInboundHandler<HttpObject>() {
@Override
protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) throws Exception {
final FullHttpResponse response = (FullHttpResponse) msg;
content.add(response.copy());
latch.countDown();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
super.exceptionCaught(ctx, cause);
latch.countDown();
}
});
}
}
}

View File

@ -0,0 +1,285 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.http.nio;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandler;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpRequestEncoder;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpResponseDecoder;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.nio.BytesChannelContext;
import org.elasticsearch.nio.ChannelFactory;
import org.elasticsearch.nio.EventHandler;
import org.elasticsearch.nio.FlushOperation;
import org.elasticsearch.nio.InboundChannelBuffer;
import org.elasticsearch.nio.NioGroup;
import org.elasticsearch.nio.NioSelector;
import org.elasticsearch.nio.NioServerSocketChannel;
import org.elasticsearch.nio.NioSocketChannel;
import org.elasticsearch.nio.ReadWriteHandler;
import org.elasticsearch.nio.SocketChannelContext;
import org.elasticsearch.nio.WriteOperation;
import org.elasticsearch.tasks.Task;
import java.io.Closeable;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.InetSocketAddress;
import java.nio.channels.ServerSocketChannel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import static io.netty.handler.codec.http.HttpHeaderNames.HOST;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory;
import static org.junit.Assert.fail;
/**
* Tiny helper to send http requests over nio.
*/
class NioHttpClient implements Closeable {
static Collection<String> returnOpaqueIds(Collection<FullHttpResponse> responses) {
List<String> list = new ArrayList<>(responses.size());
for (HttpResponse response : responses) {
list.add(response.headers().get(Task.X_OPAQUE_ID));
}
return list;
}
private final NioGroup nioGroup;
private final Logger logger;
NioHttpClient() {
logger = Loggers.getLogger(NioHttpClient.class, Settings.EMPTY);
try {
nioGroup = new NioGroup(daemonThreadFactory(Settings.EMPTY, "nio-http-client"), 1,
(s) -> new EventHandler(this::onException, s));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public Collection<FullHttpResponse> get(InetSocketAddress remoteAddress, String... uris) throws InterruptedException {
Collection<HttpRequest> requests = new ArrayList<>(uris.length);
for (int i = 0; i < uris.length; i++) {
final HttpRequest httpRequest = new DefaultFullHttpRequest(HTTP_1_1, HttpMethod.GET, uris[i]);
httpRequest.headers().add(HOST, "localhost");
httpRequest.headers().add(Task.X_OPAQUE_ID, String.valueOf(i));
requests.add(httpRequest);
}
return sendRequests(remoteAddress, requests);
}
public final FullHttpResponse post(InetSocketAddress remoteAddress, FullHttpRequest httpRequest) throws InterruptedException {
Collection<FullHttpResponse> responses = sendRequests(remoteAddress, Collections.singleton(httpRequest));
assert responses.size() == 1 : "expected 1 and only 1 http response";
return responses.iterator().next();
}
private void onException(Exception e) {
logger.error("Exception from http client", e);
}
private synchronized Collection<FullHttpResponse> sendRequests(InetSocketAddress remoteAddress, Collection<HttpRequest> requests)
throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(requests.size());
final Collection<FullHttpResponse> content = Collections.synchronizedList(new ArrayList<>(requests.size()));
ChannelFactory<NioServerSocketChannel, NioSocketChannel> factory = new ClientChannelFactory(latch, content);
NioSocketChannel nioSocketChannel = null;
try {
nioSocketChannel = nioGroup.openChannel(remoteAddress, factory);
PlainActionFuture<Void> connectFuture = PlainActionFuture.newFuture();
nioSocketChannel.addConnectListener(ActionListener.toBiConsumer(connectFuture));
connectFuture.actionGet();
for (HttpRequest request : requests) {
nioSocketChannel.getContext().sendMessage(request, (v, e) -> {});
}
if (latch.await(30L, TimeUnit.SECONDS) == false) {
fail("Failed to get all expected responses.");
}
} catch (IOException e) {
throw new UncheckedIOException(e);
} finally {
if (nioSocketChannel != null) {
nioSocketChannel.close();
}
}
return content;
}
@Override
public void close() {
IOUtils.closeWhileHandlingException(nioGroup::close);
}
private class ClientChannelFactory extends ChannelFactory<NioServerSocketChannel, NioSocketChannel> {
private final CountDownLatch latch;
private final Collection<FullHttpResponse> content;
private ClientChannelFactory(CountDownLatch latch, Collection<FullHttpResponse> content) {
super(new RawChannelFactory(NetworkService.TCP_NO_DELAY.get(Settings.EMPTY),
NetworkService.TCP_KEEP_ALIVE.get(Settings.EMPTY),
NetworkService.TCP_REUSE_ADDRESS.get(Settings.EMPTY),
Math.toIntExact(NetworkService.TCP_SEND_BUFFER_SIZE.get(Settings.EMPTY).getBytes()),
Math.toIntExact(NetworkService.TCP_RECEIVE_BUFFER_SIZE.get(Settings.EMPTY).getBytes())));
this.latch = latch;
this.content = content;
}
@Override
public NioSocketChannel createChannel(NioSelector selector, java.nio.channels.SocketChannel channel) throws IOException {
NioSocketChannel nioSocketChannel = new NioSocketChannel(channel);
HttpClientHandler handler = new HttpClientHandler(nioSocketChannel, latch, content);
Consumer<Exception> exceptionHandler = (e) -> {
latch.countDown();
onException(e);
nioSocketChannel.close();
};
SocketChannelContext context = new BytesChannelContext(nioSocketChannel, selector, exceptionHandler, handler,
InboundChannelBuffer.allocatingInstance());
nioSocketChannel.setContext(context);
return nioSocketChannel;
}
@Override
public NioServerSocketChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) {
throw new UnsupportedOperationException("Cannot create server channel");
}
}
private static class HttpClientHandler implements ReadWriteHandler {
private final NettyAdaptor adaptor;
private final CountDownLatch latch;
private final Collection<FullHttpResponse> content;
private HttpClientHandler(NioSocketChannel channel, CountDownLatch latch, Collection<FullHttpResponse> content) {
this.latch = latch;
this.content = content;
final int maxContentLength = Math.toIntExact(new ByteSizeValue(100, ByteSizeUnit.MB).getBytes());
List<ChannelHandler> handlers = new ArrayList<>(5);
handlers.add(new HttpResponseDecoder());
handlers.add(new HttpRequestEncoder());
handlers.add(new HttpObjectAggregator(maxContentLength));
adaptor = new NettyAdaptor(handlers.toArray(new ChannelHandler[0]));
adaptor.addCloseListener((v, e) -> channel.close());
}
@Override
public WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer<Void, Exception> listener) {
assert message instanceof HttpRequest : "Expected type HttpRequest.class, found: " + message.getClass();
return new WriteOperation() {
@Override
public BiConsumer<Void, Exception> getListener() {
return listener;
}
@Override
public SocketChannelContext getChannel() {
return context;
}
@Override
public Object getObject() {
return message;
}
};
}
@Override
public List<FlushOperation> writeToBytes(WriteOperation writeOperation) {
adaptor.write(writeOperation);
return pollFlushOperations();
}
@Override
public List<FlushOperation> pollFlushOperations() {
ArrayList<FlushOperation> copiedOperations = new ArrayList<>(adaptor.getOutboundCount());
FlushOperation flushOperation;
while ((flushOperation = adaptor.pollOutboundOperation()) != null) {
copiedOperations.add(flushOperation);
}
return copiedOperations;
}
@Override
public int consumeReads(InboundChannelBuffer channelBuffer) throws IOException {
int bytesConsumed = adaptor.read(channelBuffer.sliceAndRetainPagesTo(channelBuffer.getIndex()));
Object message;
while ((message = adaptor.pollInboundMessage()) != null) {
handleRequest(message);
}
return bytesConsumed;
}
@Override
public void close() throws IOException {
try {
adaptor.close();
} catch (Exception e) {
throw new IOException(e);
}
}
private void handleRequest(Object message) {
final FullHttpResponse response = (FullHttpResponse) message;
DefaultFullHttpResponse newResponse = new DefaultFullHttpResponse(response.protocolVersion(),
response.status(),
Unpooled.copiedBuffer(response.content()),
response.headers().copy(),
response.trailingHeaders().copy());
response.release();
content.add(newResponse);
latch.countDown();
}
}
}
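
A usage sketch for this helper, mirroring how the tests below call it (the address is illustrative only):

    try (NioHttpClient client = new NioHttpClient()) {
        Collection<FullHttpResponse> responses =
            client.get(new InetSocketAddress("localhost", 9200), "/", "/_nodes");
        // each response echoes the X-Opaque-Id header that get() set per request
        Collection<String> opaqueIds = NioHttpClient.returnOpaqueIds(responses);
    }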

View File

@ -192,7 +192,7 @@ public class NioHttpServerTransportTests extends ESTestCase {
xContentRegistry(), dispatcher)) {
transport.start();
final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses());
try (Netty4HttpClient client = new Netty4HttpClient()) {
try (NioHttpClient client = new NioHttpClient()) {
final FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/");
request.headers().set(HttpHeaderNames.EXPECT, expectation);
HttpUtil.setContentLength(request, contentLength);
@ -275,7 +275,7 @@ public class NioHttpServerTransportTests extends ESTestCase {
transport.start();
final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses());
try (Netty4HttpClient client = new Netty4HttpClient()) {
try (NioHttpClient client = new NioHttpClient()) {
final String url = "/" + new String(new byte[maxInitialLineLength], Charset.forName("UTF-8"));
final FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, url);

View File

@ -47,11 +47,11 @@ public class NioPipeliningIT extends NioIntegTestCase {
TransportAddress[] boundAddresses = httpServerTransport.boundAddress().boundAddresses();
TransportAddress transportAddress = randomFrom(boundAddresses);
try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) {
try (NioHttpClient nettyHttpClient = new NioHttpClient()) {
Collection<FullHttpResponse> responses = nettyHttpClient.get(transportAddress.address(), requests);
assertThat(responses, hasSize(5));
Collection<String> opaqueIds = Netty4HttpClient.returnOpaqueIds(responses);
Collection<String> opaqueIds = NioHttpClient.returnOpaqueIds(responses);
assertOpaqueIdsInOrder(opaqueIds);
}
}

View File

@ -189,7 +189,10 @@ setup() {
@test "[SYSTEMD] start Elasticsearch with custom JVM options" {
assert_file_exist $ESENVFILE
local temp=`mktemp -d`
# The custom config directory is not under /tmp or /var/tmp because
# systemd's private temp directory functionality means that different
# processes can have different views of what's in these directories
local temp=`mktemp -p /etc -d`
cp "$ESCONFIG"/elasticsearch.yml "$temp"
cp "$ESCONFIG"/log4j2.properties "$temp"
touch "$temp/jvm.options"

View File

@ -92,11 +92,14 @@ fi
@test "[$GROUP] install a sample plugin with a symlinked plugins path" {
# Clean up after the last time this test was run
rm -rf /tmp/plugins.*
rm -rf /tmp/old_plugins.*
rm -rf /var/plugins.*
rm -rf /var/old_plugins.*
rm -rf "$ESPLUGINS"
local es_plugins=$(mktemp -d -t 'plugins.XXXX')
# The custom plugins directory is not under /tmp or /var/tmp because
# systemd's private temp directory functionality means that different
# processes can have different views of what's in these directories
local es_plugins=$(mktemp -p /var -d -t 'plugins.XXXX')
chown -R elasticsearch:elasticsearch "$es_plugins"
ln -s "$es_plugins" "$ESPLUGINS"

View File

@ -555,7 +555,10 @@ run_elasticsearch_tests() {
# Move the config directory to another directory and properly chown it.
move_config() {
local oldConfig="$ESCONFIG"
export ESCONFIG="${1:-$(mktemp -d -t 'config.XXXX')}"
# The custom config directory is not under /tmp or /var/tmp because
# systemd's private temp directory functionality means that different
# processes can have different views of what's in these directories
export ESCONFIG="${1:-$(mktemp -p /etc -d -t 'config.XXXX')}"
echo "Moving configuration directory from $oldConfig to $ESCONFIG"
# Move configuration files to the new configuration directory

View File

@ -1,7 +1,14 @@
setup:
- do:
indices.create:
index: test
index: test
body:
mappings:
test:
properties:
numeric_group: { type: integer }
group_alias: { type: alias, path: numeric_group }
- do:
index:
index: test
@ -341,3 +348,25 @@ setup:
- match: { hits.hits.2.inner_hits.sub_hits.hits.hits.0._version: 55 }
- match: { hits.hits.2.inner_hits.sub_hits.hits.hits.1._id: "4" }
- match: { hits.hits.2.inner_hits.sub_hits.hits.hits.1._version: 44 }
---
"field collapsing on a field alias":
- skip:
version: " - 6.3.99"
reason: Field aliases were introduced in 6.4.0.
- do:
search:
index: test
body:
collapse: { field: group_alias, inner_hits: { name: sub_hits } }
sort: [{ sort: desc }]
- match: { hits.total: 6 }
- length: { hits.hits: 3 }
- match: { hits.hits.0.fields.group_alias: [3] }
- match: { hits.hits.0.inner_hits.sub_hits.hits.total: 1}
- match: { hits.hits.1.fields.group_alias: [1] }
- match: { hits.hits.1.inner_hits.sub_hits.hits.total: 3}
- match: { hits.hits.2.fields.group_alias: [25] }
- match: { hits.hits.2.inner_hits.sub_hits.hits.total: 2}

View File

@ -308,13 +308,8 @@ public final class ExceptionsHelper {
}
}
this.index = indexName;
if (cause == null) {
this.reason = failure.reason();
this.causeType = null;
} else {
this.reason = cause.getMessage();
this.causeType = cause.getClass();
}
this.reason = cause.getMessage();
this.causeType = cause.getClass();
}
@Override

View File

@ -19,39 +19,70 @@
package org.elasticsearch.action;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.rest.RestStatus;
import java.util.Objects;
/**
* An exception indicating that a failure occurred performing an operation on the shard.
*
*/
public interface ShardOperationFailedException extends Streamable, ToXContent {
public abstract class ShardOperationFailedException implements Streamable, ToXContent {
protected String index;
protected int shardId;
protected String reason;
protected RestStatus status;
protected Throwable cause;
protected ShardOperationFailedException() {
}
protected ShardOperationFailedException(@Nullable String index, int shardId, String reason, RestStatus status, Throwable cause) {
this.index = index;
this.shardId = shardId;
this.reason = Objects.requireNonNull(reason, "reason cannot be null");
this.status = Objects.requireNonNull(status, "status cannot be null");
this.cause = Objects.requireNonNull(cause, "cause cannot be null");
}
/**
* The index the operation failed on. Might return {@code null} if it can't be derived.
*/
String index();
@Nullable
public final String index() {
return index;
}
/**
* The index the operation failed on. Might return {@code -1} if it can't be derived.
*/
int shardId();
public final int shardId() {
return shardId;
}
/**
* The reason of the failure.
*/
String reason();
public final String reason() {
return reason;
}
/**
* The status of the failure.
*/
RestStatus status();
public final RestStatus status() {
return status;
}
/**
* The cause of this failure
*/
Throwable getCause();
public final Throwable getCause() {
return cause;
}
}
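
With the interface converted to an abstract base, a concrete failure type reduces to a constructor call; serialization and xcontent remain the subclass's job, as the diffs below show. A hypothetical minimal subclass, for illustration only:

    public class ExampleFailure extends ShardOperationFailedException {
        ExampleFailure(String index, int shardId, Exception cause) {
            // reason, status, and cause must all be non-null per the base constructor
            super(index, shardId, ExceptionsHelper.detailedMessage(cause), ExceptionsHelper.status(cause), cause);
        }
    }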

View File

@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.indices.shards;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionResponse;
@ -248,7 +247,7 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon
return nodeId;
}
public static Failure readFailure(StreamInput in) throws IOException {
static Failure readFailure(StreamInput in) throws IOException {
Failure failure = new Failure();
failure.readFrom(in);
return failure;

View File

@ -138,8 +138,7 @@ public class SearchPhaseExecutionException extends ElasticsearchException {
builder.field("grouped", group); // notify that it's grouped
builder.field("failed_shards");
builder.startArray();
ShardOperationFailedException[] failures = params.paramAsBoolean("group_shard_failures", true) ?
ExceptionsHelper.groupBy(shardFailures) : shardFailures;
ShardOperationFailedException[] failures = group ? ExceptionsHelper.groupBy(shardFailures) : shardFailures;
for (ShardOperationFailedException failure : failures) {
builder.startObject();
failure.toXContent(builder, params);

View File

@ -43,7 +43,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpect
/**
* Represents a failure to search on a specific shard.
*/
public class ShardSearchFailure implements ShardOperationFailedException {
public class ShardSearchFailure extends ShardOperationFailedException {
private static final String REASON_FIELD = "reason";
private static final String NODE_FIELD = "node";
@ -53,9 +53,6 @@ public class ShardSearchFailure implements ShardOperationFailedException {
public static final ShardSearchFailure[] EMPTY_ARRAY = new ShardSearchFailure[0];
private SearchShardTarget shardTarget;
private String reason;
private RestStatus status;
private Throwable cause;
private ShardSearchFailure() {
@ -66,25 +63,18 @@ public class ShardSearchFailure implements ShardOperationFailedException {
}
public ShardSearchFailure(Exception e, @Nullable SearchShardTarget shardTarget) {
super(shardTarget == null ? null : shardTarget.getFullyQualifiedIndexName(),
shardTarget == null ? -1 : shardTarget.getShardId().getId(),
ExceptionsHelper.detailedMessage(e),
ExceptionsHelper.status(ExceptionsHelper.unwrapCause(e)),
ExceptionsHelper.unwrapCause(e));
final Throwable actual = ExceptionsHelper.unwrapCause(e);
if (actual instanceof SearchException) {
this.shardTarget = ((SearchException) actual).shard();
} else if (shardTarget != null) {
this.shardTarget = shardTarget;
}
status = ExceptionsHelper.status(actual);
this.reason = ExceptionsHelper.detailedMessage(e);
this.cause = actual;
}
public ShardSearchFailure(String reason, SearchShardTarget shardTarget) {
this(reason, shardTarget, RestStatus.INTERNAL_SERVER_ERROR);
}
private ShardSearchFailure(String reason, SearchShardTarget shardTarget, RestStatus status) {
this.shardTarget = shardTarget;
this.reason = reason;
this.status = status;
}
/**
@ -95,41 +85,6 @@ public class ShardSearchFailure implements ShardOperationFailedException {
return this.shardTarget;
}
@Override
public RestStatus status() {
return this.status;
}
/**
* The index the search failed on.
*/
@Override
public String index() {
if (shardTarget != null) {
return shardTarget.getFullyQualifiedIndexName();
}
return null;
}
/**
* The shard id the search failed on.
*/
@Override
public int shardId() {
if (shardTarget != null) {
return shardTarget.getShardId().id();
}
return -1;
}
/**
* The reason of the failure.
*/
@Override
public String reason() {
return this.reason;
}
@Override
public String toString() {
return "shard [" + (shardTarget == null ? "_na" : shardTarget) + "], reason [" + reason + "], cause [" +
@ -172,12 +127,10 @@ public class ShardSearchFailure implements ShardOperationFailedException {
if (shardTarget != null) {
builder.field(NODE_FIELD, shardTarget.getNodeId());
}
if (cause != null) {
builder.field(REASON_FIELD);
builder.startObject();
ElasticsearchException.generateThrowableXContent(builder, params, cause);
builder.endObject();
}
builder.field(REASON_FIELD);
builder.startObject();
ElasticsearchException.generateThrowableXContent(builder, params, cause);
builder.endObject();
return builder;
}
@ -225,9 +178,4 @@ public class ShardSearchFailure implements ShardOperationFailedException {
}
return new ShardSearchFailure(exception, searchShardTarget);
}
@Override
public Throwable getCause() {
return cause;
}
}

View File

@ -28,8 +28,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
@ -37,7 +35,7 @@ import java.io.IOException;
import static org.elasticsearch.ExceptionsHelper.detailedMessage;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
public class DefaultShardOperationFailedException implements ShardOperationFailedException {
public class DefaultShardOperationFailedException extends ShardOperationFailedException {
private static final String INDEX = "index";
private static final String SHARD_ID = "shard";
@ -52,56 +50,16 @@ public class DefaultShardOperationFailedException implements ShardOperationFaile
PARSER.declareObject(constructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), new ParseField(REASON));
}
private String index;
private int shardId;
private Throwable reason;
private RestStatus status;
protected DefaultShardOperationFailedException() {
}
public DefaultShardOperationFailedException(ElasticsearchException e) {
Index index = e.getIndex();
this.index = index == null ? null : index.getName();
ShardId shardId = e.getShardId();
this.shardId = shardId == null ? -1 : shardId.id();
this.reason = e;
this.status = e.status();
super(e.getIndex() == null ? null : e.getIndex().getName(), e.getShardId() == null ? -1 : e.getShardId().getId(),
detailedMessage(e), e.status(), e);
}
public DefaultShardOperationFailedException(String index, int shardId, Throwable reason) {
this.index = index;
this.shardId = shardId;
this.reason = reason;
this.status = ExceptionsHelper.status(reason);
}
@Override
public String index() {
return this.index;
}
@Override
public int shardId() {
return this.shardId;
}
@Override
public String reason() {
return detailedMessage(reason);
}
@Override
public RestStatus status() {
return status;
}
@Override
public Throwable getCause() {
return reason;
public DefaultShardOperationFailedException(String index, int shardId, Throwable cause) {
super(index, shardId, detailedMessage(cause), ExceptionsHelper.status(cause), cause);
}
public static DefaultShardOperationFailedException readShardOperationFailed(StreamInput in) throws IOException {
@ -112,24 +70,17 @@ public class DefaultShardOperationFailedException implements ShardOperationFaile
@Override
public void readFrom(StreamInput in) throws IOException {
if (in.readBoolean()) {
index = in.readString();
}
index = in.readOptionalString();
shardId = in.readVInt();
reason = in.readException();
cause = in.readException();
status = RestStatus.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (index == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeString(index);
}
out.writeOptionalString(index);
out.writeVInt(shardId);
out.writeException(reason);
out.writeException(cause);
RestStatus.writeTo(out, status);
}
@ -145,7 +96,7 @@ public class DefaultShardOperationFailedException implements ShardOperationFaile
builder.field("status", status.name());
if (reason != null) {
builder.startObject("reason");
ElasticsearchException.generateThrowableXContent(builder, params, reason);
ElasticsearchException.generateThrowableXContent(builder, params, cause);
builder.endObject();
}
return builder;

View File

@ -218,13 +218,13 @@ public class ReplicationResponse extends ActionResponse {
'}';
}
public static ShardInfo readShardInfo(StreamInput in) throws IOException {
static ShardInfo readShardInfo(StreamInput in) throws IOException {
ShardInfo shardInfo = new ShardInfo();
shardInfo.readFrom(in);
return shardInfo;
}
public static class Failure implements ShardOperationFailedException, ToXContentObject {
public static class Failure extends ShardOperationFailedException implements ToXContentObject {
private static final String _INDEX = "_index";
private static final String _SHARD = "_shard";
@ -235,37 +235,18 @@ public class ReplicationResponse extends ActionResponse {
private ShardId shardId;
private String nodeId;
private Exception cause;
private RestStatus status;
private boolean primary;
public Failure(ShardId shardId, @Nullable String nodeId, Exception cause, RestStatus status, boolean primary) {
super(shardId.getIndexName(), shardId.getId(), ExceptionsHelper.detailedMessage(cause), status, cause);
this.shardId = shardId;
this.nodeId = nodeId;
this.cause = cause;
this.status = status;
this.primary = primary;
}
Failure() {
}
/**
* @return On what index the failure occurred.
*/
@Override
public String index() {
return shardId.getIndexName();
}
/**
* @return On what shard id the failure occurred.
*/
@Override
public int shardId() {
return shardId.id();
}
public ShardId fullShardId() {
return shardId;
}
@ -278,27 +259,6 @@ public class ReplicationResponse extends ActionResponse {
return nodeId;
}
/**
* @return A text description of the failure
*/
@Override
public String reason() {
return ExceptionsHelper.detailedMessage(cause);
}
/**
* @return The status to report if this failure was a primary failure.
*/
@Override
public RestStatus status() {
return status;
}
@Override
public Throwable getCause() {
return cause;
}
/**
* @return Whether this failure occurred on a primary shard.
* (this only reports true for delete by query)
@ -310,6 +270,8 @@ public class ReplicationResponse extends ActionResponse {
@Override
public void readFrom(StreamInput in) throws IOException {
shardId = ShardId.readShardId(in);
super.shardId = shardId.getId();
super.index = shardId.getIndexName();
nodeId = in.readOptionalString();
cause = in.readException();
status = RestStatus.readFrom(in);

View File

@ -126,7 +126,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
*/
@Override
public Query termQuery(Object value, @Nullable QueryShardContext context) {
if (isSameIndex(value, context.getFullyQualifiedIndexName())) {
if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) {
return Queries.newMatchAllQuery();
} else {
return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.index().getName() + " vs. " + value);
@ -139,14 +139,14 @@ public class IndexFieldMapper extends MetadataFieldMapper {
return super.termsQuery(values, context);
}
for (Object value : values) {
if (isSameIndex(value, context.getFullyQualifiedIndexName())) {
if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) {
// No need to OR these clauses - we can only logically be
// running in the context of just one of these index names.
return Queries.newMatchAllQuery();
}
}
// None of the listed index names are this one
return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.getFullyQualifiedIndexName()
return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.getFullyQualifiedIndex().getName()
+ " vs. " + values);
}
@ -189,5 +189,4 @@ public class IndexFieldMapper extends MetadataFieldMapper {
protected void doMerge(Mapper mergeWith) {
// nothing to do
}
}

View File

@ -83,7 +83,7 @@ public class QueryShardContext extends QueryRewriteContext {
private String[] types = Strings.EMPTY_ARRAY;
private boolean cachable = true;
private final SetOnce<Boolean> frozen = new SetOnce<>();
private final String fullyQualifiedIndexName;
private final Index fullyQualifiedIndex;
public void setTypes(String... types) {
this.types = types;
@ -116,7 +116,8 @@ public class QueryShardContext extends QueryRewriteContext {
this.indexSettings = indexSettings;
this.reader = reader;
this.clusterAlias = clusterAlias;
this.fullyQualifiedIndexName = RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName());
this.fullyQualifiedIndex = new Index(RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName()),
indexSettings.getIndex().getUUID());
}
public QueryShardContext(QueryShardContext source) {
@ -163,7 +164,7 @@ public class QueryShardContext extends QueryRewriteContext {
}
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
return (IFD) indexFieldDataService.apply(fieldType, fullyQualifiedIndexName);
return (IFD) indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName());
}
public void addNamedQuery(String name, Query query) {
@ -275,7 +276,7 @@ public class QueryShardContext extends QueryRewriteContext {
public SearchLookup lookup() {
if (lookup == null) {
lookup = new SearchLookup(getMapperService(),
mappedFieldType -> indexFieldDataService.apply(mappedFieldType, fullyQualifiedIndexName), types);
mappedFieldType -> indexFieldDataService.apply(mappedFieldType, fullyQualifiedIndex.getName()), types);
}
return lookup;
}
@ -426,9 +427,9 @@ public class QueryShardContext extends QueryRewriteContext {
}
/**
* Returns the fully qualified index name including a remote cluster alias if applicable
* Returns the fully qualified index including a remote cluster alias if applicable, and the index uuid
*/
public String getFullyQualifiedIndexName() {
return fullyQualifiedIndexName;
public Index getFullyQualifiedIndex() {
return fullyQualifiedIndex;
}
}
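
Callers migrate from the old string accessor as in this sketch; the uuid is what the richer return type adds (Index#getUUID assumed from core):

    // before: String name = context.getFullyQualifiedIndexName();
    Index index = context.getFullyQualifiedIndex();
    String name = index.getName(); // still includes the remote cluster alias when applicable
    String uuid = index.getUUID(); // newly available to callers such as QueryShardException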

View File

@ -37,16 +37,15 @@ public class QueryShardException extends ElasticsearchException {
}
public QueryShardException(QueryShardContext context, String msg, Throwable cause, Object... args) {
super(msg, cause, args);
setIndex(context.getFullyQualifiedIndexName());
this(context.getFullyQualifiedIndex(), msg, cause, args);
}
/**
* This constructor is provided for use in unit tests where a
* {@link QueryShardContext} may not be available
*/
public QueryShardException(Index index, String msg, Throwable cause) {
super(msg, cause);
public QueryShardException(Index index, String msg, Throwable cause, Object... args) {
super(msg, cause, args);
setIndex(index);
}

View File

@ -39,6 +39,7 @@ import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.OptionalLong;
import java.util.Set;
import java.util.function.Function;
@ -127,6 +128,13 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L
*/
final Map<String, CheckpointState> checkpoints;
/**
* A callback invoked when the global checkpoint is updated. For primary mode this occurs if the computed global checkpoint advances on
* the basis of state changes tracked here. For non-primary mode this occurs if the local knowledge of the global checkpoint advances
* due to an update from the primary.
*/
private final LongConsumer onGlobalCheckpointUpdated;
/**
* This set contains allocation IDs for which there is a thread actively waiting for the local checkpoint to advance to at least the
* current global checkpoint.
@ -391,7 +399,8 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L
final ShardId shardId,
final String allocationId,
final IndexSettings indexSettings,
final long globalCheckpoint) {
final long globalCheckpoint,
final LongConsumer onGlobalCheckpointUpdated) {
super(shardId, indexSettings);
assert globalCheckpoint >= SequenceNumbers.UNASSIGNED_SEQ_NO : "illegal initial global checkpoint: " + globalCheckpoint;
this.shardAllocationId = allocationId;
@ -400,6 +409,7 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L
this.appliedClusterStateVersion = -1L;
this.checkpoints = new HashMap<>(1 + indexSettings.getNumberOfReplicas());
checkpoints.put(allocationId, new CheckpointState(SequenceNumbers.UNASSIGNED_SEQ_NO, globalCheckpoint, false, false));
this.onGlobalCheckpointUpdated = Objects.requireNonNull(onGlobalCheckpointUpdated);
this.pendingInSync = new HashSet<>();
this.routingTable = null;
this.replicationGroup = null;
@ -456,7 +466,10 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L
updateGlobalCheckpoint(
shardAllocationId,
globalCheckpoint,
current -> logger.trace("updating global checkpoint from [{}] to [{}] due to [{}]", current, globalCheckpoint, reason));
current -> {
logger.trace("updated global checkpoint from [{}] to [{}] due to [{}]", current, globalCheckpoint, reason);
onGlobalCheckpointUpdated.accept(globalCheckpoint);
});
assert invariant();
}
@ -474,7 +487,7 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L
allocationId,
globalCheckpoint,
current -> logger.trace(
"updating local knowledge for [{}] on the primary of the global checkpoint from [{}] to [{}]",
"updated local knowledge for [{}] on the primary of the global checkpoint from [{}] to [{}]",
allocationId,
current,
globalCheckpoint));
@ -485,8 +498,8 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L
final CheckpointState cps = checkpoints.get(allocationId);
assert !this.shardAllocationId.equals(allocationId) || cps != null;
if (cps != null && globalCheckpoint > cps.globalCheckpoint) {
ifUpdated.accept(cps.globalCheckpoint);
cps.globalCheckpoint = globalCheckpoint;
ifUpdated.accept(cps.globalCheckpoint);
}
}
@ -737,8 +750,9 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L
assert computedGlobalCheckpoint >= globalCheckpoint : "new global checkpoint [" + computedGlobalCheckpoint +
"] is lower than previous one [" + globalCheckpoint + "]";
if (globalCheckpoint != computedGlobalCheckpoint) {
logger.trace("global checkpoint updated to [{}]", computedGlobalCheckpoint);
cps.globalCheckpoint = computedGlobalCheckpoint;
logger.trace("updated global checkpoint to [{}]", computedGlobalCheckpoint);
onGlobalCheckpointUpdated.accept(computedGlobalCheckpoint);
}
}
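
A sketch of wiring the new constructor parameter; the constructor shape is taken from this diff, while the listener body is illustrative only (IndexShard, in the next hunk, passes a no-op):

    ReplicationTracker tracker = new ReplicationTracker(
        shardId,
        allocationId,
        indexSettings,
        SequenceNumbers.UNASSIGNED_SEQ_NO,
        globalCheckpoint -> logger.trace("global checkpoint updated to [{}]", globalCheckpoint));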

View File

@ -297,8 +297,9 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
this.checkIndexOnStartup = indexSettings.getValue(IndexSettings.INDEX_CHECK_ON_STARTUP);
this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, bigArrays);
this.replicationTracker = new ReplicationTracker(shardId, shardRouting.allocationId().getId(), indexSettings,
SequenceNumbers.UNASSIGNED_SEQ_NO);
final String aId = shardRouting.allocationId().getId();
this.replicationTracker =
new ReplicationTracker(shardId, aId, indexSettings, SequenceNumbers.UNASSIGNED_SEQ_NO, globalCheckpoint -> {});
// the query cache is a node-level thing, however we want the most popular filters
// to be computed on a per-shard basis
if (IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.get(settings)) {
@ -1446,10 +1447,10 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
}
} else {
if (origin == Engine.Operation.Origin.PRIMARY) {
verifyPrimary();
assert assertPrimaryMode();
} else {
assert origin == Engine.Operation.Origin.REPLICA;
verifyReplicationTarget();
assert assertReplicationTarget();
}
if (writeAllowedStates.contains(state) == false) {
throw new IllegalIndexShardStateException(shardId, state, "operation only allowed when shard state is one of " + writeAllowedStates + ", origin [" + origin + "]");
@ -1457,19 +1458,14 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
}
}
private void verifyPrimary() {
if (shardRouting.primary() == false) {
throw new IllegalStateException("shard " + shardRouting + " is not a primary");
}
private boolean assertPrimaryMode() {
assert shardRouting.primary() && replicationTracker.isPrimaryMode() : "shard " + shardRouting + " is not a primary shard in primary mode";
return true;
}
private void verifyReplicationTarget() {
final IndexShardState state = state();
if (shardRouting.primary() && shardRouting.active() && replicationTracker.isPrimaryMode()) {
// must use exception that is not ignored by replication logic. See TransportActions.isShardNotAvailableException
throw new IllegalStateException("active primary shard " + shardRouting + " cannot be a replication target before " +
"relocation hand off, state is [" + state + "]");
}
private boolean assertReplicationTarget() {
assert replicationTracker.isPrimaryMode() == false : "shard " + shardRouting + " in primary mode cannot be a replication target";
return true;
}
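The two assertion helpers above return `true` so that call sites can write `assert assertPrimaryMode();`, letting the JVM strip the whole check when assertions are disabled (the default without `-ea`). A minimal self-contained sketch of the idiom, with illustrative names:

    public class AssertionHelperIdiom {
        private volatile boolean primaryMode = true; // hypothetical state flag

        // Returning a boolean lets callers wrap the check in an assert statement,
        // so the check (and its message) compiles away in production runs.
        private boolean assertPrimaryMode() {
            assert primaryMode : "shard is not in primary mode";
            return true;
        }

        public void doPrimaryOnlyWork() {
            assert assertPrimaryMode(); // no-op unless the JVM runs with -ea
            // ... primary-only work ...
        }
    }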
private void verifyNotClosed() throws IllegalIndexShardStateException {
@ -1716,7 +1712,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
* @param checkpoint the local checkpoint for the shard
*/
public void updateLocalCheckpointForShard(final String allocationId, final long checkpoint) {
verifyPrimary();
assert assertPrimaryMode();
verifyNotClosed();
replicationTracker.updateLocalCheckpoint(allocationId, checkpoint);
}
@ -1728,7 +1724,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
* @param globalCheckpoint the global checkpoint
*/
public void updateGlobalCheckpointForShard(final String allocationId, final long globalCheckpoint) {
verifyPrimary();
assert assertPrimaryMode();
verifyNotClosed();
replicationTracker.updateGlobalCheckpointForShard(allocationId, globalCheckpoint);
}
@ -1750,7 +1746,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
* @param allocationId the allocation ID of the shard for which recovery was initiated
*/
public void initiateTracking(final String allocationId) {
verifyPrimary();
assert assertPrimaryMode();
replicationTracker.initiateTracking(allocationId);
}
@ -1763,7 +1759,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
* @param localCheckpoint the current local checkpoint on the shard
*/
public void markAllocationIdAsInSync(final String allocationId, final long localCheckpoint) throws InterruptedException {
verifyPrimary();
assert assertPrimaryMode();
replicationTracker.markAllocationIdAsInSync(allocationId, localCheckpoint);
}
@ -1798,7 +1794,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
* @return a map from allocation ID to the local knowledge of the global checkpoint for that allocation ID
*/
public ObjectLongMap<String> getInSyncGlobalCheckpoints() {
verifyPrimary();
assert assertPrimaryMode();
verifyNotClosed();
return replicationTracker.getInSyncGlobalCheckpoints();
}
@ -1808,11 +1804,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
* primary.
*/
public void maybeSyncGlobalCheckpoint(final String reason) {
verifyPrimary();
verifyNotClosed();
assert shardRouting.primary() : "only call maybeSyncGlobalCheckpoint on primary shard";
if (replicationTracker.isPrimaryMode() == false) {
return;
}
assert assertPrimaryMode();
// only sync if there are no operations in flight
final SeqNoStats stats = getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint());
if (stats.getMaxSeqNo() == stats.getGlobalCheckpoint()) {
@ -1838,7 +1835,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
* @return the replication group
*/
public ReplicationGroup getReplicationGroup() {
verifyPrimary();
assert assertPrimaryMode();
verifyNotClosed();
return replicationTracker.getReplicationGroup();
}
@ -1850,7 +1847,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
* @param reason the reason the global checkpoint was updated
*/
public void updateGlobalCheckpointOnReplica(final long globalCheckpoint, final String reason) {
verifyReplicationTarget();
assert assertReplicationTarget();
final long localCheckpoint = getLocalCheckpoint();
if (globalCheckpoint > localCheckpoint) {
/*
@ -1877,8 +1874,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
* @param primaryContext the sequence number context
*/
public void activateWithPrimaryContext(final ReplicationTracker.PrimaryContext primaryContext) {
verifyPrimary();
assert shardRouting.isRelocationTarget() : "only relocation target can update allocation IDs from primary context: " + shardRouting;
assert shardRouting.primary() && shardRouting.isRelocationTarget() : "only primary relocation target can update allocation IDs from primary context: " + shardRouting;
assert primaryContext.getCheckpointStates().containsKey(routingEntry().allocationId().getId()) &&
getLocalCheckpoint() == primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint();
synchronized (mutex) {
@ -1892,7 +1888,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
* @return {@code true} if there is at least one shard pending in-sync, otherwise false
*/
public boolean pendingInSync() {
verifyPrimary();
assert assertPrimaryMode();
return replicationTracker.pendingInSync();
}
@ -2209,7 +2205,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
*/
public void acquirePrimaryOperationPermit(ActionListener<Releasable> onPermitAcquired, String executorOnDelay, Object debugInfo) {
verifyNotClosed();
verifyPrimary();
assert shardRouting.primary() : "acquirePrimaryOperationPermit should only be called on primary shard: " + shardRouting;
indexShardOperationPermits.acquire(onPermitAcquired, executorOnDelay, false, debugInfo);
}
@ -2259,7 +2255,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
final ActionListener<Releasable> onPermitAcquired, final String executorOnDelay,
final Object debugInfo) {
verifyNotClosed();
verifyReplicationTarget();
if (opPrimaryTerm > pendingPrimaryTerm) {
synchronized (mutex) {
if (opPrimaryTerm > pendingPrimaryTerm) {
@ -2312,6 +2307,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
operationPrimaryTerm);
onPermitAcquired.onFailure(new IllegalStateException(message));
} else {
assert assertReplicationTarget();
try {
updateGlobalCheckpointOnReplica(globalCheckpoint, "operation");
} catch (Exception e) {

View File

@ -48,6 +48,7 @@ import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.rescore.RescorerBuilder;
import org.elasticsearch.search.rescore.Rescorer;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionBuilder;
@ -149,31 +150,61 @@ public interface SearchPlugin {
* Specification for a {@link Suggester}.
*/
class SuggesterSpec<T extends SuggestionBuilder<T>> extends SearchExtensionSpec<T, CheckedFunction<XContentParser, T, IOException>> {
private Writeable.Reader<? extends Suggest.Suggestion> suggestionReader;
/**
* Specification of custom {@link Suggester}.
*
* @param name holds the names by which this suggester might be parsed. The {@link ParseField#getPreferredName()} is special as it
is the name under which the reader is registered. So it is the name that the query should use as its
* {@link NamedWriteable#getWriteableName()} too.
* @param reader the reader registered for this suggester's builder. Typically a reference to a constructor that takes a
is the name under which the request builder and Suggestion response readers are registered. So it is the name that the
* query and Suggestion response should use as their {@link NamedWriteable#getWriteableName()} return values too.
* @param builderReader the reader registered for this suggester's builder. Typically a reference to a constructor that takes a
* {@link StreamInput}
* @param parser the parser that reads the query suggester from xcontent
* @param builderParser a parser that reads the suggester's builder from xcontent
* @param suggestionReader the reader registered for this suggester's Suggestion response. Typically a reference to a constructor
* that takes a {@link StreamInput}
*/
public SuggesterSpec(ParseField name, Writeable.Reader<T> reader, CheckedFunction<XContentParser, T, IOException> parser) {
super(name, reader, parser);
public SuggesterSpec(
ParseField name,
Writeable.Reader<T> builderReader,
CheckedFunction<XContentParser, T, IOException> builderParser,
Writeable.Reader<? extends Suggest.Suggestion> suggestionReader) {
super(name, builderReader, builderParser);
setSuggestionReader(suggestionReader);
}
/**
* Specification of custom {@link Suggester}.
*
* @param name the name by which this suggester might be parsed or deserialized. Make sure that the query builder returns this name
* for {@link NamedWriteable#getWriteableName()}.
* @param reader the reader registered for this suggester's builder. Typically a reference to a constructor that takes a
* @param name the name by which this suggester might be parsed or deserialized. Make sure that the query builder and Suggestion
* response reader return this name for {@link NamedWriteable#getWriteableName()}.
* @param builderReader the reader registered for this suggester's builder. Typically a reference to a constructor that takes a
* {@link StreamInput}
* @param parser the parser that reads the suggester builder from xcontent
* @param builderParser a parser that reads the suggester's builder from xcontent
* @param suggestionReader the reader registered for this suggester's Suggestion response. Typically a reference to a constructor
* that takes a {@link StreamInput}
*/
public SuggesterSpec(String name, Writeable.Reader<T> reader, CheckedFunction<XContentParser, T, IOException> parser) {
super(name, reader, parser);
public SuggesterSpec(
String name,
Writeable.Reader<T> builderReader,
CheckedFunction<XContentParser, T, IOException> builderParser,
Writeable.Reader<? extends Suggest.Suggestion> suggestionReader) {
super(name, builderReader, builderParser);
setSuggestionReader(suggestionReader);
}
private void setSuggestionReader(Writeable.Reader<? extends Suggest.Suggestion> reader) {
this.suggestionReader = reader;
}
/**
* Returns the reader used to read the {@link Suggest.Suggestion} generated by this suggester
*/
public Writeable.Reader<? extends Suggest.Suggestion> getSuggestionReader() {
return this.suggestionReader;
}
}
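A hedged sketch of how a plugin might register a suggester with the new four-argument spec; `MySuggestionBuilder` and `MySuggestion` are hypothetical stand-ins for a plugin's own builder and Suggestion response classes:

    import java.util.Collections;
    import java.util.List;
    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.plugins.SearchPlugin;

    public class MySuggesterPlugin extends Plugin implements SearchPlugin {
        @Override
        public List<SuggesterSpec<?>> getSuggesters() {
            return Collections.singletonList(new SuggesterSpec<>(
                "my_suggester",                    // shared writeable/parse name
                MySuggestionBuilder::new,          // builder reader (StreamInput constructor)
                MySuggestionBuilder::fromXContent, // builder xcontent parser
                MySuggestion::new));               // Suggestion response reader, newly required
        }
    }

Note that `MySuggestion.getWriteableName()` would have to return the same `"my_suggester"` name, as the javadoc above requires.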

View File

@ -247,13 +247,17 @@ import org.elasticsearch.search.sort.GeoDistanceSortBuilder;
import org.elasticsearch.search.sort.ScoreSortBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.Laplace;
import org.elasticsearch.search.suggest.phrase.LinearInterpolation;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestion;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
import org.elasticsearch.search.suggest.phrase.StupidBackoff;
import org.elasticsearch.search.suggest.term.TermSuggestion;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
import java.util.ArrayList;
@ -590,9 +594,14 @@ public class SearchModule {
private void registerSuggesters(List<SearchPlugin> plugins) {
registerSmoothingModels(namedWriteables);
registerSuggester(new SuggesterSpec<>("term", TermSuggestionBuilder::new, TermSuggestionBuilder::fromXContent));
registerSuggester(new SuggesterSpec<>("phrase", PhraseSuggestionBuilder::new, PhraseSuggestionBuilder::fromXContent));
registerSuggester(new SuggesterSpec<>("completion", CompletionSuggestionBuilder::new, CompletionSuggestionBuilder::fromXContent));
registerSuggester(new SuggesterSpec<>(TermSuggestionBuilder.SUGGESTION_NAME,
TermSuggestionBuilder::new, TermSuggestionBuilder::fromXContent, TermSuggestion::new));
registerSuggester(new SuggesterSpec<>(PhraseSuggestionBuilder.SUGGESTION_NAME,
PhraseSuggestionBuilder::new, PhraseSuggestionBuilder::fromXContent, PhraseSuggestion::new));
registerSuggester(new SuggesterSpec<>(CompletionSuggestionBuilder.SUGGESTION_NAME,
CompletionSuggestionBuilder::new, CompletionSuggestionBuilder::fromXContent, CompletionSuggestion::new));
registerFromPlugin(plugins, SearchPlugin::getSuggesters, this::registerSuggester);
}
@ -602,6 +611,10 @@ public class SearchModule {
SuggestionBuilder.class, suggester.getName().getPreferredName(), suggester.getReader()));
namedXContents.add(new NamedXContentRegistry.Entry(SuggestionBuilder.class, suggester.getName(),
suggester.getParser()));
namedWriteables.add(new NamedWriteableRegistry.Entry(
Suggest.Suggestion.class, suggester.getName().getPreferredName(), suggester.getSuggestionReader()
));
}
private Map<String, Highlighter> setupHighlighters(Settings settings, List<SearchPlugin> plugins) {

View File

@ -247,6 +247,6 @@ public class CollapseBuilder implements Writeable, ToXContentObject {
+ field + "`, " + "only indexed field can retrieve `inner_hits`");
}
return new CollapseContext(fieldType, innerHits);
return new CollapseContext(field, fieldType, innerHits);
}
}

View File

@ -25,26 +25,31 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.query.InnerHitBuilder;
import java.util.Collections;
import java.util.List;
/**
* Context used for field collapsing
*/
public class CollapseContext {
private final String fieldName;
private final MappedFieldType fieldType;
private final List<InnerHitBuilder> innerHits;
public CollapseContext(MappedFieldType fieldType, InnerHitBuilder innerHit) {
this.fieldType = fieldType;
this.innerHits = Collections.singletonList(innerHit);
}
public CollapseContext(MappedFieldType fieldType, List<InnerHitBuilder> innerHits) {
public CollapseContext(String fieldName,
MappedFieldType fieldType,
List<InnerHitBuilder> innerHits) {
this.fieldName = fieldName;
this.fieldType = fieldType;
this.innerHits = innerHits;
}
/**
* The requested field name to collapse on.
*/
public String getFieldName() {
return fieldName;
}
/** The field type used for collapsing **/
public MappedFieldType getFieldType() {
return fieldType;

View File

@ -61,7 +61,7 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
if (context.collapse() != null) {
// retrieve the `doc_value` associated with the collapse field
String name = context.collapse().getFieldType().name();
String name = context.collapse().getFieldName();
if (context.docValueFieldsContext() == null) {
context.docValueFieldsContext(new DocValueFieldsContext(
Collections.singletonList(new FieldAndFormat(name, DocValueFieldsContext.USE_DEFAULT_FORMAT))));

View File

@ -50,7 +50,7 @@ public class InternalSearchResponse extends SearchResponseSections implements Wr
super(
SearchHits.readSearchHits(in),
in.readBoolean() ? InternalAggregations.readAggregations(in) : null,
in.readBoolean() ? Suggest.readSuggest(in) : null,
in.readBoolean() ? new Suggest(in) : null,
in.readBoolean(),
in.readOptionalBoolean(),
in.readOptionalWriteable(SearchProfileShardResults::new),
@ -62,7 +62,7 @@ public class InternalSearchResponse extends SearchResponseSections implements Wr
public void writeTo(StreamOutput out) throws IOException {
hits.writeTo(out);
out.writeOptionalStreamable((InternalAggregations)aggregations);
out.writeOptionalStreamable(suggest);
out.writeOptionalWriteable(suggest);
out.writeBoolean(timedOut);
out.writeOptionalBoolean(terminatedEarly);
out.writeOptionalWriteable(profileResults);

View File

@ -293,7 +293,7 @@ public final class QuerySearchResult extends SearchPhaseResult {
pipelineAggregators = in.readNamedWriteableList(PipelineAggregator.class).stream().map(a -> (SiblingPipelineAggregator) a)
.collect(Collectors.toList());
if (in.readBoolean()) {
suggest = Suggest.readSuggest(in);
suggest = new Suggest(in);
}
searchTimedOut = in.readBoolean();
terminatedEarly = in.readOptionalBoolean();

View File

@ -20,18 +20,18 @@ package org.elasticsearch.search.suggest;
import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.Version;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@ -53,16 +53,15 @@ import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
* Top level suggest result, containing the result for each suggestion.
*/
public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? extends Option>>>, Streamable, ToXContentFragment {
public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? extends Option>>>, Writeable, ToXContentFragment {
public static final String NAME = "suggest";
@ -92,6 +91,40 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
this.hasScoreDocs = filter(CompletionSuggestion.class).stream().anyMatch(CompletionSuggestion::hasScoreDocs);
}
public Suggest(StreamInput in) throws IOException {
// in older versions, Suggestion types were serialized as Streamable
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
final int size = in.readVInt();
suggestions = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
Suggestion<? extends Entry<? extends Option>> suggestion;
final int type = in.readVInt();
switch (type) {
case TermSuggestion.TYPE:
suggestion = new TermSuggestion(in);
break;
case CompletionSuggestion.TYPE:
suggestion = new CompletionSuggestion(in);
break;
case PhraseSuggestion.TYPE:
suggestion = new PhraseSuggestion(in);
break;
default:
throw new IllegalArgumentException("Unknown suggestion type with ordinal " + type);
}
suggestions.add(suggestion);
}
} else {
int suggestionCount = in.readVInt();
suggestions = new ArrayList<>(suggestionCount);
for (int i = 0; i < suggestionCount; i++) {
suggestions.add(in.readNamedWriteable(Suggestion.class));
}
}
hasScoreDocs = filter(CompletionSuggestion.class).stream().anyMatch(CompletionSuggestion::hasScoreDocs);
}
@Override
public Iterator<Suggestion<? extends Entry<? extends Option>>> iterator() {
return suggestions.iterator();
@ -125,42 +158,20 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
return hasScoreDocs;
}
@Override
public void readFrom(StreamInput in) throws IOException {
final int size = in.readVInt();
suggestions = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
// TODO: remove these complicated generics
Suggestion<? extends Entry<? extends Option>> suggestion;
final int type = in.readVInt();
switch (type) {
case TermSuggestion.TYPE:
suggestion = new TermSuggestion();
break;
case CompletionSuggestion.TYPE:
suggestion = new CompletionSuggestion();
break;
case 2: // CompletionSuggestion.TYPE
throw new IllegalArgumentException("Completion suggester 2.x is not supported anymore");
case PhraseSuggestion.TYPE:
suggestion = new PhraseSuggestion();
break;
default:
suggestion = new Suggestion();
break;
}
suggestion.readFrom(in);
suggestions.add(suggestion);
}
hasScoreDocs = filter(CompletionSuggestion.class).stream().anyMatch(CompletionSuggestion::hasScoreDocs);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(suggestions.size());
for (Suggestion<?> command : suggestions) {
out.writeVInt(command.getWriteableType());
command.writeTo(out);
// in older versions, Suggestion types were serialized as Streamable
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
out.writeVInt(suggestions.size());
for (Suggestion<?> command : suggestions) {
out.writeVInt(command.getWriteableType());
command.writeTo(out);
}
} else {
out.writeVInt(suggestions.size());
for (Suggestion<? extends Entry<? extends Option>> suggestion : suggestions) {
out.writeNamedWriteable(suggestion);
}
}
}
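A hedged sketch of the round trip this enables, assuming a registry populated the way `SearchModule.registerSuggester` does elsewhere in this commit (imports omitted; `suggest` is an existing `Suggest` instance):

    NamedWriteableRegistry registry = new NamedWriteableRegistry(Collections.singletonList(
        new NamedWriteableRegistry.Entry(Suggest.Suggestion.class, "term", TermSuggestion::new)));

    BytesStreamOutput out = new BytesStreamOutput();
    suggest.writeTo(out); // on current versions, writes each suggestion with its writeable name

    StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
    Suggest roundTripped = new Suggest(in); // resolves each name back to its registered reader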
@ -195,12 +206,6 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
return new Suggest(suggestions);
}
public static Suggest readSuggest(StreamInput in) throws IOException {
Suggest result = new Suggest();
result.readFrom(in);
return result;
}
public static List<Suggestion<? extends Entry<? extends Option>>> reduce(Map<String, List<Suggest.Suggestion>> groupedSuggestions) {
List<Suggestion<? extends Entry<? extends Option>>> reduced = new ArrayList<>(groupedSuggestions.size());
for (java.util.Map.Entry<String, List<Suggestion>> unmergedResults : groupedSuggestions.entrySet()) {
@ -232,10 +237,27 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
.collect(Collectors.toList());
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
return Objects.equals(suggestions, ((Suggest) other).suggestions);
}
@Override
public int hashCode() {
return Objects.hash(suggestions);
}
/**
* The suggestion responses corresponding with the suggestions in the request.
*/
public static class Suggestion<T extends Suggestion.Entry> implements Iterable<T>, Streamable, ToXContentFragment {
public abstract static class Suggestion<T extends Suggestion.Entry> implements Iterable<T>, NamedWriteable, ToXContentFragment {
private static final String NAME = "suggestion";
@ -252,6 +274,24 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
this.size = size; // The suggested term size specified in request, only used for merging shard responses
}
public Suggestion(StreamInput in) throws IOException {
name = in.readString();
size = in.readVInt();
// this is a hack to work around slightly different serialization order of earlier versions of TermSuggestion
if (in.getVersion().before(Version.V_7_0_0_alpha1) && this instanceof TermSuggestion) {
TermSuggestion t = (TermSuggestion) this;
t.setSort(SortBy.readFromStream(in));
}
int entriesCount = in.readVInt();
entries.clear();
for (int i = 0; i < entriesCount; i++) {
T newEntry = newEntry(in);
entries.add(newEntry);
}
}
public void addTerm(T entry) {
entries.add(entry);
}
@ -259,20 +299,14 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
/**
* Returns an integer representing the type of the suggestion. This is used for
* internal serialization over the network.
*
* This class is now serialized as a NamedWriteable and this method only remains for backwards compatibility
*/
public int getWriteableType() { // TODO remove this in favor of NamedWriteable
@Deprecated
public int getWriteableType() {
return TYPE;
}
/**
* Returns a string representing the type of the suggestion. This type is added to
* the suggestion name in the XContent response, so that it can later be used by
* REST clients to determine the internal type of the suggestion.
*/
protected String getType() {
return NAME;
}
@Override
public Iterator<T> iterator() {
return entries.iterator();
@ -346,57 +380,67 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
}
}
@Override
public void readFrom(StreamInput in) throws IOException {
innerReadFrom(in);
int size = in.readVInt();
entries.clear();
for (int i = 0; i < size; i++) {
T newEntry = newEntry();
newEntry.readFrom(in);
entries.add(newEntry);
}
}
protected T newEntry() {
return (T)new Entry();
}
protected void innerReadFrom(StreamInput in) throws IOException {
name = in.readString();
size = in.readVInt();
}
protected abstract T newEntry();
protected abstract T newEntry(StreamInput in) throws IOException;
@Override
public void writeTo(StreamOutput out) throws IOException {
innerWriteTo(out);
out.writeString(name);
out.writeVInt(size);
// this is a hack to work around slightly different serialization order in older versions of TermSuggestion
if (out.getVersion().before(Version.V_7_0_0_alpha1) && this instanceof TermSuggestion) {
TermSuggestion termSuggestion = (TermSuggestion) this;
termSuggestion.getSort().writeTo(out);
}
out.writeVInt(entries.size());
for (Entry<?> entry : entries) {
entry.writeTo(out);
}
}
public void innerWriteTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeVInt(size);
}
@Override
public abstract String getWriteableName();
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (params.paramAsBoolean(RestSearchAction.TYPED_KEYS_PARAM, false)) {
// Concatenates the type and the name of the suggestion (ex: completion#foo)
builder.startArray(String.join(Aggregation.TYPED_KEYS_DELIMITER, getType(), getName()));
builder.startArray(String.join(Aggregation.TYPED_KEYS_DELIMITER, getWriteableName(), getName()));
} else {
builder.startArray(getName());
}
for (Entry<?> entry : entries) {
builder.startObject();
entry.toXContent(builder, params);
builder.endObject();
}
builder.endArray();
return builder;
}
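For illustration, the typed key is assembled exactly as above, so with `?typed_keys` a term suggestion named `my-suggestion` would be keyed as in this hypothetical snippet:

    String typedKey = String.join(Aggregation.TYPED_KEYS_DELIMITER, "term", "my-suggestion");
    // -> "term#my-suggestion"; REST clients split on the delimiter to recover the type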
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
Suggestion otherSuggestion = (Suggestion) other;
return Objects.equals(name, otherSuggestion.name)
&& Objects.equals(size, otherSuggestion.size)
&& Objects.equals(entries, otherSuggestion.entries);
}
@Override
public int hashCode() {
return Objects.hash(name, size, entries);
}
@SuppressWarnings("unchecked")
public static Suggestion<? extends Entry<? extends Option>> fromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation);
@ -417,7 +461,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
/**
* Represents a part of the suggest text with suggested options.
*/
public static class Entry<O extends Entry.Option> implements Iterable<O>, Streamable, ToXContentObject {
public abstract static class Entry<O extends Option> implements Iterable<O>, Writeable, ToXContentFragment {
private static final String TEXT = "text";
private static final String OFFSET = "offset";
@ -436,7 +480,18 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
this.length = length;
}
protected Entry() {
protected Entry() {}
public Entry(StreamInput in) throws IOException {
text = in.readText();
offset = in.readVInt();
length = in.readVInt();
int suggestedWords = in.readVInt();
options = new ArrayList<>(suggestedWords);
for (int j = 0; j < suggestedWords; j++) {
O newOption = newOption(in);
options.add(newOption);
}
}
public void addOption(O option) {
@ -534,44 +589,27 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Entry<?> entry = (Entry<?>) o;
if (length != entry.length) return false;
if (offset != entry.offset) return false;
if (!this.text.equals(entry.text)) return false;
return true;
return Objects.equals(length, entry.length)
&& Objects.equals(offset, entry.offset)
&& Objects.equals(text, entry.text)
&& Objects.equals(options, entry.options);
}
@Override
public int hashCode() {
int result = text.hashCode();
result = 31 * result + offset;
result = 31 * result + length;
return result;
return Objects.hash(text, offset, length, options);
}
@Override
public void readFrom(StreamInput in) throws IOException {
text = in.readText();
offset = in.readVInt();
length = in.readVInt();
int suggestedWords = in.readVInt();
options = new ArrayList<>(suggestedWords);
for (int j = 0; j < suggestedWords; j++) {
O newOption = newOption();
newOption.readFrom(in);
options.add(newOption);
}
}
@SuppressWarnings("unchecked")
protected O newOption(){
return (O) new Option();
}
protected abstract O newOption();
protected abstract O newOption(StreamInput in) throws IOException;
@Override
public void writeTo(StreamOutput out) throws IOException {
@ -586,40 +624,29 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(TEXT, text);
builder.field(OFFSET, offset);
builder.field(LENGTH, length);
builder.startArray(OPTIONS);
for (Option option : options) {
builder.startObject();
option.toXContent(builder, params);
builder.endObject();
}
builder.endArray();
builder.endObject();
return builder;
}
private static ObjectParser<Entry<Option>, Void> PARSER = new ObjectParser<>("SuggestionEntryParser", true, Entry::new);
static {
declareCommonFields(PARSER);
PARSER.declareObjectArray(Entry::addOptions, (p,c) -> Option.fromXContent(p), new ParseField(OPTIONS));
}
protected static void declareCommonFields(ObjectParser<? extends Entry<? extends Option>, Void> parser) {
parser.declareString((entry, text) -> entry.text = new Text(text), new ParseField(TEXT));
parser.declareInt((entry, offset) -> entry.offset = offset, new ParseField(OFFSET));
parser.declareInt((entry, length) -> entry.length = length, new ParseField(LENGTH));
}
public static Entry<? extends Option> fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
/**
* Contains the suggested text with its document frequency and score.
*/
public static class Option implements Streamable, ToXContentObject {
public abstract static class Option implements Writeable, ToXContentFragment {
public static final ParseField TEXT = new ParseField("text");
public static final ParseField HIGHLIGHTED = new ParseField("highlighted");
@ -646,7 +673,13 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
this(text, null, score);
}
public Option() {
public Option() {}
public Option(StreamInput in) throws IOException {
text = in.readText();
score = in.readFloat();
highlighted = in.readOptionalText();
collateMatch = in.readOptionalBoolean();
}
/**
@ -683,14 +716,6 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
this.score = score;
}
@Override
public void readFrom(StreamInput in) throws IOException {
text = in.readText();
score = in.readFloat();
highlighted = in.readOptionalText();
collateMatch = in.readOptionalBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeText(text);
@ -701,45 +726,19 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
innerToXContent(builder, params);
builder.endObject();
return builder;
}
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(TEXT.getPreferredName(), text);
if (highlighted != null) {
builder.field(HIGHLIGHTED.getPreferredName(), highlighted);
}
builder.field(SCORE.getPreferredName(), score);
if (collateMatch != null) {
builder.field(COLLATE_MATCH.getPreferredName(), collateMatch.booleanValue());
}
return builder;
}
private static final ConstructingObjectParser<Option, Void> PARSER = new ConstructingObjectParser<>("SuggestOptionParser",
true, args -> {
Text text = new Text((String) args[0]);
float score = (Float) args[1];
String highlighted = (String) args[2];
Text highlightedText = highlighted == null ? null : new Text(highlighted);
Boolean collateMatch = (Boolean) args[3];
return new Option(text, highlightedText, score, collateMatch);
});
static {
PARSER.declareString(constructorArg(), TEXT);
PARSER.declareFloat(constructorArg(), SCORE);
PARSER.declareString(optionalConstructorArg(), HIGHLIGHTED);
PARSER.declareBoolean(optionalConstructorArg(), COLLATE_MATCH);
}
public static Option fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
protected void mergeInto(Option otherOption) {
score = Math.max(score, otherOption.score);
if (otherOption.collateMatch != null) {
@ -751,18 +750,25 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
}
}
/*
* We consider options equal if they have the same text, even if their other fields may differ
*/
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Option that = (Option) o;
return text.equals(that.text);
return Objects.equals(text, that.text);
}
@Override
public int hashCode() {
return text.hashCode();
return Objects.hash(text);
}
}
}

View File

@ -66,8 +66,7 @@ import static org.elasticsearch.search.suggest.Suggest.COMPARATOR;
*/
public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSuggestion.Entry> {
public static final String NAME = "completion";
@Deprecated
public static final int TYPE = 4;
private boolean skipDuplicates;
@ -86,14 +85,18 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
this.skipDuplicates = skipDuplicates;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
public CompletionSuggestion(StreamInput in) throws IOException {
super(in);
if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
skipDuplicates = in.readBoolean();
}
}
@Override
public String getWriteableName() {
return CompletionSuggestionBuilder.SUGGESTION_NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
@ -121,6 +124,17 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
return getOptions().size() > 0;
}
@Override
public boolean equals(Object other) {
return super.equals(other)
&& Objects.equals(skipDuplicates, ((CompletionSuggestion) other).skipDuplicates);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), skipDuplicates);
}
public static CompletionSuggestion fromXContent(XContentParser parser, String name) throws IOException {
CompletionSuggestion suggestion = new CompletionSuggestion(name, -1, false);
parseEntries(parser, suggestion, CompletionSuggestion.Entry::fromXContent);
@ -222,13 +236,13 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
}
@Override
protected String getType() {
return NAME;
protected Entry newEntry() {
return new Entry();
}
@Override
protected Entry newEntry() {
return new Entry();
protected Entry newEntry(StreamInput in) throws IOException {
return new Entry(in);
}
public static final class Entry extends Suggest.Suggestion.Entry<CompletionSuggestion.Entry.Option> {
@ -237,7 +251,10 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
super(text, offset, length);
}
Entry() {
Entry() {}
public Entry(StreamInput in) throws IOException {
super(in);
}
@Override
@ -245,6 +262,11 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
return new Option();
}
@Override
protected Option newOption(StreamInput in) throws IOException {
return new Option(in);
}
private static ObjectParser<Entry, Void> PARSER = new ObjectParser<>("CompletionSuggestionEntryParser", true,
Entry::new);
@ -274,6 +296,25 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
super();
}
public Option(StreamInput in) throws IOException {
super(in);
this.doc = Lucene.readScoreDoc(in);
if (in.readBoolean()) {
this.hit = SearchHit.readSearchHit(in);
}
int contextSize = in.readInt();
this.contexts = new LinkedHashMap<>(contextSize);
for (int i = 0; i < contextSize; i++) {
String contextName = in.readString();
int nContexts = in.readVInt();
Set<CharSequence> contexts = new HashSet<>(nContexts);
for (int j = 0; j < nContexts; j++) {
contexts.add(in.readString());
}
this.contexts.put(contextName, contexts);
}
}
@Override
protected void mergeInto(Suggest.Suggestion.Entry.Option otherOption) {
// Completion suggestions are reduced by
@ -302,7 +343,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(TEXT.getPreferredName(), getText());
if (hit != null) {
hit.toInnerXContent(builder, params);
@ -375,26 +416,6 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
return option;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
this.doc = Lucene.readScoreDoc(in);
if (in.readBoolean()) {
this.hit = SearchHit.readSearchHit(in);
}
int contextSize = in.readInt();
this.contexts = new LinkedHashMap<>(contextSize);
for (int i = 0; i < contextSize; i++) {
String contextName = in.readString();
int nContexts = in.readVInt();
Set<CharSequence> contexts = new HashSet<>(nContexts);
for (int j = 0; j < nContexts; j++) {
contexts.add(in.readString());
}
this.contexts.put(contextName, contexts);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);

View File

@ -59,10 +59,12 @@ import java.util.Objects;
public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSuggestionBuilder> {
private static final XContentType CONTEXT_BYTES_XCONTENT_TYPE = XContentType.JSON;
static final String SUGGESTION_NAME = "completion";
static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context");
static final ParseField SKIP_DUPLICATES_FIELD = new ParseField("skip_duplicates");
public static final String SUGGESTION_NAME = "completion";
/**
* {
* "field" : STRING

View File

@ -133,9 +133,9 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
highlighted = new Text(spare.toString());
}
if (collatePrune) {
resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch));
resultEntry.addOption(new PhraseSuggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch));
} else {
resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score)));
resultEntry.addOption(new PhraseSuggestion.Entry.Option(phrase, highlighted, (float) (correction.score)));
}
}
} else {

View File

@ -23,41 +23,55 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ContextParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Suggestion entry returned from the {@link PhraseSuggester}.
*/
public class PhraseSuggestion extends Suggest.Suggestion<PhraseSuggestion.Entry> {
public static final String NAME = "phrase";
@Deprecated
public static final int TYPE = 3;
public PhraseSuggestion() {
}
public PhraseSuggestion() {}
public PhraseSuggestion(String name, int size) {
super(name, size);
}
public PhraseSuggestion(StreamInput in) throws IOException {
super(in);
}
@Override
public String getWriteableName() {
return PhraseSuggestionBuilder.SUGGESTION_NAME;
}
@Override
public int getWriteableType() {
return TYPE;
}
@Override
protected String getType() {
return NAME;
protected Entry newEntry() {
return new Entry();
}
@Override
protected Entry newEntry() {
return new Entry();
protected Entry newEntry(StreamInput in) throws IOException {
return new Entry(in);
}
public static PhraseSuggestion fromXContent(XContentParser parser, String name) throws IOException {
@ -66,7 +80,7 @@ public class PhraseSuggestion extends Suggest.Suggestion<PhraseSuggestion.Entry>
return suggestion;
}
public static class Entry extends Suggestion.Entry<Suggestion.Entry.Option> {
public static class Entry extends Suggestion.Entry<PhraseSuggestion.Entry.Option> {
protected double cutoffScore = Double.MIN_VALUE;
@ -75,7 +89,15 @@ public class PhraseSuggestion extends Suggest.Suggestion<PhraseSuggestion.Entry>
this.cutoffScore = cutoffScore;
}
Entry() {
public Entry(Text text, int offset, int length) {
super(text, offset, length);
}
Entry() {}
public Entry(StreamInput in) throws IOException {
super(in);
cutoffScore = in.readDouble();
}
/**
@ -86,7 +108,7 @@ public class PhraseSuggestion extends Suggest.Suggestion<PhraseSuggestion.Entry>
}
@Override
protected void merge(Suggestion.Entry<Suggestion.Entry.Option> other) {
protected void merge(Suggestion.Entry<Option> other) {
super.merge(other);
// If the cluster contains both pre 0.90.4 and post 0.90.4 nodes then we'll see Suggestion.Entry
// objects being merged with PhraseSuggestion.Entry objects. We merge Suggestion.Entry objects
@ -100,7 +122,7 @@ public class PhraseSuggestion extends Suggest.Suggestion<PhraseSuggestion.Entry>
}
@Override
public void addOption(Suggestion.Entry.Option option) {
public void addOption(Option option) {
if (option.getScore() > this.cutoffScore) {
this.options.add(option);
}
@ -110,7 +132,8 @@ public class PhraseSuggestion extends Suggest.Suggestion<PhraseSuggestion.Entry>
static {
declareCommonFields(PARSER);
PARSER.declareObjectArray(Entry::addOptions, (p,c) -> Option.fromXContent(p), new ParseField(OPTIONS));
PARSER.declareObjectArray(Entry::addOptions, (ContextParser<Void, Option>) (p, c) -> Option.fromXContent(p),
new ParseField(OPTIONS));
}
public static Entry fromXContent(XContentParser parser) {
@ -118,9 +141,13 @@ public class PhraseSuggestion extends Suggest.Suggestion<PhraseSuggestion.Entry>
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
cutoffScore = in.readDouble();
protected Option newOption() {
return new Option();
}
@Override
protected Option newOption(StreamInput in) throws IOException {
return new Option(in);
}
@Override
@ -128,5 +155,56 @@ public class PhraseSuggestion extends Suggest.Suggestion<PhraseSuggestion.Entry>
super.writeTo(out);
out.writeDouble(cutoffScore);
}
@Override
public boolean equals(Object other) {
return super.equals(other)
&& Objects.equals(cutoffScore, ((Entry) other).cutoffScore);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), cutoffScore);
}
public static class Option extends Suggestion.Entry.Option {
public Option() {
super();
}
public Option(Text text, Text highlighted, float score, Boolean collateMatch) {
super(text, highlighted, score, collateMatch);
}
public Option(Text text, Text highlighted, float score) {
super(text, highlighted, score);
}
public Option(StreamInput in) throws IOException {
super(in);
}
private static final ConstructingObjectParser<Option, Void> PARSER = new ConstructingObjectParser<>("PhraseOptionParser",
true, args -> {
Text text = new Text((String) args[0]);
float score = (Float) args[1];
String highlighted = (String) args[2];
Text highlightedText = highlighted == null ? null : new Text(highlighted);
Boolean collateMatch = (Boolean) args[3];
return new Option(text, highlightedText, score, collateMatch);
});
static {
PARSER.declareString(constructorArg(), TEXT);
PARSER.declareFloat(constructorArg(), SCORE);
PARSER.declareString(optionalConstructorArg(), HIGHLIGHTED);
PARSER.declareBoolean(optionalConstructorArg(), COLLATE_MATCH);
}
public static Option fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
}
}
}

View File

@ -59,7 +59,7 @@ import java.util.Set;
*/
public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionBuilder> {
private static final String SUGGESTION_NAME = "phrase";
public static final String SUGGESTION_NAME = "phrase";
protected static final ParseField MAXERRORS_FIELD = new ParseField("max_errors");
protected static final ParseField RWE_LIKELIHOOD_FIELD = new ParseField("real_word_error_likelihood");

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.suggest.term;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -28,11 +29,13 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.suggest.SortBy;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
import java.io.IOException;
import java.util.Comparator;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
@ -41,22 +44,29 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constru
*/
public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
public static final String NAME = "term";
@Deprecated
public static final int TYPE = 1;
public static final Comparator<Suggestion.Entry.Option> SCORE = new Score();
public static final Comparator<Suggestion.Entry.Option> FREQUENCY = new Frequency();
public static final int TYPE = 1;
private SortBy sort;
public TermSuggestion() {
}
public TermSuggestion() {}
public TermSuggestion(String name, int size, SortBy sort) {
super(name, size);
this.sort = sort;
}
public TermSuggestion(StreamInput in) throws IOException {
super(in);
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
sort = SortBy.readFromStream(in);
}
}
// Same behaviour as comparators in suggest module, but for SuggestedWord
// Highest score first, then highest freq first, then lowest term first
public static class Score implements Comparator<Suggestion.Entry.Option> {
@ -103,9 +113,12 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
return TYPE;
}
@Override
protected String getType() {
return NAME;
public void setSort(SortBy sort) {
this.sort = sort;
}
public SortBy getSort() {
return sort;
}
@Override
@ -121,15 +134,17 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
}
@Override
protected void innerReadFrom(StreamInput in) throws IOException {
super.innerReadFrom(in);
sort = SortBy.readFromStream(in);
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
sort.writeTo(out);
}
}
@Override
public void innerWriteTo(StreamOutput out) throws IOException {
super.innerWriteTo(out);
sort.writeTo(out);
public String getWriteableName() {
return TermSuggestionBuilder.SUGGESTION_NAME;
}
public static TermSuggestion fromXContent(XContentParser parser, String name) throws IOException {
@ -144,16 +159,35 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
return new Entry();
}
@Override
protected Entry newEntry(StreamInput in) throws IOException {
return new Entry(in);
}
@Override
public boolean equals(Object other) {
return super.equals(other)
&& Objects.equals(sort, ((TermSuggestion) other).sort);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), sort);
}
/**
* Represents a part of the suggest text with suggested options.
*/
public static class Entry extends org.elasticsearch.search.suggest.Suggest.Suggestion.Entry<TermSuggestion.Entry.Option> {
public static class Entry extends Suggest.Suggestion.Entry<TermSuggestion.Entry.Option> {
public Entry(Text text, int offset, int length) {
super(text, offset, length);
}
Entry() {
public Entry() {}
public Entry(StreamInput in) throws IOException {
super(in);
}
@Override
@ -161,6 +195,11 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
return new Option();
}
@Override
protected Option newOption(StreamInput in) throws IOException {
return new Option(in);
}
private static ObjectParser<Entry, Void> PARSER = new ObjectParser<>("TermSuggestionEntryParser", true, Entry::new);
static {
@ -175,7 +214,7 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
/**
* Contains the suggested text with its document frequency and score.
*/
public static class Option extends org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option {
public static class Option extends Suggest.Suggestion.Entry.Option {
public static final ParseField FREQ = new ParseField("freq");
@ -186,6 +225,11 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
this.freq = freq;
}
public Option(StreamInput in) throws IOException {
super(in);
freq = in.readVInt();
}
@Override
protected void mergeInto(Suggestion.Entry.Option otherOption) {
super.mergeInto(otherOption);
@ -207,12 +251,6 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
return freq;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
freq = in.readVInt();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
@ -220,8 +258,8 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder = super.innerToXContent(builder, params);
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder = super.toXContent(builder, params);
builder.field(FREQ.getPreferredName(), freq);
return builder;
}

View File

@ -67,7 +67,7 @@ import static org.elasticsearch.search.suggest.phrase.DirectCandidateGeneratorBu
*/
public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuilder> {
private static final String SUGGESTION_NAME = "term";
public static final String SUGGESTION_NAME = "term";
private SuggestMode suggestMode = SuggestMode.MISSING;
private float accuracy = DEFAULT_ACCURACY;

View File

@ -40,15 +40,11 @@ import java.util.Objects;
/**
* Stores information about failures that occurred during the shard snapshotting process
*/
public class SnapshotShardFailure implements ShardOperationFailedException {
private ShardId shardId;
private String reason;
public class SnapshotShardFailure extends ShardOperationFailedException {
@Nullable
private String nodeId;
private RestStatus status;
private ShardId shardId;
private SnapshotShardFailure() {
@ -74,56 +70,9 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
* @param status rest status
*/
private SnapshotShardFailure(@Nullable String nodeId, ShardId shardId, String reason, RestStatus status) {
assert reason != null;
super(shardId.getIndexName(), shardId.id(), reason, status, new IndexShardSnapshotFailedException(shardId, reason));
this.nodeId = nodeId;
this.shardId = shardId;
this.reason = reason;
this.status = status;
}
/**
* Returns index where failure occurred
*
* @return index
*/
@Override
public String index() {
return this.shardId.getIndexName();
}
/**
* Returns shard id where failure occurred
*
* @return shard id
*/
@Override
public int shardId() {
return this.shardId.id();
}
/**
* Returns reason for the failure
*
* @return reason for the failure
*/
@Override
public String reason() {
return this.reason;
}
/**
* Returns {@link RestStatus} corresponding to this failure
*
* @return REST status
*/
@Override
public RestStatus status() {
return status;
}
@Override
public Throwable getCause() {
return new IndexShardSnapshotFailedException(shardId, reason);
}
/**
@ -142,7 +91,7 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
* @param in stream input
* @return shard failure information
*/
public static SnapshotShardFailure readSnapshotShardFailure(StreamInput in) throws IOException {
static SnapshotShardFailure readSnapshotShardFailure(StreamInput in) throws IOException {
SnapshotShardFailure exp = new SnapshotShardFailure();
exp.readFrom(in);
return exp;
@ -152,6 +101,8 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
public void readFrom(StreamInput in) throws IOException {
nodeId = in.readOptionalString();
shardId = ShardId.readShardId(in);
super.shardId = shardId.getId();
super.index = shardId.getIndexName();
reason = in.readString();
status = RestStatus.readFrom(in);
}

View File

@ -588,7 +588,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements
* takes a {@link ConnectionProfile} that has been passed as a parameter to the public methods
* and resolves it to a fully specified (i.e., no nulls) profile
*/
static ConnectionProfile resolveConnectionProfile(@Nullable ConnectionProfile connectionProfile,
protected static ConnectionProfile resolveConnectionProfile(@Nullable ConnectionProfile connectionProfile,
ConnectionProfile defaultConnectionProfile) {
Objects.requireNonNull(defaultConnectionProfile);
if (connectionProfile == null) {

View File

@ -174,36 +174,13 @@ public class ExceptionsHelperTests extends ESTestCase {
return new ShardSearchFailure(queryShardException, null);
}
public void testGroupByNullCause() {
ShardOperationFailedException[] failures = new ShardOperationFailedException[] {
new ShardSearchFailure("error", createSearchShardTarget("node0", 0, "index", null)),
new ShardSearchFailure("error", createSearchShardTarget("node1", 1, "index", null)),
new ShardSearchFailure("error", createSearchShardTarget("node1", 1, "index2", null)),
new ShardSearchFailure("error", createSearchShardTarget("node2", 2, "index", "cluster1")),
new ShardSearchFailure("error", createSearchShardTarget("node1", 1, "index", "cluster1")),
new ShardSearchFailure("a different error", createSearchShardTarget("node3", 3, "index", "cluster1"))
};
ShardOperationFailedException[] groupBy = ExceptionsHelper.groupBy(failures);
assertThat(groupBy.length, equalTo(4));
String[] expectedIndices = new String[]{"index", "index2", "cluster1:index", "cluster1:index"};
String[] expectedErrors = new String[]{"error", "error", "error", "a different error"};
int i = 0;
for (ShardOperationFailedException shardOperationFailedException : groupBy) {
assertThat(shardOperationFailedException.reason(), equalTo(expectedErrors[i]));
assertThat(shardOperationFailedException.index(), equalTo(expectedIndices[i++]));
}
}
public void testGroupByNullIndex() {
ShardOperationFailedException[] failures = new ShardOperationFailedException[] {
new ShardSearchFailure("error", null),
new ShardSearchFailure(new IllegalArgumentException("error")),
new ShardSearchFailure(new ParsingException(0, 0, "error", null)),
};
ShardOperationFailedException[] groupBy = ExceptionsHelper.groupBy(failures);
assertThat(groupBy.length, equalTo(3));
assertThat(groupBy.length, equalTo(2));
}
}

View File

@@ -0,0 +1,76 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
public class ShardOperationFailedExceptionTests extends ESTestCase {
public void testCauseCannotBeNull() {
NullPointerException nullPointerException = expectThrows(NullPointerException.class, () -> new Failure(
randomAlphaOfLengthBetween(3, 10), randomInt(), randomAlphaOfLengthBetween(5, 10), randomFrom(RestStatus.values()), null));
assertEquals("cause cannot be null", nullPointerException.getMessage());
}
public void testStatusCannotBeNull() {
NullPointerException nullPointerException = expectThrows(NullPointerException.class, () -> new Failure(
randomAlphaOfLengthBetween(3, 10), randomInt(), randomAlphaOfLengthBetween(5, 10), null, new IllegalArgumentException()));
assertEquals("status cannot be null", nullPointerException.getMessage());
}
public void testReasonCannotBeNull() {
NullPointerException nullPointerException = expectThrows(NullPointerException.class, () -> new Failure(
randomAlphaOfLengthBetween(3, 10), randomInt(), null, randomFrom(RestStatus.values()), new IllegalArgumentException()));
assertEquals("reason cannot be null", nullPointerException.getMessage());
}
public void testIndexIsNullable() {
new Failure(null, randomInt(), randomAlphaOfLengthBetween(5, 10), randomFrom(RestStatus.values()), new IllegalArgumentException());
}
private static class Failure extends ShardOperationFailedException {
Failure(@Nullable String index, int shardId, String reason, RestStatus status, Throwable cause) {
super(index, shardId, reason, status, cause);
}
@Override
public void readFrom(StreamInput in) throws IOException {
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return null;
}
}
}
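Taken together, these tests pin down the superclass constructor contract. A sketch inferred from the assertion messages above (hypothetical; the real constructor lives in ShardOperationFailedException and needs java.util.Objects):

    protected ShardOperationFailedException(@Nullable String index, int shardId, String reason,
                                            RestStatus status, Throwable cause) {
        this.index = index;  // nullable, see testIndexIsNullable
        this.shardId = shardId;
        this.reason = Objects.requireNonNull(reason, "reason cannot be null");
        this.status = Objects.requireNonNull(status, "status cannot be null");
        this.cause = Objects.requireNonNull(cause, "cause cannot be null");
    }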

View File

@@ -22,8 +22,10 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData;
@@ -37,6 +39,7 @@ import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.Collections;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
@@ -49,24 +52,7 @@ import static org.mockito.Mockito.when;
public class QueryShardContextTests extends ESTestCase {
public void testFailIfFieldMappingNotFound() {
IndexMetaData.Builder indexMetadataBuilder = new IndexMetaData.Builder("index");
indexMetadataBuilder.settings(Settings.builder().put("index.version.created", Version.CURRENT)
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 1)
);
IndexMetaData indexMetaData = indexMetadataBuilder.build();
IndexSettings indexSettings = new IndexSettings(indexMetaData, Settings.EMPTY);
MapperService mapperService = mock(MapperService.class);
when(mapperService.getIndexSettings()).thenReturn(indexSettings);
when(mapperService.index()).thenReturn(indexMetaData.getIndex());
final long nowInMillis = randomNonNegativeLong();
QueryShardContext context = new QueryShardContext(
0, indexSettings, null, (mappedFieldType, idxName) ->
mappedFieldType.fielddataBuilder(idxName).build(indexSettings, mappedFieldType, null, null, null)
, mapperService, null, null, xContentRegistry(), writableRegistry(), null, null,
() -> nowInMillis, null);
QueryShardContext context = createQueryShardContext(IndexMetaData.INDEX_UUID_NA_VALUE, null);
context.setAllowUnmappedFields(false);
MappedFieldType fieldType = new TextFieldMapper.TextFieldType();
MappedFieldType result = context.failIfFieldMappingNotFound("name", fieldType);
@@ -91,30 +77,16 @@ public class QueryShardContextTests extends ESTestCase {
}
public void testClusterAlias() throws IOException {
IndexMetaData.Builder indexMetadataBuilder = new IndexMetaData.Builder("index");
indexMetadataBuilder.settings(Settings.builder().put("index.version.created", Version.CURRENT)
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 1)
);
IndexMetaData indexMetaData = indexMetadataBuilder.build();
IndexSettings indexSettings = new IndexSettings(indexMetaData, Settings.EMPTY);
MapperService mapperService = mock(MapperService.class);
when(mapperService.getIndexSettings()).thenReturn(indexSettings);
when(mapperService.index()).thenReturn(indexMetaData.getIndex());
final long nowInMillis = randomNonNegativeLong();
Mapper.BuilderContext ctx = new Mapper.BuilderContext(indexSettings.getSettings(), new ContentPath());
IndexFieldMapper mapper = new IndexFieldMapper.Builder(null).build(ctx);
final String clusterAlias = randomBoolean() ? null : "remote_cluster";
QueryShardContext context = new QueryShardContext(
0, indexSettings, null, (mappedFieldType, indexname) ->
mappedFieldType.fielddataBuilder(indexname).build(indexSettings, mappedFieldType, null, null, mapperService)
, mapperService, null, null, xContentRegistry(), writableRegistry(), null, null,
() -> nowInMillis, clusterAlias);
QueryShardContext context = createQueryShardContext(IndexMetaData.INDEX_UUID_NA_VALUE, clusterAlias);
Mapper.BuilderContext ctx = new Mapper.BuilderContext(context.getIndexSettings().getSettings(), new ContentPath());
IndexFieldMapper mapper = new IndexFieldMapper.Builder(null).build(ctx);
IndexFieldData<?> forField = context.getForField(mapper.fieldType());
String expected = clusterAlias == null ? indexMetaData.getIndex().getName()
: clusterAlias + ":" + indexMetaData.getIndex().getName();
String expected = clusterAlias == null ? context.getIndexSettings().getIndexMetaData().getIndex().getName()
: clusterAlias + ":" + context.getIndexSettings().getIndex().getName();
assertEquals(expected, ((AbstractAtomicOrdinalsFieldData)forField.load(null)).getOrdinalsValues().lookupOrd(0).utf8ToString());
Query query = mapper.fieldType().termQuery("index", context);
if (clusterAlias == null) {
@@ -133,4 +105,32 @@ public class QueryShardContextTests extends ESTestCase {
assertThat(query, Matchers.instanceOf(MatchNoDocsQuery.class));
}
public void testGetFullyQualifiedIndex() {
String clusterAlias = randomAlphaOfLengthBetween(5, 10);
String indexUuid = randomAlphaOfLengthBetween(3, 10);
QueryShardContext shardContext = createQueryShardContext(indexUuid, clusterAlias);
assertThat(shardContext.getFullyQualifiedIndex().getName(), equalTo(clusterAlias + ":index"));
assertThat(shardContext.getFullyQualifiedIndex().getUUID(), equalTo(indexUuid));
}
public static QueryShardContext createQueryShardContext(String indexUuid, String clusterAlias) {
IndexMetaData.Builder indexMetadataBuilder = new IndexMetaData.Builder("index");
indexMetadataBuilder.settings(Settings.builder().put("index.version.created", Version.CURRENT)
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 1)
.put(IndexMetaData.SETTING_INDEX_UUID, indexUuid)
);
IndexMetaData indexMetaData = indexMetadataBuilder.build();
IndexSettings indexSettings = new IndexSettings(indexMetaData, Settings.EMPTY);
MapperService mapperService = mock(MapperService.class);
when(mapperService.getIndexSettings()).thenReturn(indexSettings);
when(mapperService.index()).thenReturn(indexMetaData.getIndex());
final long nowInMillis = randomNonNegativeLong();
return new QueryShardContext(
0, indexSettings, null, (mappedFieldType, idxName) ->
mappedFieldType.fielddataBuilder(idxName).build(indexSettings, mappedFieldType, null, null, null)
, mapperService, null, null, NamedXContentRegistry.EMPTY, new NamedWriteableRegistry(Collections.emptyList()), null, null,
() -> nowInMillis, clusterAlias);
}
}

View File

@@ -0,0 +1,53 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.elasticsearch.index.Index;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.CoreMatchers.equalTo;
public class QueryShardExceptionTests extends ESTestCase {
public void testCreateFromQueryShardContext() {
String indexUuid = randomAlphaOfLengthBetween(5, 10);
String clusterAlias = randomAlphaOfLengthBetween(5, 10);
QueryShardContext queryShardContext = QueryShardContextTests.createQueryShardContext(indexUuid, clusterAlias);
{
QueryShardException queryShardException = new QueryShardException(queryShardContext, "error");
assertThat(queryShardException.getIndex().getName(), equalTo(clusterAlias + ":index"));
assertThat(queryShardException.getIndex().getUUID(), equalTo(indexUuid));
}
{
QueryShardException queryShardException = new QueryShardException(queryShardContext, "error", new IllegalArgumentException());
assertThat(queryShardException.getIndex().getName(), equalTo(clusterAlias + ":index"));
assertThat(queryShardException.getIndex().getUUID(), equalTo(indexUuid));
}
}
public void testCreateFromIndex() {
String indexUuid = randomAlphaOfLengthBetween(5, 10);
String indexName = randomAlphaOfLengthBetween(5, 10);
Index index = new Index(indexName, indexUuid);
QueryShardException queryShardException = new QueryShardException(index, "error", new IllegalArgumentException());
assertThat(queryShardException.getIndex().getName(), equalTo(indexName));
assertThat(queryShardException.getIndex().getUUID(), equalTo(indexUuid));
}
}

View File

@@ -32,7 +32,6 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.test.AbstractBuilderTestCase.STRING_ALIAS_FIELD_NAME;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
@@ -143,7 +142,7 @@ public class WildcardQueryBuilderTests extends AbstractQueryTestCase<WildcardQue
public void testIndexWildcard() throws IOException {
QueryShardContext context = createShardContext();
String index = context.getFullyQualifiedIndexName();
String index = context.getFullyQualifiedIndex().getName();
Query query = new WildcardQueryBuilder("_index", index).doToQuery(context);
assertThat(query instanceof MatchAllDocsQuery, equalTo(true));

View File

@@ -47,7 +47,9 @@ import java.util.Set;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.function.LongConsumer;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
@@ -60,7 +62,7 @@ import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.not;
public class ReplicationTrackerTests extends ESTestCase {
public void testEmptyShards() {
final ReplicationTracker tracker = newTracker(AllocationId.newInitializing());
assertThat(tracker.getGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
@@ -99,6 +101,11 @@ public class ReplicationTrackerTests extends ESTestCase {
return allocationIds.stream().map(AllocationId::getId).collect(Collectors.toSet());
}
private void updateLocalCheckpoint(final ReplicationTracker tracker, final String allocationId, final long localCheckpoint) {
tracker.updateLocalCheckpoint(allocationId, localCheckpoint);
assertThat(updatedGlobalCheckpoint.get(), equalTo(tracker.getGlobalCheckpoint()));
}
public void testGlobalCheckpointUpdate() {
final long initialClusterStateVersion = randomNonNegativeLong();
Map<AllocationId, Long> allocations = new HashMap<>();
@@ -137,14 +144,14 @@ public class ReplicationTrackerTests extends ESTestCase {
assertThat(tracker.getReplicationGroup().getReplicationTargets().size(), equalTo(1));
initializing.forEach(aId -> markAsTrackingAndInSyncQuietly(tracker, aId.getId(), NO_OPS_PERFORMED));
assertThat(tracker.getReplicationGroup().getReplicationTargets().size(), equalTo(1 + initializing.size()));
allocations.keySet().forEach(aId -> tracker.updateLocalCheckpoint(aId.getId(), allocations.get(aId)));
allocations.keySet().forEach(aId -> updateLocalCheckpoint(tracker, aId.getId(), allocations.get(aId)));
assertThat(tracker.getGlobalCheckpoint(), equalTo(minLocalCheckpoint));
// increment checkpoints
active.forEach(aId -> allocations.put(aId, allocations.get(aId) + 1 + randomInt(4)));
initializing.forEach(aId -> allocations.put(aId, allocations.get(aId) + 1 + randomInt(4)));
allocations.keySet().forEach(aId -> tracker.updateLocalCheckpoint(aId.getId(), allocations.get(aId)));
allocations.keySet().forEach(aId -> updateLocalCheckpoint(tracker, aId.getId(), allocations.get(aId)));
final long minLocalCheckpointAfterUpdates =
allocations.entrySet().stream().map(Map.Entry::getValue).min(Long::compareTo).orElse(UNASSIGNED_SEQ_NO);
@@ -153,7 +160,7 @@ public class ReplicationTrackerTests extends ESTestCase {
final AllocationId extraId = AllocationId.newInitializing();
// first check that adding it without the master's blessing doesn't change anything.
tracker.updateLocalCheckpoint(extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4));
updateLocalCheckpoint(tracker, extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4));
assertNull(tracker.checkpoints.get(extraId));
expectThrows(IllegalStateException.class, () -> tracker.initiateTracking(extraId.getId()));
@@ -165,7 +172,7 @@ public class ReplicationTrackerTests extends ESTestCase {
// now notify for the new id
if (randomBoolean()) {
tracker.updateLocalCheckpoint(extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4));
updateLocalCheckpoint(tracker, extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4));
markAsTrackingAndInSyncQuietly(tracker, extraId.getId(), randomInt((int) minLocalCheckpointAfterUpdates));
} else {
markAsTrackingAndInSyncQuietly(tracker, extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4));
@@ -175,6 +182,64 @@ public class ReplicationTrackerTests extends ESTestCase {
assertThat(tracker.getGlobalCheckpoint(), greaterThan(minLocalCheckpoint));
}
public void testUpdateGlobalCheckpointOnReplica() {
final AllocationId active = AllocationId.newInitializing();
final ReplicationTracker tracker = newTracker(active);
final long globalCheckpoint = randomLongBetween(NO_OPS_PERFORMED, Long.MAX_VALUE - 1);
tracker.updateGlobalCheckpointOnReplica(globalCheckpoint, "test");
assertThat(updatedGlobalCheckpoint.get(), equalTo(globalCheckpoint));
final long nonUpdate = randomLongBetween(NO_OPS_PERFORMED, globalCheckpoint);
updatedGlobalCheckpoint.set(UNASSIGNED_SEQ_NO);
tracker.updateGlobalCheckpointOnReplica(nonUpdate, "test");
assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO));
final long update = randomLongBetween(globalCheckpoint, Long.MAX_VALUE);
tracker.updateGlobalCheckpointOnReplica(update, "test");
assertThat(updatedGlobalCheckpoint.get(), equalTo(update));
}
public void testMarkAllocationIdAsInSync() throws BrokenBarrierException, InterruptedException {
final long initialClusterStateVersion = randomNonNegativeLong();
Map<AllocationId, Long> activeWithCheckpoints = randomAllocationsWithLocalCheckpoints(1, 1);
Set<AllocationId> active = new HashSet<>(activeWithCheckpoints.keySet());
Map<AllocationId, Long> initializingWithCheckpoints = randomAllocationsWithLocalCheckpoints(1, 1);
Set<AllocationId> initializing = new HashSet<>(initializingWithCheckpoints.keySet());
final AllocationId primaryId = active.iterator().next();
final AllocationId replicaId = initializing.iterator().next();
final ReplicationTracker tracker = newTracker(primaryId);
tracker.updateFromMaster(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId), emptySet());
final long localCheckpoint = randomLongBetween(0, Long.MAX_VALUE - 1);
tracker.activatePrimaryMode(localCheckpoint);
tracker.initiateTracking(replicaId.getId());
final CyclicBarrier barrier = new CyclicBarrier(2);
final Thread thread = new Thread(() -> {
try {
barrier.await();
tracker.markAllocationIdAsInSync(
replicaId.getId(),
randomLongBetween(NO_OPS_PERFORMED, localCheckpoint - 1));
barrier.await();
} catch (BrokenBarrierException | InterruptedException e) {
throw new AssertionError(e);
}
});
thread.start();
barrier.await();
awaitBusy(tracker::pendingInSync);
final long updatedLocalCheckpoint = randomLongBetween(1 + localCheckpoint, Long.MAX_VALUE);
// there is a shard copy pending in sync, so the global checkpoint cannot advance
updatedGlobalCheckpoint.set(UNASSIGNED_SEQ_NO);
tracker.updateLocalCheckpoint(primaryId.getId(), updatedLocalCheckpoint);
assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO));
// we are implicitly marking the pending in-sync copy as in sync with the current global checkpoint, so no advancement should occur
tracker.updateLocalCheckpoint(replicaId.getId(), localCheckpoint);
assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO));
barrier.await();
thread.join();
// now we expect the global checkpoint to advance
tracker.markAllocationIdAsInSync(replicaId.getId(), updatedLocalCheckpoint);
assertThat(updatedGlobalCheckpoint.get(), equalTo(updatedLocalCheckpoint));
}
public void testMissingActiveIdsPreventAdvance() {
final Map<AllocationId, Long> active = randomAllocationsWithLocalCheckpoints(2, 5);
final Map<AllocationId, Long> initializing = randomAllocationsWithLocalCheckpoints(0, 5);
@@ -191,14 +256,16 @@ public class ReplicationTrackerTests extends ESTestCase {
.entrySet()
.stream()
.filter(e -> !e.getKey().equals(missingActiveID))
.forEach(e -> tracker.updateLocalCheckpoint(e.getKey().getId(), e.getValue()));
.forEach(e -> updateLocalCheckpoint(tracker, e.getKey().getId(), e.getValue()));
if (missingActiveID.equals(primaryId) == false) {
assertThat(tracker.getGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO));
}
// now update all knowledge of all shards
assigned.forEach((aid, localCP) -> tracker.updateLocalCheckpoint(aid.getId(), localCP));
assigned.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP));
assertThat(tracker.getGlobalCheckpoint(), not(equalTo(UNASSIGNED_SEQ_NO)));
assertThat(updatedGlobalCheckpoint.get(), not(equalTo(UNASSIGNED_SEQ_NO)));
}
public void testMissingInSyncIdsPreventAdvance() {
@@ -213,13 +280,15 @@ public class ReplicationTrackerTests extends ESTestCase {
randomSubsetOf(randomIntBetween(1, initializing.size() - 1),
initializing.keySet()).forEach(aId -> markAsTrackingAndInSyncQuietly(tracker, aId.getId(), NO_OPS_PERFORMED));
active.forEach((aid, localCP) -> tracker.updateLocalCheckpoint(aid.getId(), localCP));
active.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP));
assertThat(tracker.getGlobalCheckpoint(), equalTo(NO_OPS_PERFORMED));
assertThat(updatedGlobalCheckpoint.get(), equalTo(NO_OPS_PERFORMED));
// update again
initializing.forEach((aid, localCP) -> tracker.updateLocalCheckpoint(aid.getId(), localCP));
initializing.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP));
assertThat(tracker.getGlobalCheckpoint(), not(equalTo(UNASSIGNED_SEQ_NO)));
assertThat(updatedGlobalCheckpoint.get(), not(equalTo(UNASSIGNED_SEQ_NO)));
}
public void testInSyncIdsAreIgnoredIfNotValidatedByMaster() {
@@ -236,7 +305,7 @@ public class ReplicationTrackerTests extends ESTestCase {
List<Map<AllocationId, Long>> allocations = Arrays.asList(active, initializing, nonApproved);
Collections.shuffle(allocations, random());
allocations.forEach(a -> a.forEach((aid, localCP) -> tracker.updateLocalCheckpoint(aid.getId(), localCP)));
allocations.forEach(a -> a.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP)));
assertThat(tracker.getGlobalCheckpoint(), not(equalTo(UNASSIGNED_SEQ_NO)));
}
@@ -271,7 +340,7 @@ public class ReplicationTrackerTests extends ESTestCase {
initializing.forEach(k -> markAsTrackingAndInSyncQuietly(tracker, k.getId(), NO_OPS_PERFORMED));
}
if (randomBoolean()) {
allocations.forEach((aid, localCP) -> tracker.updateLocalCheckpoint(aid.getId(), localCP));
allocations.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP));
}
// now remove shards
@@ -281,9 +350,9 @@ public class ReplicationTrackerTests extends ESTestCase {
ids(activeToStay.keySet()),
routingTable(initializingToStay.keySet(), primaryId),
emptySet());
allocations.forEach((aid, ckp) -> tracker.updateLocalCheckpoint(aid.getId(), ckp + 10L));
allocations.forEach((aid, ckp) -> updateLocalCheckpoint(tracker, aid.getId(), ckp + 10L));
} else {
allocations.forEach((aid, ckp) -> tracker.updateLocalCheckpoint(aid.getId(), ckp + 10L));
allocations.forEach((aid, ckp) -> updateLocalCheckpoint(tracker, aid.getId(), ckp + 10L));
tracker.updateFromMaster(
initialClusterStateVersion + 2,
ids(activeToStay.keySet()),
@@ -331,7 +400,7 @@ public class ReplicationTrackerTests extends ESTestCase {
final List<Integer> elements = IntStream.rangeClosed(0, globalCheckpoint - 1).boxed().collect(Collectors.toList());
Randomness.shuffle(elements);
for (int i = 0; i < elements.size(); i++) {
tracker.updateLocalCheckpoint(trackingAllocationId.getId(), elements.get(i));
updateLocalCheckpoint(tracker, trackingAllocationId.getId(), elements.get(i));
assertFalse(complete.get());
assertFalse(tracker.getTrackedLocalCheckpointForShard(trackingAllocationId.getId()).inSync);
assertBusy(() -> assertTrue(tracker.pendingInSync.contains(trackingAllocationId.getId())));
@@ -339,7 +408,7 @@ public class ReplicationTrackerTests extends ESTestCase {
if (randomBoolean()) {
// normal path, shard catches up
tracker.updateLocalCheckpoint(trackingAllocationId.getId(), randomIntBetween(globalCheckpoint, 64));
updateLocalCheckpoint(tracker, trackingAllocationId.getId(), randomIntBetween(globalCheckpoint, 64));
// synchronize with the waiting thread to mark that it is complete
barrier.await();
assertTrue(complete.get());
@@ -355,13 +424,16 @@ public class ReplicationTrackerTests extends ESTestCase {
assertFalse(tracker.pendingInSync.contains(trackingAllocationId.getId()));
thread.join();
}
private AtomicLong updatedGlobalCheckpoint = new AtomicLong(UNASSIGNED_SEQ_NO);
private ReplicationTracker newTracker(final AllocationId allocationId) {
return new ReplicationTracker(
new ShardId("test", "_na_", 0),
allocationId.getId(),
IndexSettingsModule.newIndexSettings("test", Settings.EMPTY),
UNASSIGNED_SEQ_NO);
UNASSIGNED_SEQ_NO,
updatedGlobalCheckpoint::set);
}
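The AtomicLong above is a probe: every tracker built by newTracker reports global checkpoint advances through the new LongConsumer constructor argument, and the updateLocalCheckpoint helper asserts the probe agrees with the tracker. A self-contained illustration of the pattern, with hypothetical names outside Elasticsearch:

    import java.util.concurrent.atomic.AtomicLong;
    import java.util.function.LongConsumer;

    class CheckpointedComponent {
        private long globalCheckpoint = -2;  // mirrors UNASSIGNED_SEQ_NO
        private final LongConsumer onUpdate;

        CheckpointedComponent(LongConsumer onUpdate) {
            this.onUpdate = onUpdate;
        }

        void advanceTo(long checkpoint) {
            if (checkpoint > globalCheckpoint) {
                globalCheckpoint = checkpoint;
                onUpdate.accept(checkpoint);  // notify exactly when the value moves
            }
        }
    }

    // test side:
    // AtomicLong observed = new AtomicLong(-2);
    // CheckpointedComponent c = new CheckpointedComponent(observed::set);
    // c.advanceTo(7); assert observed.get() == 7L;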
public void testWaitForAllocationIdToBeInSyncCanBeInterrupted() throws BrokenBarrierException, InterruptedException {
@@ -488,10 +560,10 @@ public class ReplicationTrackerTests extends ESTestCase {
// the tracking allocation IDs should play no role in determining the global checkpoint
final Map<AllocationId, Integer> activeLocalCheckpoints =
newActiveAllocationIds.stream().collect(Collectors.toMap(Function.identity(), a -> randomIntBetween(1, 1024)));
activeLocalCheckpoints.forEach((a, l) -> tracker.updateLocalCheckpoint(a.getId(), l));
activeLocalCheckpoints.forEach((a, l) -> updateLocalCheckpoint(tracker, a.getId(), l));
final Map<AllocationId, Integer> initializingLocalCheckpoints =
newInitializingAllocationIds.stream().collect(Collectors.toMap(Function.identity(), a -> randomIntBetween(1, 1024)));
initializingLocalCheckpoints.forEach((a, l) -> tracker.updateLocalCheckpoint(a.getId(), l));
initializingLocalCheckpoints.forEach((a, l) -> updateLocalCheckpoint(tracker, a.getId(), l));
assertTrue(
activeLocalCheckpoints
.entrySet()
@@ -504,6 +576,7 @@ public class ReplicationTrackerTests extends ESTestCase {
.allMatch(e -> tracker.getTrackedLocalCheckpointForShard(e.getKey().getId()).getLocalCheckpoint() == e.getValue()));
final long minimumActiveLocalCheckpoint = (long) activeLocalCheckpoints.values().stream().min(Integer::compareTo).get();
assertThat(tracker.getGlobalCheckpoint(), equalTo(minimumActiveLocalCheckpoint));
assertThat(updatedGlobalCheckpoint.get(), equalTo(minimumActiveLocalCheckpoint));
final long minimumInitailizingLocalCheckpoint = (long) initializingLocalCheckpoints.values().stream().min(Integer::compareTo).get();
// now we are going to add a new allocation ID and bring it in sync which should move it to the in-sync allocation IDs
@@ -635,10 +708,11 @@ public class ReplicationTrackerTests extends ESTestCase {
FakeClusterState clusterState = initialState();
final AllocationId primaryAllocationId = clusterState.routingTable.primaryShard().allocationId();
final LongConsumer onUpdate = updatedGlobalCheckpoint -> {};
ReplicationTracker oldPrimary =
new ReplicationTracker(shardId, primaryAllocationId.getId(), indexSettings, UNASSIGNED_SEQ_NO);
new ReplicationTracker(shardId, primaryAllocationId.getId(), indexSettings, UNASSIGNED_SEQ_NO, onUpdate);
ReplicationTracker newPrimary =
new ReplicationTracker(shardId, primaryAllocationId.getRelocationId(), indexSettings, UNASSIGNED_SEQ_NO);
new ReplicationTracker(shardId, primaryAllocationId.getRelocationId(), indexSettings, UNASSIGNED_SEQ_NO, onUpdate);
Set<String> allocationIds = new HashSet<>(Arrays.asList(oldPrimary.shardAllocationId, newPrimary.shardAllocationId));

View File

@@ -31,6 +31,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.Constants;
import org.elasticsearch.Assertions;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
@@ -560,28 +561,20 @@ public class IndexShardTests extends IndexShardTestCase {
ShardRouting primaryRouting = newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), null,
true, ShardRoutingState.STARTED, replicaRouting.allocationId());
final long newPrimaryTerm = indexShard.getPendingPrimaryTerm() + between(1, 1000);
CountDownLatch latch = new CountDownLatch(1);
indexShard.updateShardState(primaryRouting, newPrimaryTerm, (shard, listener) -> {
assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(newPrimaryTerm));
latch.countDown();
}, 0L,
Collections.singleton(indexShard.routingEntry().allocationId().getId()),
new IndexShardRoutingTable.Builder(indexShard.shardId()).addShard(primaryRouting).build(),
Collections.emptySet());
latch.await();
} else {
indexShard = newStartedShard(true);
}
final long primaryTerm = indexShard.getPendingPrimaryTerm();
assertEquals(0, indexShard.getActiveOperationsCount());
if (indexShard.routingEntry().isRelocationTarget() == false) {
try {
final PlainActionFuture<Releasable> permitAcquiredFuture = new PlainActionFuture<>();
indexShard.acquireReplicaOperationPermit(primaryTerm, indexShard.getGlobalCheckpoint(), permitAcquiredFuture,
ThreadPool.Names.WRITE, "");
permitAcquiredFuture.actionGet();
fail("shard shouldn't accept operations as replica");
} catch (IllegalStateException ignored) {
}
}
Releasable operation1 = acquirePrimaryOperationPermitBlockingly(indexShard);
assertEquals(1, indexShard.getActiveOperationsCount());
Releasable operation2 = acquirePrimaryOperationPermitBlockingly(indexShard);
@@ -590,6 +583,22 @@ public class IndexShardTests extends IndexShardTestCase {
Releasables.close(operation1, operation2);
assertEquals(0, indexShard.getActiveOperationsCount());
if (Assertions.ENABLED && indexShard.routingEntry().isRelocationTarget() == false) {
assertThat(expectThrows(AssertionError.class, () -> indexShard.acquireReplicaOperationPermit(primaryTerm,
indexShard.getGlobalCheckpoint(), new ActionListener<Releasable>() {
@Override
public void onResponse(Releasable releasable) {
fail();
}
@Override
public void onFailure(Exception e) {
fail();
}
},
ThreadPool.Names.WRITE, "")).getMessage(), containsString("in primary mode cannot be a replication target"));
}
closeShards(indexShard);
}
@@ -647,11 +656,11 @@ public class IndexShardTests extends IndexShardTestCase {
logger.info("shard routing to {}", shardRouting);
assertEquals(0, indexShard.getActiveOperationsCount());
if (shardRouting.primary() == false) {
final IllegalStateException e =
expectThrows(IllegalStateException.class,
if (shardRouting.primary() == false && Assertions.ENABLED) {
final AssertionError e =
expectThrows(AssertionError.class,
() -> indexShard.acquirePrimaryOperationPermit(null, ThreadPool.Names.WRITE, ""));
assertThat(e, hasToString(containsString("shard " + shardRouting + " is not a primary")));
assertThat(e, hasToString(containsString("acquirePrimaryOperationPermit should only be called on primary shard")));
}
final long primaryTerm = indexShard.getPendingPrimaryTerm();

View File

@@ -202,49 +202,6 @@ public class RestActionsTests extends ESTestCase {
new ShardId(new Index(index, IndexMetaData.INDEX_UUID_NA_VALUE), shardId), clusterAlias, OriginalIndices.NONE);
}
public void testBuildBroadcastShardsHeaderNullCause() throws Exception {
ShardOperationFailedException[] failures = new ShardOperationFailedException[] {
new ShardSearchFailure("error", createSearchShardTarget("node0", 0, "index", null)),
new ShardSearchFailure("error", createSearchShardTarget("node1", 1, "index", null)),
new ShardSearchFailure("error", createSearchShardTarget("node2", 2, "index", "cluster1")),
new ShardSearchFailure("error", createSearchShardTarget("node1", 1, "index", "cluster1")),
new ShardSearchFailure("a different error", createSearchShardTarget("node3", 3, "index", "cluster1"))
};
XContentBuilder builder = JsonXContent.contentBuilder();
builder.prettyPrint();
builder.startObject();
RestActions.buildBroadcastShardsHeader(builder, ToXContent.EMPTY_PARAMS, 12, 3, 0, 9, failures);
builder.endObject();
//TODO the reason is not printed out, as a follow-up we should probably either print it out when the cause is null,
//or even better enforce that the cause can't be null
assertThat(Strings.toString(builder), equalTo("{\n" +
" \"_shards\" : {\n" +
" \"total\" : 12,\n" +
" \"successful\" : 3,\n" +
" \"skipped\" : 0,\n" +
" \"failed\" : 9,\n" +
" \"failures\" : [\n" +
" {\n" +
" \"shard\" : 0,\n" +
" \"index\" : \"index\",\n" +
" \"node\" : \"node0\"\n" +
" },\n" +
" {\n" +
" \"shard\" : 2,\n" +
" \"index\" : \"cluster1:index\",\n" +
" \"node\" : \"node2\"\n" +
" },\n" +
" {\n" +
" \"shard\" : 3,\n" +
" \"index\" : \"cluster1:index\",\n" +
" \"node\" : \"node3\"\n" +
" }\n" +
" ]\n" +
" }\n" +
"}"));
}
@Override
protected NamedXContentRegistry xContentRegistry() {
return xContentRegistry;

View File

@@ -18,6 +18,8 @@
*/
package org.elasticsearch.search;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.inject.ModuleTestCase;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -64,8 +66,11 @@ import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.rescore.QueryRescorerBuilder;
import org.elasticsearch.search.rescore.RescoreContext;
import org.elasticsearch.search.rescore.RescorerBuilder;
import org.elasticsearch.search.suggest.CustomSuggesterSearchIT.CustomSuggestionBuilder;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.search.suggest.term.TermSuggestion;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
import java.io.IOException;
@@ -98,7 +103,8 @@ public class SearchModuleTests extends ModuleTestCase {
SearchPlugin registersDupeSuggester = new SearchPlugin() {
@Override
public List<SearchPlugin.SuggesterSpec<?>> getSuggesters() {
return singletonList(new SuggesterSpec<>("term", TermSuggestionBuilder::new, TermSuggestionBuilder::fromXContent));
return singletonList(new SuggesterSpec<>(TermSuggestionBuilder.SUGGESTION_NAME,
TermSuggestionBuilder::new, TermSuggestionBuilder::fromXContent, TermSuggestion::new));
}
};
expectThrows(IllegalArgumentException.class, registryForPlugin(registersDupeSuggester));
@@ -183,9 +189,15 @@
SearchModule module = new SearchModule(Settings.EMPTY, false, singletonList(new SearchPlugin() {
@Override
public List<SuggesterSpec<?>> getSuggesters() {
return singletonList(new SuggesterSpec<>("custom", CustomSuggestionBuilder::new, CustomSuggestionBuilder::fromXContent));
return singletonList(
new SuggesterSpec<>(
TestSuggestionBuilder.SUGGESTION_NAME,
TestSuggestionBuilder::new,
TestSuggestionBuilder::fromXContent,
TestSuggestion::new));
}
}));
assertEquals(1, module.getNamedXContents().stream()
.filter(e -> e.categoryClass.equals(SuggestionBuilder.class) &&
e.name.match("term", LoggingDeprecationHandler.INSTANCE)).count());
@@ -197,7 +209,7 @@
e.name.match("completion", LoggingDeprecationHandler.INSTANCE)).count());
assertEquals(1, module.getNamedXContents().stream()
.filter(e -> e.categoryClass.equals(SuggestionBuilder.class) &&
e.name.match("custom", LoggingDeprecationHandler.INSTANCE)).count());
e.name.match("test", LoggingDeprecationHandler.INSTANCE)).count());
assertEquals(1, module.getNamedWriteables().stream()
.filter(e -> e.categoryClass.equals(SuggestionBuilder.class) && e.name.equals("term")).count());
@@ -206,7 +218,16 @@
assertEquals(1, module.getNamedWriteables().stream()
.filter(e -> e.categoryClass.equals(SuggestionBuilder.class) && e.name.equals("completion")).count());
assertEquals(1, module.getNamedWriteables().stream()
.filter(e -> e.categoryClass.equals(SuggestionBuilder.class) && e.name.equals("custom")).count());
.filter(e -> e.categoryClass.equals(SuggestionBuilder.class) && e.name.equals("test")).count());
assertEquals(1, module.getNamedWriteables().stream()
.filter(e -> e.categoryClass.equals(Suggestion.class) && e.name.equals("term")).count());
assertEquals(1, module.getNamedWriteables().stream()
.filter(e -> e.categoryClass.equals(Suggestion.class) && e.name.equals("phrase")).count());
assertEquals(1, module.getNamedWriteables().stream()
.filter(e -> e.categoryClass.equals(Suggestion.class) && e.name.equals("completion")).count());
assertEquals(1, module.getNamedWriteables().stream()
.filter(e -> e.categoryClass.equals(Suggestion.class) && e.name.equals("test")).count());
}
public void testRegisterHighlighter() {
@@ -498,4 +519,77 @@
return null;
}
}
private static class TestSuggester extends Suggester<SuggestionSearchContext.SuggestionContext> {
@Override
protected Suggestion<? extends Suggestion.Entry<? extends Suggestion.Entry.Option>> innerExecute(
String name,
SuggestionSearchContext.SuggestionContext suggestion,
IndexSearcher searcher,
CharsRefBuilder spare) throws IOException {
return null;
}
}
private static class TestSuggestionBuilder extends SuggestionBuilder<TestSuggestionBuilder> {
public static final String SUGGESTION_NAME = "test";
TestSuggestionBuilder(StreamInput in) throws IOException {
super(in);
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {}
public static TestSuggestionBuilder fromXContent(XContentParser parser) {
return null;
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
return null;
}
@Override
protected SuggestionSearchContext.SuggestionContext build(QueryShardContext context) throws IOException {
return null;
}
@Override
protected boolean doEquals(TestSuggestionBuilder other) {
return false;
}
@Override
protected int doHashCode() {
return 0;
}
@Override
public String getWriteableName() {
return "test";
}
}
private static class TestSuggestion extends Suggestion {
TestSuggestion(StreamInput in) throws IOException {
super(in);
}
@Override
protected Entry newEntry() {
return null;
}
@Override
protected Entry newEntry(StreamInput in) throws IOException {
return null;
}
@Override
public String getWriteableName() {
return "test";
}
}
}
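The four-argument SuggesterSpec exercised throughout this test is the heart of the registration change: a plugin must now supply a Writeable.Reader for its Suggestion response type in addition to the builder's stream reader and REST parser. A minimal plugin sketch reusing the test classes above (the plugin class name is hypothetical; imports as in the deleted CustomSuggesterPlugin further down):

    public class TestSuggesterPlugin extends Plugin implements SearchPlugin {
        @Override
        public List<SuggesterSpec<?>> getSuggesters() {
            return singletonList(new SuggesterSpec<>(
                TestSuggestionBuilder.SUGGESTION_NAME,  // "test"
                TestSuggestionBuilder::new,             // stream reader for the builder
                TestSuggestionBuilder::fromXContent,    // REST parser for the builder
                TestSuggestion::new));                  // stream reader for the suggestion response
        }
    }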

View File

@@ -56,7 +56,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations) {
roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null);
int nbBuckets = randomNumberOfBuckets();
int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1);
@ -137,6 +137,12 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
assertEquals(expectedCounts, actualCounts);
}
@Override
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32215")
public void testReduceRandom() {
super.testReduceRandom();
}
@Override
protected Writeable.Reader<InternalAutoDateHistogram> instanceReader() {
return InternalAutoDateHistogram::new;

View File

@@ -1,63 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.Locale;
import java.util.Map;
public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestionsContext> {
public static final CustomSuggester INSTANCE = new CustomSuggester();
// This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123
@Override
public Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(String name, CustomSuggestionsContext suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException {
// Get the suggestion context
String text = suggestion.getText().utf8ToString();
// create two suggestions with 12 and 123 appended
Suggest.Suggestion<Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option>> response = new Suggest.Suggestion<>(name, suggestion.getSize());
String firstSuggestion = String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "12");
Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option> resultEntry12 = new Suggest.Suggestion.Entry<>(new Text(firstSuggestion), 0, text.length() + 2);
response.addTerm(resultEntry12);
String secondSuggestion = String.format(Locale.ROOT, "%s-%s-%s-%s", text, suggestion.getField(), suggestion.options.get("suffix"), "123");
Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option> resultEntry123 = new Suggest.Suggestion.Entry<>(new Text(secondSuggestion), 0, text.length() + 3);
response.addTerm(resultEntry123);
return response;
}
public static class CustomSuggestionsContext extends SuggestionSearchContext.SuggestionContext {
public Map<String, Object> options;
public CustomSuggestionsContext(QueryShardContext context, Map<String, Object> options) {
super(new CustomSuggester(), context);
this.options = options;
}
}
}

View File

@@ -1,212 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.singletonList;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
/**
* Integration test for registering a custom suggester.
*/
@ClusterScope(scope = Scope.SUITE, numDataNodes = 1)
public class CustomSuggesterSearchIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(CustomSuggesterPlugin.class);
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return Arrays.asList(CustomSuggesterPlugin.class);
}
public static class CustomSuggesterPlugin extends Plugin implements SearchPlugin {
@Override
public List<SuggesterSpec<?>> getSuggesters() {
return singletonList(new SuggesterSpec<CustomSuggestionBuilder>("custom", CustomSuggestionBuilder::new,
CustomSuggestionBuilder::fromXContent));
}
}
public void testThatCustomSuggestersCanBeRegisteredAndWork() throws Exception {
createIndex("test");
client().prepareIndex("test", "test", "1").setSource(jsonBuilder()
.startObject()
.field("name", "arbitrary content")
.endObject())
.setRefreshPolicy(IMMEDIATE).get();
String randomText = randomAlphaOfLength(10);
String randomField = randomAlphaOfLength(10);
String randomSuffix = randomAlphaOfLength(10);
SuggestBuilder suggestBuilder = new SuggestBuilder();
suggestBuilder.addSuggestion("someName", new CustomSuggestionBuilder(randomField, randomSuffix).text(randomText));
SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test").setTypes("test").setFrom(0).setSize(1)
.suggest(suggestBuilder);
SearchResponse searchResponse = searchRequestBuilder.execute().actionGet();
// TODO: infer type once JI-9019884 is fixed
// TODO: see also JDK-8039214
List<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> suggestions =
CollectionUtils.<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>iterableAsArrayList(
searchResponse.getSuggest().getSuggestion("someName"));
assertThat(suggestions, hasSize(2));
assertThat(suggestions.get(0).getText().string(),
is(String.format(Locale.ROOT, "%s-%s-%s-12", randomText, randomField, randomSuffix)));
assertThat(suggestions.get(1).getText().string(),
is(String.format(Locale.ROOT, "%s-%s-%s-123", randomText, randomField, randomSuffix)));
}
public static class CustomSuggestionBuilder extends SuggestionBuilder<CustomSuggestionBuilder> {
protected static final ParseField RANDOM_SUFFIX_FIELD = new ParseField("suffix");
private String randomSuffix;
public CustomSuggestionBuilder(String randomField, String randomSuffix) {
super(randomField);
this.randomSuffix = randomSuffix;
}
/**
* Read from a stream.
*/
public CustomSuggestionBuilder(StreamInput in) throws IOException {
super(in);
this.randomSuffix = in.readString();
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeString(randomSuffix);
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(RANDOM_SUFFIX_FIELD.getPreferredName(), randomSuffix);
return builder;
}
@Override
public String getWriteableName() {
return "custom";
}
@Override
protected boolean doEquals(CustomSuggestionBuilder other) {
return Objects.equals(randomSuffix, other.randomSuffix);
}
@Override
protected int doHashCode() {
return Objects.hash(randomSuffix);
}
public static CustomSuggestionBuilder fromXContent(XContentParser parser) throws IOException {
XContentParser.Token token;
String currentFieldName = null;
String fieldname = null;
String suffix = null;
String analyzer = null;
int sizeField = -1;
int shardSize = -1;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (SuggestionBuilder.ANALYZER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
analyzer = parser.text();
} else if (SuggestionBuilder.FIELDNAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
fieldname = parser.text();
} else if (SuggestionBuilder.SIZE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
sizeField = parser.intValue();
} else if (SuggestionBuilder.SHARDSIZE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
shardSize = parser.intValue();
} else if (RANDOM_SUFFIX_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
suffix = parser.text();
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"suggester[custom] doesn't support field [" + currentFieldName + "]");
}
}
// now we should have the field name; check and copy the fields over to the suggestion builder we return
if (fieldname == null) {
throw new ParsingException(parser.getTokenLocation(), "the required field option is missing");
}
CustomSuggestionBuilder builder = new CustomSuggestionBuilder(fieldname, suffix);
if (analyzer != null) {
builder.analyzer(analyzer);
}
if (sizeField != -1) {
builder.size(sizeField);
}
if (shardSize != -1) {
builder.shardSize(shardSize);
}
return builder;
}
@Override
public SuggestionContext build(QueryShardContext context) throws IOException {
Map<String, Object> options = new HashMap<>();
options.put(FIELDNAME_FIELD.getPreferredName(), field());
options.put(RANDOM_SUFFIX_FIELD.getPreferredName(), randomSuffix);
CustomSuggester.CustomSuggestionsContext customSuggestionsContext =
new CustomSuggester.CustomSuggestionsContext(context, options);
customSuggestionsContext.setField(field());
assert text != null;
customSuggestionsContext.setText(BytesRefs.toBytesRef(text));
return customSuggestionsContext;
}
}
}

View File

@@ -19,9 +19,14 @@
package org.elasticsearch.search.suggest;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
@@ -30,6 +35,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
@@ -37,6 +43,7 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestion;
import org.elasticsearch.search.suggest.term.TermSuggestion;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import java.util.ArrayList;
@@ -44,6 +51,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.emptyList;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
@@ -114,10 +122,11 @@ public class SuggestTests extends ESTestCase {
}
public void testToXContent() throws IOException {
Option option = new Option(new Text("someText"), new Text("somethingHighlighted"), 1.3f, true);
Entry<Option> entry = new Entry<>(new Text("entryText"), 42, 313);
PhraseSuggestion.Entry.Option option = new PhraseSuggestion.Entry.Option(new Text("someText"), new Text("somethingHighlighted"),
1.3f, true);
PhraseSuggestion.Entry entry = new PhraseSuggestion.Entry(new Text("entryText"), 42, 313);
entry.addOption(option);
Suggestion<Entry<Option>> suggestion = new Suggestion<>("suggestionName", 5);
PhraseSuggestion suggestion = new PhraseSuggestion("suggestionName", 5);
suggestion.addTerm(entry);
Suggest suggest = new Suggest(Collections.singletonList(suggestion));
BytesReference xContent = toXContent(suggest, XContentType.JSON, randomBoolean());
@@ -196,9 +205,9 @@
String secondWord = randomAlphaOfLength(10);
Text suggestionText = new Text(suggestedWord + " " + secondWord);
Text highlighted = new Text("<em>" + suggestedWord + "</em> " + secondWord);
PhraseSuggestion.Entry.Option option1 = new Option(suggestionText, highlighted, 0.7f, false);
PhraseSuggestion.Entry.Option option2 = new Option(suggestionText, highlighted, 0.8f, true);
PhraseSuggestion.Entry.Option option3 = new Option(suggestionText, highlighted, 0.6f);
PhraseSuggestion.Entry.Option option1 = new PhraseSuggestion.Entry.Option(suggestionText, highlighted, 0.7f, false);
PhraseSuggestion.Entry.Option option2 = new PhraseSuggestion.Entry.Option(suggestionText, highlighted, 0.8f, true);
PhraseSuggestion.Entry.Option option3 = new PhraseSuggestion.Entry.Option(suggestionText, highlighted, 0.6f);
assertEquals(suggestionText, option1.getText());
assertEquals(highlighted, option1.getHighlighted());
assertFalse(option1.collateMatch());
@@ -214,4 +223,39 @@
assertTrue(option1.getScore() > 0.7f);
assertTrue(option1.collateMatch());
}
public void testSerialization() throws IOException {
final Version bwcVersion = VersionUtils.randomVersionBetween(random(),
Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT);
final Suggest suggest = createTestItem();
final Suggest bwcSuggest;
NamedWriteableRegistry registry = new NamedWriteableRegistry(
new SearchModule(Settings.EMPTY, false, emptyList()).getNamedWriteables());
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.setVersion(bwcVersion);
suggest.writeTo(out);
try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) {
in.setVersion(bwcVersion);
bwcSuggest = new Suggest(in);
}
}
assertEquals(suggest, bwcSuggest);
final Suggest backAgain;
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.setVersion(Version.CURRENT);
bwcSuggest.writeTo(out);
try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) {
in.setVersion(Version.CURRENT);
backAgain = new Suggest(in);
}
}
assertEquals(suggest, backAgain);
}
}
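The two round trips above (Version.CURRENT to bwcVersion, then bwcVersion back to Version.CURRENT) form a standard wire-compatibility check. As a generic helper sketch (hypothetical, not part of this diff; it relies only on APIs already used in the test), the pattern factors out to:

    static <T extends Writeable> T roundTrip(T original, Writeable.Reader<T> reader,
                                             NamedWriteableRegistry registry, Version version) throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.setVersion(version);
            original.writeTo(out);
            try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) {
                in.setVersion(version);
                return reader.read(in);
            }
        }
    }

    // e.g. Suggest restored = roundTrip(suggest, Suggest::new, registry, bwcVersion);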

View File

@@ -129,10 +129,12 @@ public class SuggestionEntryTests extends ESTestCase {
}
public void testToXContent() throws IOException {
Option option = new Option(new Text("someText"), new Text("somethingHighlighted"), 1.3f, true);
Entry<Option> entry = new Entry<>(new Text("entryText"), 42, 313);
entry.addOption(option);
BytesReference xContent = toXContent(entry, XContentType.JSON, randomBoolean());
PhraseSuggestion.Entry.Option phraseOption = new PhraseSuggestion.Entry.Option(new Text("someText"),
new Text("somethingHighlighted"),
1.3f, true);
PhraseSuggestion.Entry phraseEntry = new PhraseSuggestion.Entry(new Text("entryText"), 42, 313);
phraseEntry.addOption(phraseOption);
BytesReference xContent = toXContent(phraseEntry, XContentType.JSON, randomBoolean());
assertEquals(
"{\"text\":\"entryText\","
+ "\"offset\":42,"
@@ -144,11 +146,10 @@
+ "\"collate_match\":true}"
+ "]}", xContent.utf8ToString());
org.elasticsearch.search.suggest.term.TermSuggestion.Entry.Option termOption =
new org.elasticsearch.search.suggest.term.TermSuggestion.Entry.Option(new Text("termSuggestOption"), 42, 3.13f);
entry = new Entry<>(new Text("entryText"), 42, 313);
entry.addOption(termOption);
xContent = toXContent(entry, XContentType.JSON, randomBoolean());
TermSuggestion.Entry.Option termOption = new TermSuggestion.Entry.Option(new Text("termSuggestOption"), 42, 3.13f);
TermSuggestion.Entry termEntry = new TermSuggestion.Entry(new Text("entryText"), 42, 313);
termEntry.addOption(termOption);
xContent = toXContent(termEntry, XContentType.JSON, randomBoolean());
assertEquals(
"{\"text\":\"entryText\","
+ "\"offset\":42,"
@@ -159,12 +160,11 @@
+ "\"freq\":42}"
+ "]}", xContent.utf8ToString());
org.elasticsearch.search.suggest.completion.CompletionSuggestion.Entry.Option completionOption =
new org.elasticsearch.search.suggest.completion.CompletionSuggestion.Entry.Option(-1, new Text("completionOption"),
CompletionSuggestion.Entry.Option completionOption = new CompletionSuggestion.Entry.Option(-1, new Text("completionOption"),
3.13f, Collections.singletonMap("key", Collections.singleton("value")));
entry = new Entry<>(new Text("entryText"), 42, 313);
entry.addOption(completionOption);
xContent = toXContent(entry, XContentType.JSON, randomBoolean());
CompletionSuggestion.Entry completionEntry = new CompletionSuggestion.Entry(new Text("entryText"), 42, 313);
completionEntry.addOption(completionOption);
xContent = toXContent(completionEntry, XContentType.JSON, randomBoolean());
assertEquals(
"{\"text\":\"entryText\","
+ "\"offset\":42,"

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestion;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -41,7 +42,7 @@ public class SuggestionOptionTests extends ESTestCase {
float score = randomFloat();
Text highlighted = randomFrom((Text) null, new Text(randomAlphaOfLengthBetween(5, 15)));
Boolean collateMatch = randomFrom((Boolean) null, randomBoolean());
return new Option(text, highlighted, score, collateMatch);
return new PhraseSuggestion.Entry.Option(text, highlighted, score, collateMatch);
}
public void testFromXContent() throws IOException {
@ -66,7 +67,7 @@ public class SuggestionOptionTests extends ESTestCase {
Option parsed;
try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
parsed = Option.fromXContent(parser);
parsed = PhraseSuggestion.Entry.Option.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertNull(parser.nextToken());
}
@ -78,7 +79,7 @@ public class SuggestionOptionTests extends ESTestCase {
}
public void testToXContent() throws IOException {
Option option = new Option(new Text("someText"), new Text("somethingHighlighted"), 1.3f, true);
Option option = new PhraseSuggestion.Entry.Option(new Text("someText"), new Text("somethingHighlighted"), 1.3f, true);
BytesReference xContent = toXContent(option, XContentType.JSON, randomBoolean());
assertEquals("{\"text\":\"someText\","
+ "\"highlighted\":\"somethingHighlighted\","

View File

@ -188,14 +188,15 @@ public class SuggestionTests extends ESTestCase {
public void testToXContent() throws IOException {
ToXContent.Params params = new ToXContent.MapParams(Collections.singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true"));
{
Option option = new Option(new Text("someText"), new Text("somethingHighlighted"), 1.3f, true);
Entry<Option> entry = new Entry<>(new Text("entryText"), 42, 313);
PhraseSuggestion.Entry.Option option = new PhraseSuggestion.Entry.Option(new Text("someText"), new Text("somethingHighlighted"),
1.3f, true);
PhraseSuggestion.Entry entry = new PhraseSuggestion.Entry(new Text("entryText"), 42, 313);
entry.addOption(option);
Suggestion<Entry<Option>> suggestion = new Suggestion<>("suggestionName", 5);
PhraseSuggestion suggestion = new PhraseSuggestion("suggestionName", 5);
suggestion.addTerm(entry);
BytesReference xContent = toXContent(suggestion, XContentType.JSON, params, randomBoolean());
assertEquals(
"{\"suggestion#suggestionName\":[{"
"{\"phrase#suggestionName\":[{"
+ "\"text\":\"entryText\","
+ "\"offset\":42,"
+ "\"length\":313,"
@ -208,7 +209,8 @@ public class SuggestionTests extends ESTestCase {
+ "}", xContent.utf8ToString());
}
{
Option option = new Option(new Text("someText"), new Text("somethingHighlighted"), 1.3f, true);
PhraseSuggestion.Entry.Option option = new PhraseSuggestion.Entry.Option(new Text("someText"), new Text("somethingHighlighted"),
1.3f, true);
PhraseSuggestion.Entry entry = new PhraseSuggestion.Entry(new Text("entryText"), 42, 313, 1.0);
entry.addOption(option);
PhraseSuggestion suggestion = new PhraseSuggestion("suggestionName", 5);

View File

@ -460,7 +460,7 @@ public abstract class EngineTestCase extends ESTestCase {
TimeValue.timeValueMinutes(5), refreshListenerList, Collections.emptyList(), indexSort, handler,
new NoneCircuitBreakerService(),
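// the update -> {} below is a no-op stand-in for ReplicationTracker's new global-checkpoint-updated callback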
globalCheckpointSupplier == null ?
new ReplicationTracker(shardId, allocationId.getId(), indexSettings, SequenceNumbers.NO_OPS_PERFORMED) :
new ReplicationTracker(shardId, allocationId.getId(), indexSettings, SequenceNumbers.NO_OPS_PERFORMED, update -> {}) :
globalCheckpointSupplier, primaryTerm::get);
return config;
}

View File

@ -2008,8 +2008,10 @@ public final class InternalTestCluster extends TestCluster {
final CircuitBreakerService breakerService = getInstanceFromNode(CircuitBreakerService.class, nodeAndClient.node);
CircuitBreaker fdBreaker = breakerService.getBreaker(CircuitBreaker.FIELDDATA);
assertThat("Fielddata breaker not reset to 0 on node: " + name, fdBreaker.getUsed(), equalTo(0L));
CircuitBreaker acctBreaker = breakerService.getBreaker(CircuitBreaker.ACCOUNTING);
assertThat("Accounting breaker not reset to 0 on node: " + name, acctBreaker.getUsed(), equalTo(0L));
// TODO: This is commented out while Lee looks into the failures
// See: https://github.com/elastic/elasticsearch/issues/30290
// CircuitBreaker acctBreaker = breakerService.getBreaker(CircuitBreaker.ACCOUNTING);
// assertThat("Accounting breaker not reset to 0 on node: " + name, acctBreaker.getUsed(), equalTo(0L));
// Anything that uses transport or HTTP can increase the
// request breaker (because they use bigarrays), because of
// that the breaker can sometimes be incremented from ping

View File

@ -40,9 +40,11 @@ import org.elasticsearch.nio.NioServerSocketChannel;
import org.elasticsearch.nio.NioSocketChannel;
import org.elasticsearch.nio.ServerChannelContext;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ConnectionProfile;
import org.elasticsearch.transport.TcpChannel;
import org.elasticsearch.transport.TcpServerChannel;
import org.elasticsearch.transport.TcpTransport;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.Transports;
import java.io.IOException;
@ -51,6 +53,8 @@ import java.net.StandardSocketOptions;
import java.nio.ByteBuffer;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Consumer;
import java.util.function.Supplier;
@ -128,6 +132,34 @@ public class MockNioTransport extends TcpTransport {
profileToChannelFactory.clear();
}
@Override
protected ConnectionProfile resolveConnectionProfile(ConnectionProfile connectionProfile) {
ConnectionProfile resolvedProfile = resolveConnectionProfile(connectionProfile, defaultConnectionProfile);
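// profiles that already fit in three channels are used unchanged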
if (resolvedProfile.getNumConnections() <= 3) {
return resolvedProfile;
}
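// otherwise collapse the profile: partition request types by whether they had any channels assigned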
ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
Set<TransportRequestOptions.Type> allTypesWithConnection = new HashSet<>();
Set<TransportRequestOptions.Type> allTypesWithoutConnection = new HashSet<>();
for (TransportRequestOptions.Type type : TransportRequestOptions.Type.values()) {
int numConnections = resolvedProfile.getNumConnectionsPerType(type);
if (numConnections > 0) {
allTypesWithConnection.add(type);
} else {
allTypesWithoutConnection.add(type);
}
}
// make sure we maintain at least the types that are supported by this profile even if we only use a single channel for them.
builder.addConnections(3, allTypesWithConnection.toArray(new TransportRequestOptions.Type[0]));
if (allTypesWithoutConnection.isEmpty() == false) {
builder.addConnections(0, allTypesWithoutConnection.toArray(new TransportRequestOptions.Type[0]));
}
builder.setHandshakeTimeout(resolvedProfile.getHandshakeTimeout());
builder.setConnectTimeout(resolvedProfile.getConnectTimeout());
return builder.build();
}
private void exceptionCaught(NioSocketChannel channel, Exception exception) {
onException((TcpChannel) channel, exception);
}
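The collapsing rule above can be illustrated with a small standalone sketch (plain Java with a hypothetical Type enum and a map-based profile, not the real ConnectionProfile builder):

import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Map;

public class ChannelCapSketch {
    enum Type { RECOVERY, BULK, REG, STATE, PING }

    public static void main(String[] args) {
        // a profile that would normally open 13 channels: 2 recovery, 3 bulk, 6 reg, 1 state, 1 ping
        Map<Type, Integer> profile = new EnumMap<>(Type.class);
        profile.put(Type.RECOVERY, 2);
        profile.put(Type.BULK, 3);
        profile.put(Type.REG, 6);
        profile.put(Type.STATE, 1);
        profile.put(Type.PING, 1);

        int total = profile.values().stream().mapToInt(Integer::intValue).sum();
        if (total <= 3) {
            System.out.println("profile kept as-is: " + profile);
            return;
        }
        // partition request types by whether the original profile gave them any channels
        EnumSet<Type> withConnections = EnumSet.noneOf(Type.class);
        EnumSet<Type> withoutConnections = EnumSet.noneOf(Type.class);
        for (Map.Entry<Type, Integer> e : profile.entrySet()) {
            (e.getValue() > 0 ? withConnections : withoutConnections).add(e.getKey());
        }
        // all previously connected types now share exactly three channels; the rest keep zero
        System.out.println("3 shared channels for " + withConnections + ", 0 for " + withoutConnections);
    }
}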

View File

@ -94,6 +94,11 @@ public class SimpleMockNioTransportTests extends AbstractSimpleTransportTestCase
return transportService;
}
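// mirrors the three-channel cap that MockNioTransport.resolveConnectionProfile applies above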
@Override
protected int channelsPerNodeConnection() {
return 3;
}
@Override
protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException {
TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection;

Some files were not shown because too many files have changed in this diff.