Merge remote-tracking branch 'origin/master' into feature/client_aggs_parsing

# Conflicts:
#	core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTermsTestCase.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/InternalAvgTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinalityTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/geobounds/InternalGeoBoundsTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroidTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/InternalMinTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentilesRanksTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/sum/InternalSumTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/InternalValueCountTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java
#	core/src/test/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivativeTests.java
#	test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java
This commit is contained in:
Tanguy Leroux 2017-05-10 13:40:39 +02:00
commit bf718a686f
94 changed files with 2808 additions and 456 deletions

View File

@ -750,7 +750,7 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]update[/\\]UpdateIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]validate[/\\]SimpleValidateQueryIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]versioning[/\\]SimpleVersioningIT.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionScriptEngineService.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionScriptEngine.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]MoreExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]StoredExpressionTests.java" checks="LineLength" />

View File

@ -57,7 +57,7 @@ public abstract class StringFieldType extends TermBasedFieldType {
}
@Override
public final Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions,
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions,
boolean transpositions) {
failIfNotIndexed();
return new FuzzyQuery(new Term(name(), indexedValueForSearch(value)),
@ -65,7 +65,7 @@ public abstract class StringFieldType extends TermBasedFieldType {
}
@Override
public final Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) {
public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) {
failIfNotIndexed();
PrefixQuery query = new PrefixQuery(new Term(name(), indexedValueForSearch(value)));
if (method != null) {
@ -75,7 +75,7 @@ public abstract class StringFieldType extends TermBasedFieldType {
}
@Override
public final Query regexpQuery(String value, int flags, int maxDeterminizedStates,
public Query regexpQuery(String value, int flags, int maxDeterminizedStates,
MultiTermQuery.RewriteMethod method, QueryShardContext context) {
failIfNotIndexed();
RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates);

View File

@ -21,7 +21,7 @@ package org.elasticsearch.plugins;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import java.util.Collections;
import java.util.List;
@ -32,9 +32,9 @@ import java.util.List;
public interface ScriptPlugin {
/**
* Returns a {@link ScriptEngineService} instance or <code>null</code> if this plugin doesn't add a new script engine
* Returns a {@link ScriptEngine} instance or <code>null</code> if this plugin doesn't add a new script engine
*/
default ScriptEngineService getScriptEngineService(Settings settings) {
default ScriptEngine getScriptEngine(Settings settings) {
return null;
}

View File

@ -33,13 +33,13 @@ import static java.util.Collections.unmodifiableMap;
/**
* A native script engine service.
*/
public class NativeScriptEngineService extends AbstractComponent implements ScriptEngineService {
public class NativeScriptEngine extends AbstractComponent implements ScriptEngine {
public static final String NAME = "native";
private final Map<String, NativeScriptFactory> scripts;
public NativeScriptEngineService(Settings settings, Map<String, NativeScriptFactory> scripts) {
public NativeScriptEngine(Settings settings, Map<String, NativeScriptFactory> scripts) {
super(settings);
this.scripts = unmodifiableMap(scripts);
}

View File

@ -25,10 +25,19 @@ import org.elasticsearch.search.lookup.SearchLookup;
import java.io.Closeable;
import java.util.Map;
public interface ScriptEngineService extends Closeable {
/**
* A script language implementation.
*/
public interface ScriptEngine extends Closeable {
/**
* The language name used in the script APIs to refer to this scripting backend.
*/
String getType();
/**
* The extension for file scripts in this language.
*/
String getExtension();
/**

View File

@ -21,23 +21,21 @@ package org.elasticsearch.script;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.elasticsearch.common.Strings;
public class ScriptEngineRegistry {
private final Map<Class<? extends ScriptEngineService>, String> registeredScriptEngineServices;
private final Map<String, ScriptEngineService> registeredLanguages;
private final Map<Class<? extends ScriptEngine>, String> registeredScriptEngineServices;
private final Map<String, ScriptEngine> registeredLanguages;
private final Map<String, Boolean> defaultInlineScriptEnableds;
public ScriptEngineRegistry(Iterable<ScriptEngineService> registrations) {
public ScriptEngineRegistry(Iterable<ScriptEngine> registrations) {
Objects.requireNonNull(registrations);
Map<Class<? extends ScriptEngineService>, String> registeredScriptEngineServices = new HashMap<>();
Map<String, ScriptEngineService> registeredLanguages = new HashMap<>();
Map<Class<? extends ScriptEngine>, String> registeredScriptEngineServices = new HashMap<>();
Map<String, ScriptEngine> registeredLanguages = new HashMap<>();
Map<String, Boolean> inlineScriptEnableds = new HashMap<>();
for (ScriptEngineService service : registrations) {
for (ScriptEngine service : registrations) {
String oldLanguage = registeredScriptEngineServices.putIfAbsent(service.getClass(),
service.getType());
if (oldLanguage != null) {
@ -45,11 +43,11 @@ public class ScriptEngineRegistry {
"] already registered for language [" + oldLanguage + "]");
}
String language = service.getType();
ScriptEngineService scriptEngineService =
ScriptEngine scriptEngine =
registeredLanguages.putIfAbsent(language, service);
if (scriptEngineService != null) {
if (scriptEngine != null) {
throw new IllegalArgumentException("scripting language [" + language + "] already registered for script engine service [" +
scriptEngineService.getClass().getCanonicalName() + "]");
scriptEngine.getClass().getCanonicalName() + "]");
}
inlineScriptEnableds.put(language, service.isInlineScriptEnabled());
}
@ -59,16 +57,16 @@ public class ScriptEngineRegistry {
this.defaultInlineScriptEnableds = Collections.unmodifiableMap(inlineScriptEnableds);
}
Iterable<Class<? extends ScriptEngineService>> getRegisteredScriptEngineServices() {
Iterable<Class<? extends ScriptEngine>> getRegisteredScriptEngineServices() {
return registeredScriptEngineServices.keySet();
}
String getLanguage(Class<? extends ScriptEngineService> scriptEngineService) {
String getLanguage(Class<? extends ScriptEngine> scriptEngineService) {
Objects.requireNonNull(scriptEngineService);
return registeredScriptEngineServices.get(scriptEngineService);
}
public Map<String, ScriptEngineService> getRegisteredLanguages() {
public Map<String, ScriptEngine> getRegisteredLanguages() {
return registeredLanguages;
}

View File

@ -57,7 +57,7 @@ public class ScriptModes {
*/
public boolean getScriptEnabled(String lang, ScriptType scriptType, ScriptContext scriptContext) {
//native scripts are always enabled as they are static by definition
if (NativeScriptEngineService.NAME.equals(lang)) {
if (NativeScriptEngine.NAME.equals(lang)) {
return true;
}
Boolean scriptMode = scriptEnabled.get(getKey(lang, scriptType, scriptContext));

View File

@ -27,8 +27,6 @@ import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.watcher.ResourceWatcherService;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -50,23 +48,23 @@ public class ScriptModule {
ResourceWatcherService resourceWatcherService, List<ScriptPlugin> scriptPlugins) {
Map<String, NativeScriptFactory> factoryMap = scriptPlugins.stream().flatMap(x -> x.getNativeScripts().stream())
.collect(Collectors.toMap(NativeScriptFactory::getName, Function.identity()));
NativeScriptEngineService nativeScriptEngineService = new NativeScriptEngineService(settings, factoryMap);
List<ScriptEngineService> scriptEngineServices = scriptPlugins.stream().map(x -> x.getScriptEngineService(settings))
NativeScriptEngine nativeScriptEngineService = new NativeScriptEngine(settings, factoryMap);
List<ScriptEngine> scriptEngines = scriptPlugins.stream().map(x -> x.getScriptEngine(settings))
.filter(Objects::nonNull).collect(Collectors.toList());
scriptEngineServices.add(nativeScriptEngineService);
scriptEngines.add(nativeScriptEngineService);
List<ScriptContext.Plugin> plugins = scriptPlugins.stream().map(x -> x.getCustomScriptContexts()).filter(Objects::nonNull)
.collect(Collectors.toList());
return new ScriptModule(settings, environment, resourceWatcherService, scriptEngineServices, plugins);
return new ScriptModule(settings, environment, resourceWatcherService, scriptEngines, plugins);
}
/**
* Build {@linkplain ScriptEngineService} and {@linkplain ScriptContext.Plugin}.
* Build {@linkplain ScriptEngine} and {@linkplain ScriptContext.Plugin}.
*/
public ScriptModule(Settings settings, Environment environment,
ResourceWatcherService resourceWatcherService, List<ScriptEngineService> scriptEngineServices,
ResourceWatcherService resourceWatcherService, List<ScriptEngine> scriptEngines,
List<ScriptContext.Plugin> customScriptContexts) {
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customScriptContexts);
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(scriptEngineServices);
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(scriptEngines);
scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
try {
scriptService = new ScriptService(settings, environment, resourceWatcherService, scriptEngineRegistry, scriptContextRegistry,

View File

@ -92,9 +92,9 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
public static final Setting<Integer> SCRIPT_MAX_COMPILATIONS_PER_MINUTE =
Setting.intSetting("script.max_compilations_per_minute", 15, 0, Property.Dynamic, Property.NodeScope);
private final Collection<ScriptEngineService> scriptEngines;
private final Map<String, ScriptEngineService> scriptEnginesByLang;
private final Map<String, ScriptEngineService> scriptEnginesByExt;
private final Collection<ScriptEngine> scriptEngines;
private final Map<String, ScriptEngine> scriptEnginesByLang;
private final Map<String, ScriptEngine> scriptEnginesByExt;
private final ConcurrentMap<CacheKey, CompiledScript> staticCache = ConcurrentCollections.newConcurrentMap();
@ -142,9 +142,9 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
logger.debug("using script cache with max_size [{}], expire [{}]", cacheMaxSize, cacheExpire);
this.cache = cacheBuilder.removalListener(new ScriptCacheRemovalListener()).build();
Map<String, ScriptEngineService> enginesByLangBuilder = new HashMap<>();
Map<String, ScriptEngineService> enginesByExtBuilder = new HashMap<>();
for (ScriptEngineService scriptEngine : scriptEngines) {
Map<String, ScriptEngine> enginesByLangBuilder = new HashMap<>();
Map<String, ScriptEngine> enginesByExtBuilder = new HashMap<>();
for (ScriptEngine scriptEngine : scriptEngines) {
String language = scriptEngineRegistry.getLanguage(scriptEngine.getClass());
enginesByLangBuilder.put(language, scriptEngine);
enginesByExtBuilder.put(scriptEngine.getExtension(), scriptEngine);
@ -183,20 +183,20 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
IOUtils.close(scriptEngines);
}
private ScriptEngineService getScriptEngineServiceForLang(String lang) {
ScriptEngineService scriptEngineService = scriptEnginesByLang.get(lang);
if (scriptEngineService == null) {
private ScriptEngine getScriptEngineServiceForLang(String lang) {
ScriptEngine scriptEngine = scriptEnginesByLang.get(lang);
if (scriptEngine == null) {
throw new IllegalArgumentException("script_lang not supported [" + lang + "]");
}
return scriptEngineService;
return scriptEngine;
}
private ScriptEngineService getScriptEngineServiceForFileExt(String fileExtension) {
ScriptEngineService scriptEngineService = scriptEnginesByExt.get(fileExtension);
if (scriptEngineService == null) {
private ScriptEngine getScriptEngineServiceForFileExt(String fileExtension) {
ScriptEngine scriptEngine = scriptEnginesByExt.get(fileExtension);
if (scriptEngine == null) {
throw new IllegalArgumentException("script file extension not supported [" + fileExtension + "]");
}
return scriptEngineService;
return scriptEngine;
}
void setMaxCompilationsPerMinute(Integer newMaxPerMinute) {
@ -258,7 +258,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
" operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are not supported");
}
ScriptEngineService scriptEngineService = getScriptEngineServiceForLang(lang);
ScriptEngine scriptEngine = getScriptEngineServiceForLang(lang);
if (canExecuteScript(lang, type, scriptContext) == false) {
throw new IllegalStateException("scripts of type [" + script.getType() + "]," +
@ -304,7 +304,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
}
// Check whether too many compilations have happened
checkCompilationLimit();
compiledScript = new CompiledScript(type, id, lang, scriptEngineService.compile(id, idOrCode, options));
compiledScript = new CompiledScript(type, id, lang, scriptEngine.compile(id, idOrCode, options));
} catch (ScriptException good) {
// TODO: remove this try-catch completely, when all script engines have good exceptions!
throw good; // its already good
@ -404,10 +404,10 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
}
try {
ScriptEngineService scriptEngineService = getScriptEngineServiceForLang(source.getLang());
ScriptEngine scriptEngine = getScriptEngineServiceForLang(source.getLang());
if (isAnyScriptContextEnabled(source.getLang(), ScriptType.STORED)) {
Object compiled = scriptEngineService.compile(request.id(), source.getCode(), Collections.emptyMap());
Object compiled = scriptEngine.compile(request.id(), source.getCode(), Collections.emptyMap());
if (compiled == null) {
throw new IllegalArgumentException("failed to parse/compile stored script [" + request.id() + "]" +
@ -528,7 +528,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
/**
* A small listener for the script cache that calls each
* {@code ScriptEngineService}'s {@code scriptRemoved} method when the
* {@code ScriptEngine}'s {@code scriptRemoved} method when the
* script has been removed from the cache
*/
private class ScriptCacheRemovalListener implements RemovalListener<CacheKey, CompiledScript> {
@ -571,7 +571,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
logger.trace("Loading script file : [{}]", file);
}
ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2());
ScriptEngine engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2());
if (engineService == null) {
logger.warn("No script engine found for [{}]", scriptNameExt.v2());
} else {
@ -629,7 +629,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
public void onFileDeleted(Path file) {
Tuple<String, String> scriptNameExt = getScriptNameExt(file);
if (scriptNameExt != null) {
ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2());
ScriptEngine engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2());
assert engineService != null;
logger.info("removing script file [{}]", file.toAbsolutePath());
staticCache.remove(new CacheKey(engineService.getType(), scriptNameExt.v1(), null));

View File

@ -72,8 +72,8 @@ public class ScriptSettings {
ScriptContextRegistry scriptContextRegistry) {
final List<Setting<Boolean>> scriptModeSettings = new ArrayList<>();
for (final Class<? extends ScriptEngineService> scriptEngineService : scriptEngineRegistry.getRegisteredScriptEngineServices()) {
if (scriptEngineService == NativeScriptEngineService.class) {
for (final Class<? extends ScriptEngine> scriptEngineService : scriptEngineRegistry.getRegisteredScriptEngineServices()) {
if (scriptEngineService == NativeScriptEngine.class) {
// native scripts are always enabled, and their settings can not be changed
continue;
}

View File

@ -37,7 +37,7 @@ public enum ScriptType implements Writeable {
* INLINE scripts are specified in numerous queries and compiled on-the-fly.
* They will be cached based on the lang and code of the script.
* They are turned off by default because most languages are insecure
* (Groovy and others), but can be overridden by the specific {@link ScriptEngineService}
* (Groovy and others), but can be overridden by the specific {@link ScriptEngine}
* if the language is naturally secure (Painless, Mustache, and Expressions).
*/
INLINE ( 0 , new ParseField("inline") , false ),
@ -46,7 +46,7 @@ public enum ScriptType implements Writeable {
* STORED scripts are saved as part of the {@link org.elasticsearch.cluster.ClusterState}
* based on user requests. They will be cached when they are first used in a query.
* They are turned off by default because most languages are insecure
* (Groovy and others), but can be overridden by the specific {@link ScriptEngineService}
* (Groovy and others), but can be overridden by the specific {@link ScriptEngine}
* if the language is naturally secure (Painless, Mustache, and Expressions).
*/
STORED ( 1 , new ParseField("stored", "id") , false ),
@ -123,7 +123,7 @@ public enum ScriptType implements Writeable {
/**
* @return Whether or not a {@link ScriptType} can be run by default. Note
* this can be potentially overridden by any {@link ScriptEngineService}.
* this can be potentially overridden by any {@link ScriptEngine}.
*/
public boolean isDefaultEnabled() {
return defaultEnabled;

View File

@ -34,6 +34,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
public class InternalRange<B extends InternalRange.Bucket, R extends InternalRange<B, R>> extends InternalMultiBucketAggregation<R, B>
implements Range {
@ -172,6 +173,27 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
Bucket that = (Bucket) other;
return Objects.equals(from, that.from)
&& Objects.equals(to, that.to)
&& Objects.equals(docCount, that.docCount)
&& Objects.equals(aggregations, that.aggregations)
&& Objects.equals(key, that.key);
}
@Override
public int hashCode() {
return Objects.hash(getClass(), from, to, docCount, aggregations, key);
}
}
public static class Factory<B extends Bucket, R extends InternalRange<B, R>> {
@ -245,8 +267,8 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
out.writeVInt(ranges.size());
for (B bucket : ranges) {
out.writeOptionalString(((Bucket) bucket).key);
out.writeDouble(((Bucket) bucket).from);
out.writeDouble(((Bucket) bucket).to);
out.writeDouble(bucket.from);
out.writeDouble(bucket.to);
out.writeVLong(((Bucket) bucket).docCount);
bucket.aggregations.writeTo(out);
}
@ -317,4 +339,16 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
return builder;
}
@Override
protected int doHashCode() {
return Objects.hash(ranges, format, keyed);
}
@Override
protected boolean doEquals(Object obj) {
InternalRange<?,?> that = (InternalRange<?,?>) obj;
return Objects.equals(ranges, that.ranges)
&& Objects.equals(format, that.format)
&& Objects.equals(keyed, that.keyed);
}
}

View File

@ -47,11 +47,11 @@ public class NativeScriptTests extends ESTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
ScriptModule scriptModule = new ScriptModule(settings, new Environment(settings), null,
singletonList(new NativeScriptEngineService(settings, singletonMap("my", new MyNativeScriptFactory()))), emptyList());
singletonList(new NativeScriptEngine(settings, singletonMap("my", new MyNativeScriptFactory()))), emptyList());
List<Setting<?>> scriptSettings = scriptModule.getSettings();
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
Script script = new Script(ScriptType.INLINE, NativeScriptEngineService.NAME, "my", Collections.emptyMap());
Script script = new Script(ScriptType.INLINE, NativeScriptEngine.NAME, "my", Collections.emptyMap());
CompiledScript compiledScript = scriptModule.getScriptService().compile(script, ScriptContext.Standard.SEARCH);
ExecutableScript executable = scriptModule.getScriptService().executable(compiledScript, script.getParams());
assertThat(executable.run().toString(), equalTo("test"));
@ -71,7 +71,7 @@ public class NativeScriptTests extends ESTestCase {
ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, null);
Map<String, NativeScriptFactory> nativeScriptFactoryMap = new HashMap<>();
nativeScriptFactoryMap.put("my", new MyNativeScriptFactory());
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singleton(new NativeScriptEngineService(settings,
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singleton(new NativeScriptEngine(settings,
nativeScriptFactoryMap)));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(new ArrayList<>());
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
@ -79,7 +79,7 @@ public class NativeScriptTests extends ESTestCase {
scriptContextRegistry, scriptSettings);
for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
assertThat(scriptService.compile(new Script(ScriptType.INLINE, NativeScriptEngineService.NAME, "my", Collections.emptyMap()),
assertThat(scriptService.compile(new Script(ScriptType.INLINE, NativeScriptEngine.NAME, "my", Collections.emptyMap()),
scriptContext), notNullValue());
}
}

View File

@ -43,7 +43,7 @@ public class ScriptModesTests extends ESTestCase {
ScriptSettings scriptSettings;
ScriptContextRegistry scriptContextRegistry;
private ScriptContext[] scriptContexts;
private Map<String, ScriptEngineService> scriptEngines;
private Map<String, ScriptEngine> scriptEngines;
private ScriptModes scriptModes;
private Set<String> checkedSettings;
private boolean assertAllSettingsWereChecked;
@ -65,8 +65,8 @@ public class ScriptModesTests extends ESTestCase {
scriptContexts = scriptContextRegistry.scriptContexts().toArray(new ScriptContext[scriptContextRegistry.scriptContexts().size()]);
scriptEngines = buildScriptEnginesByLangMap(newHashSet(
//add the native engine just to make sure it gets filtered out
new NativeScriptEngineService(Settings.EMPTY, Collections.<String, NativeScriptFactory>emptyMap()),
new CustomScriptEngineService()));
new NativeScriptEngine(Settings.EMPTY, Collections.<String, NativeScriptFactory>emptyMap()),
new CustomScriptEngine()));
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(scriptEngines.values());
scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
checkedSettings = new HashSet<>();
@ -77,7 +77,7 @@ public class ScriptModesTests extends ESTestCase {
@After
public void assertNativeScriptsAreAlwaysAllowed() {
if (assertScriptModesNonNull) {
assertThat(scriptModes.getScriptEnabled(NativeScriptEngineService.NAME, randomFrom(ScriptType.values()), randomFrom(scriptContexts)), equalTo(true));
assertThat(scriptModes.getScriptEnabled(NativeScriptEngine.NAME, randomFrom(ScriptType.values()), randomFrom(scriptContexts)), equalTo(true));
}
}
@ -216,16 +216,16 @@ public class ScriptModesTests extends ESTestCase {
return copy.values().toArray(new ScriptContext[copy.size()]);
}
static Map<String, ScriptEngineService> buildScriptEnginesByLangMap(Set<ScriptEngineService> scriptEngines) {
Map<String, ScriptEngineService> builder = new HashMap<>();
for (ScriptEngineService scriptEngine : scriptEngines) {
static Map<String, ScriptEngine> buildScriptEnginesByLangMap(Set<ScriptEngine> scriptEngines) {
Map<String, ScriptEngine> builder = new HashMap<>();
for (ScriptEngine scriptEngine : scriptEngines) {
String type = scriptEngine.getType();
builder.put(type, scriptEngine);
}
return unmodifiableMap(builder);
}
private static class CustomScriptEngineService implements ScriptEngineService {
private static class CustomScriptEngine implements ScriptEngine {
public static final String NAME = "custom";

View File

@ -55,9 +55,9 @@ import static org.hamcrest.Matchers.sameInstance;
public class ScriptServiceTests extends ESTestCase {
private ResourceWatcherService resourceWatcherService;
private ScriptEngineService scriptEngineService;
private ScriptEngineService dangerousScriptEngineService;
private Map<String, ScriptEngineService> scriptEnginesByLangMap;
private ScriptEngine scriptEngine;
private ScriptEngine dangerousScriptEngine;
private Map<String, ScriptEngine> scriptEnginesByLangMap;
private ScriptEngineRegistry scriptEngineRegistry;
private ScriptContextRegistry scriptContextRegistry;
private ScriptSettings scriptSettings;
@ -83,11 +83,11 @@ public class ScriptServiceTests extends ESTestCase {
.put(ScriptService.SCRIPT_MAX_COMPILATIONS_PER_MINUTE.getKey(), 10000)
.build();
resourceWatcherService = new ResourceWatcherService(baseSettings, null);
scriptEngineService = new TestEngineService();
dangerousScriptEngineService = new TestDangerousEngineService();
TestEngineService defaultScriptServiceEngine = new TestEngineService(Script.DEFAULT_SCRIPT_LANG) {};
scriptEngine = new TestEngine();
dangerousScriptEngine = new TestDangerousEngine();
TestEngine defaultScriptServiceEngine = new TestEngine(Script.DEFAULT_SCRIPT_LANG) {};
scriptEnginesByLangMap = ScriptModesTests.buildScriptEnginesByLangMap(
new HashSet<>(Arrays.asList(scriptEngineService, defaultScriptServiceEngine)));
new HashSet<>(Arrays.asList(scriptEngine, defaultScriptServiceEngine)));
//randomly register custom script contexts
int randomInt = randomIntBetween(0, 3);
//prevent duplicates using map
@ -104,7 +104,7 @@ public class ScriptServiceTests extends ESTestCase {
String context = plugin + "_" + operation;
contexts.put(context, new ScriptContext.Plugin(plugin, operation));
}
scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(scriptEngineService, dangerousScriptEngineService,
scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(scriptEngine, dangerousScriptEngine,
defaultScriptServiceEngine));
scriptContextRegistry = new ScriptContextRegistry(contexts.values());
scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
@ -259,7 +259,7 @@ public class ScriptServiceTests extends ESTestCase {
do {
ScriptType scriptType = randomFrom(ScriptType.values());
ScriptContext scriptContext = randomFrom(this.scriptContexts);
settingKey = scriptEngineService.getType() + "." + scriptType + "." + scriptContext.getKey();
settingKey = scriptEngine.getType() + "." + scriptType + "." + scriptContext.getKey();
} while (engineSettings.containsKey(settingKey));
engineSettings.put(settingKey, randomBoolean());
}
@ -301,7 +301,7 @@ public class ScriptServiceTests extends ESTestCase {
String script = scriptType == ScriptType.FILE ? "file_script" : "script";
for (ScriptContext scriptContext : this.scriptContexts) {
//fallback mechanism: 1) engine specific settings 2) op based settings 3) source based settings
Boolean scriptEnabled = engineSettings.get(dangerousScriptEngineService.getType() + "." + scriptType + "." + scriptContext.getKey());
Boolean scriptEnabled = engineSettings.get(dangerousScriptEngine.getType() + "." + scriptType + "." + scriptContext.getKey());
if (scriptEnabled == null) {
scriptEnabled = scriptContextSettings.get(scriptContext);
}
@ -312,7 +312,7 @@ public class ScriptServiceTests extends ESTestCase {
scriptEnabled = DEFAULT_SCRIPT_ENABLED.get(scriptType);
}
String lang = dangerousScriptEngineService.getType();
String lang = dangerousScriptEngine.getType();
if (scriptEnabled) {
assertCompileAccepted(lang, script, scriptType, scriptContext);
} else {
@ -332,7 +332,7 @@ public class ScriptServiceTests extends ESTestCase {
unknownContext = randomAlphaOfLength(randomIntBetween(1, 30));
} while(scriptContextRegistry.isSupportedContext(new ScriptContext.Plugin(pluginName, unknownContext)));
String type = scriptEngineService.getType();
String type = scriptEngine.getType();
try {
scriptService.compile(new Script(randomFrom(ScriptType.values()), type, "test", Collections.emptyMap()),
new ScriptContext.Plugin(pluginName, unknownContext));
@ -482,17 +482,17 @@ public class ScriptServiceTests extends ESTestCase {
);
}
public static class TestEngineService implements ScriptEngineService {
public static class TestEngine implements ScriptEngine {
public static final String NAME = "test";
private final String name;
public TestEngineService() {
public TestEngine() {
this(NAME);
}
public TestEngineService(String name) {
public TestEngine(String name) {
this.name = name;
}
@ -532,7 +532,7 @@ public class ScriptServiceTests extends ESTestCase {
}
}
public static class TestDangerousEngineService implements ScriptEngineService {
public static class TestDangerousEngine implements ScriptEngine {
public static final String NAME = "dtest";

View File

@ -29,14 +29,13 @@ import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class ScriptSettingsTests extends ESTestCase {
public void testSettingsAreProperlyPropogated() {
ScriptEngineRegistry scriptEngineRegistry =
new ScriptEngineRegistry(Collections.singletonList(new CustomScriptEngineService()));
new ScriptEngineRegistry(Collections.singletonList(new CustomScriptEngine()));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
boolean enabled = randomBoolean();
@ -50,7 +49,7 @@ public class ScriptSettingsTests extends ESTestCase {
}
}
private static class CustomScriptEngineService implements ScriptEngineService {
private static class CustomScriptEngine implements ScriptEngine {
public static final String NAME = "custom";

View File

@ -20,11 +20,11 @@
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
import org.elasticsearch.search.aggregations.metrics.min.InternalMin;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.ArrayList;
import java.util.List;

View File

@ -21,9 +21,9 @@ package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.apache.lucene.index.IndexWriter;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.ArrayList;
import java.util.HashMap;

View File

@ -25,6 +25,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.joda.time.DateTime;
import java.util.ArrayList;

View File

@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.ArrayList;
import java.util.List;

View File

@ -22,9 +22,9 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before;
import java.util.ArrayList;

View File

@ -0,0 +1,68 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
/**
 * Base test case for {@link Range} aggregation implementations. A random
 * {@code keyed} flag is drawn once per test and threaded into every created
 * instance; reduction is verified by checking that per-key document counts
 * are preserved across the merge.
 */
public abstract class InternalRangeTestCase<T extends InternalAggregation & Range> extends InternalAggregationTestCase<T> {

    private boolean keyed;

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        keyed = randomBoolean();
    }

    @Override
    protected T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
        return createTestInstance(name, pipelineAggregators, metaData, keyed);
    }

    /** Creates a random test instance with an explicit {@code keyed} flag. */
    protected abstract T createTestInstance(String name,
                                            List<PipelineAggregator> pipelineAggregators,
                                            Map<String, Object> metaData,
                                            boolean keyed);

    @Override
    protected void assertReduced(T reduced, List<T> inputs) {
        final Map<String, Long> expectedCounts = new TreeMap<>();
        for (T input : inputs) {
            accumulateDocCounts(expectedCounts, input);
        }
        final Map<String, Long> actualCounts = new TreeMap<>();
        accumulateDocCounts(actualCounts, reduced);
        assertEquals(expectedCounts, actualCounts);
    }

    /** Adds every bucket's doc count of {@code range} into {@code counts}, keyed by the bucket's key string. */
    private static void accumulateDocCounts(Map<String, Long> counts, Range range) {
        for (Range.Bucket bucket : range.getBuckets()) {
            counts.merge(bucket.getKeyAsString(), bucket.getDocCount(), Long::sum);
        }
    }
}

View File

@ -0,0 +1,82 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.junit.Before;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * Tests for {@link InternalRange}. Builds a random set of (possibly
 * overlapping) numeric ranges once per test and creates instances with a
 * random doc count per bucket.
 */
public class InternalRangeTests extends InternalRangeTestCase<InternalRange> {

    private DocValueFormat format;
    private List<Tuple<Double, Double>> ranges;

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        format = randomNumericDocValueFormat();
        final int interval = randomFrom(1, 5, 10, 25, 50, 100);
        // BUG FIX: was hard-coded to 1 with the random call left commented out
        // (debug leftover), which disabled multi-range coverage entirely.
        final int numRanges = randomIntBetween(1, 10);
        List<Tuple<Double, Double>> listOfRanges = new ArrayList<>(numRanges);
        for (int i = 0; i < numRanges; i++) {
            double from = i * interval;
            double to = from + interval;
            listOfRanges.add(Tuple.tuple(from, to));
        }
        if (randomBoolean()) {
            // Add some overlapping ranges
            double max = (double) numRanges * interval;
            listOfRanges.add(Tuple.tuple(0.0, max));
            listOfRanges.add(Tuple.tuple(0.0, max / 2));
            listOfRanges.add(Tuple.tuple(max / 3, max / 3 * 2));
        }
        ranges = Collections.unmodifiableList(listOfRanges);
    }

    @Override
    protected InternalRange createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData,
                                               boolean keyed) {
        final List<InternalRange.Bucket> buckets = new ArrayList<>();
        for (int i = 0; i < ranges.size(); ++i) {
            Tuple<Double, Double> range = ranges.get(i);
            int docCount = randomIntBetween(0, 1000);
            double from = range.v1();
            double to = range.v2();
            buckets.add(new InternalRange.Bucket("range_" + i, from, to, docCount, InternalAggregations.EMPTY, keyed, format));
        }
        // BUG FIX: pass the supplied metaData through instead of silently dropping it
        // (was Collections.emptyMap()), matching the sibling InternalDateRangeTests /
        // InternalGeoDistanceTests and allowing metadata round-trips to be exercised.
        return new InternalRange<>(name, buckets, format, keyed, pipelineAggregators, metaData);
    }

    @Override
    protected Writeable.Reader<InternalRange> instanceReader() {
        return InternalRange::new;
    }
}

View File

@ -0,0 +1,97 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range.date;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Before;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
/**
 * Tests for {@link InternalDateRange}. Builds consecutive date ranges by
 * repeatedly applying a randomly chosen time step (second/minute/hour/day/
 * month/year) starting from "now" (UTC), optionally adding overlapping
 * ranges anchored at the start.
 */
public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRange> {
private DocValueFormat format;
// Range bounds as epoch-millis pairs (from, to); immutable after setUp.
private List<Tuple<Double, Double>> dateRanges;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
format = randomNumericDocValueFormat();
// Random step used to advance the cursor between consecutive range bounds.
Function<DateTime, DateTime> interval = randomFrom(dateTime -> dateTime.plusSeconds(1), dateTime -> dateTime.plusMinutes(1),
dateTime -> dateTime.plusHours(1), dateTime -> dateTime.plusDays(1), dateTime -> dateTime.plusMonths(1), dateTime ->
dateTime.plusYears(1));
final int numRanges = randomIntBetween(1, 10);
final List<Tuple<Double, Double>> listOfRanges = new ArrayList<>(numRanges);
DateTime date = new DateTime(DateTimeZone.UTC);
double start = date.getMillis();
// Tracks the largest "to" bound seen, so overlaps below stay inside [start, end].
double end = 0;
for (int i = 0; i < numRanges; i++) {
// Each range runs from the current cursor to the cursor advanced by one step,
// so consecutive ranges are contiguous.
double from = date.getMillis();
date = interval.apply(date);
double to = date.getMillis();
listOfRanges.add(Tuple.tuple(from, to));
if (to > end) {
end = to;
}
}
if (randomBoolean()) {
// Optionally add 1-5 ranges that all start at the beginning and end at a
// random point before `end`, overlapping the contiguous ones above.
final int randomOverlaps = randomIntBetween(1, 5);
for (int i = 0; i < randomOverlaps; i++) {
listOfRanges.add(Tuple.tuple(start, randomDoubleBetween(start, end, false)));
}
}
dateRanges = Collections.unmodifiableList(listOfRanges);
}
@Override
protected InternalDateRange createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
boolean keyed) {
// One bucket per prepared range, each with a random doc count and no sub-aggregations.
final List<InternalDateRange.Bucket> buckets = new ArrayList<>();
for (int i = 0; i < dateRanges.size(); ++i) {
Tuple<Double, Double> range = dateRanges.get(i);
int docCount = randomIntBetween(0, 1000);
double from = range.v1();
double to = range.v2();
buckets.add( new InternalDateRange.Bucket("range_" + i, from, to, docCount, InternalAggregations.EMPTY, keyed, format));
}
return new InternalDateRange(name, buckets, format, keyed, pipelineAggregators, metaData);
}
@Override
protected Writeable.Reader<InternalDateRange> instanceReader() {
return InternalDateRange::new;
}
}

View File

@ -0,0 +1,81 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range.geodistance;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.junit.Before;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * Tests for {@link InternalGeoDistance}. Prepares a random list of distance
 * ranges (optionally with overlaps) once per test; instances get one bucket
 * per range with a random doc count.
 */
public class InternalGeoDistanceTests extends InternalRangeTestCase<InternalGeoDistance> {

    private List<Tuple<Double, Double>> geoDistanceRanges;

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        final int width = randomFrom(1, 5, 10, 25, 50, 100);
        final int count = randomIntBetween(1, 10);
        final List<Tuple<Double, Double>> bounds = new ArrayList<>(count);
        for (int i = 0; i < count; i++) {
            final double lower = i * width;
            bounds.add(Tuple.tuple(lower, lower + width));
        }
        if (randomBoolean()) {
            // Add some overlapping ranges
            final double upperBound = (double) count * width;
            bounds.add(Tuple.tuple(0.0, upperBound));
            bounds.add(Tuple.tuple(0.0, upperBound / 2));
            bounds.add(Tuple.tuple(upperBound / 3, upperBound / 3 * 2));
        }
        geoDistanceRanges = Collections.unmodifiableList(bounds);
    }

    @Override
    protected Writeable.Reader<InternalGeoDistance> instanceReader() {
        return InternalGeoDistance::new;
    }

    @Override
    protected InternalGeoDistance createTestInstance(String name,
                                                     List<PipelineAggregator> pipelineAggregators,
                                                     Map<String, Object> metaData,
                                                     boolean keyed) {
        final List<InternalGeoDistance.Bucket> buckets = new ArrayList<>();
        int index = 0;
        for (Tuple<Double, Double> range : geoDistanceRanges) {
            final int docCount = randomIntBetween(0, 1000);
            buckets.add(new InternalGeoDistance.Bucket("range_" + index, range.v1(), range.v2(), docCount,
                    InternalAggregations.EMPTY, keyed));
            index++;
        }
        return new InternalGeoDistance(name, buckets, keyed, pipelineAggregators, metaData);
    }
}

View File

@ -19,8 +19,8 @@
package org.elasticsearch.search.aggregations.bucket.significant;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.Arrays;
import java.util.HashMap;

View File

@ -23,6 +23,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.junit.Before;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.HashMap;
import java.util.List;

View File

@ -28,7 +28,7 @@ import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.InternalAggregation;
@ -397,7 +397,7 @@ public class AvgIT extends AbstractNumericTestCase {
*/
public static class ExtractFieldScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
public ScriptEngine getScriptEngine(Settings settings) {
return new ExtractFieldScriptEngine();
}
}
@ -405,7 +405,7 @@ public class AvgIT extends AbstractNumericTestCase {
/**
* This mock script returns the field that is specified by name in the script body
*/
public static class ExtractFieldScriptEngine implements ScriptEngineService {
public static class ExtractFieldScriptEngine implements ScriptEngine {
public static final String NAME = "extract_field";
@ -502,7 +502,7 @@ public class AvgIT extends AbstractNumericTestCase {
*/
public static class FieldValueScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
public ScriptEngine getScriptEngine(Settings settings) {
return new FieldValueScriptEngine();
}
}
@ -510,7 +510,7 @@ public class AvgIT extends AbstractNumericTestCase {
/**
* This mock script returns the field value and adds one month to the returned date
*/
public static class FieldValueScriptEngine implements ScriptEngineService {
public static class FieldValueScriptEngine implements ScriptEngine {
public static final String NAME = "field_value";

View File

@ -21,12 +21,12 @@ package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats.Bounds;
import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ParsedExtendedStats;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before;
import java.util.List;

View File

@ -21,11 +21,11 @@ package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
import org.elasticsearch.search.aggregations.metrics.max.ParsedMax;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.List;
import java.util.Map;

View File

@ -20,11 +20,11 @@ package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.List;
import java.util.Map;

View File

@ -28,7 +28,7 @@ import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.InternalAggregation;
@ -396,7 +396,7 @@ public class SumIT extends AbstractNumericTestCase {
*/
public static class ExtractFieldScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
public ScriptEngine getScriptEngine(Settings settings) {
return new ExtractFieldScriptEngine();
}
}
@ -405,7 +405,7 @@ public class SumIT extends AbstractNumericTestCase {
* This mock script returns the field that is specified by name in the
* script body
*/
public static class ExtractFieldScriptEngine implements ScriptEngineService {
public static class ExtractFieldScriptEngine implements ScriptEngine {
public static final String NAME = "extract_field";
@ -508,7 +508,7 @@ public class SumIT extends AbstractNumericTestCase {
*/
public static class FieldValueScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
public ScriptEngine getScriptEngine(Settings settings) {
return new FieldValueScriptEngine();
}
}
@ -517,7 +517,7 @@ public class SumIT extends AbstractNumericTestCase {
* This mock script returns the field value and adds one to the returned
* value
*/
public static class FieldValueScriptEngine implements ScriptEngineService {
public static class FieldValueScriptEngine implements ScriptEngine {
public static final String NAME = "field_value";

View File

@ -28,7 +28,7 @@ import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.InternalAggregation;
@ -251,7 +251,7 @@ public class ValueCountIT extends ESIntegTestCase {
*/
public static class FieldValueScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
public ScriptEngine getScriptEngine(Settings settings) {
return new FieldValueScriptEngine();
}
}
@ -259,7 +259,7 @@ public class ValueCountIT extends ESIntegTestCase {
/**
* This mock script returns the field value. If the parameter map contains a parameter "s", the corresponding is used as field name.
*/
public static class FieldValueScriptEngine implements ScriptEngineService {
public static class FieldValueScriptEngine implements ScriptEngine {
public static final String NAME = "field_value";

View File

@ -22,8 +22,8 @@ package org.elasticsearch.search.aggregations.metrics.avg;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.List;
import java.util.Map;

View File

@ -24,9 +24,10 @@ import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.After;
import java.util.ArrayList;

View File

@ -20,9 +20,9 @@
package org.elasticsearch.search.aggregations.metrics.geobounds;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.Collections;
import java.util.List;

View File

@ -21,9 +21,9 @@ package org.elasticsearch.search.aggregations.metrics.geocentroid;
import org.apache.lucene.geo.GeoEncodingUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.elasticsearch.test.geo.RandomGeoGenerator;
import java.util.Collections;

View File

@ -21,9 +21,9 @@ package org.elasticsearch.search.aggregations.metrics.min;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.List;
import java.util.Map;

View File

@ -21,6 +21,9 @@ package org.elasticsearch.search.aggregations.metrics.percentiles;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before;
import java.util.List;

View File

@ -25,6 +25,7 @@ import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesRanksTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.Arrays;
import java.util.List;

View File

@ -21,9 +21,9 @@ package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesRanksTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.Arrays;
import java.util.List;

View File

@ -30,8 +30,8 @@ import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.io.IOException;
import java.util.Collections;

View File

@ -20,9 +20,9 @@ package org.elasticsearch.search.aggregations.metrics.sum;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.List;
import java.util.Map;

View File

@ -30,11 +30,11 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.ArrayList;
import java.util.Arrays;

View File

@ -20,9 +20,9 @@
package org.elasticsearch.search.aggregations.metrics.valuecount;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.List;
import java.util.Map;

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import java.util.Collections;

View File

@ -21,10 +21,10 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.io.IOException;
import java.util.Collections;

View File

@ -21,9 +21,9 @@ package org.elasticsearch.search.aggregations.pipeline.derivative;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.Collections;
import java.util.List;

View File

@ -56,7 +56,7 @@ import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptServiceTests.TestEngineService;
import org.elasticsearch.script.ScriptServiceTests.TestEngine;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.DocValueFormat;
@ -91,7 +91,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST
.build();
Environment environment = new Environment(baseSettings);
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new TestEngineService()));
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new TestEngine()));
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
scriptService = new ScriptService(baseSettings, environment,
new ResourceWatcherService(baseSettings, null), scriptEngineRegistry, scriptContextRegistry, scriptSettings) {

View File

@ -22,7 +22,6 @@ package org.elasticsearch.search.suggest;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.ReduceSearchPhaseException;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
@ -34,7 +33,7 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.suggest.phrase.DirectCandidateGeneratorBuilder;
@ -1112,12 +1111,12 @@ public class SuggestSearchIT extends ESIntegTestCase {
public static class DummyTemplatePlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
public ScriptEngine getScriptEngine(Settings settings) {
return new DummyTemplateScriptEngine();
}
}
public static class DummyTemplateScriptEngine implements ScriptEngineService {
public static class DummyTemplateScriptEngine implements ScriptEngine {
// The collate query setter is hard coded to use mustache, so lets lie in this test about the script plugin,
// which makes the collate code thinks mustache is evaluating the query.

View File

@ -23,7 +23,7 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.AbstractExecutableScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptEngineService;
import org.elasticsearch.script.NativeScriptEngine;
import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
@ -57,7 +57,7 @@ public class UpdateByNativeScriptIT extends ESIntegTestCase {
Map<String, Object> params = new HashMap<>();
params.put("foo", "SETVALUE");
client().prepareUpdate("test", "type", "1")
.setScript(new Script(ScriptType.INLINE, NativeScriptEngineService.NAME, "custom", params)).get();
.setScript(new Script(ScriptType.INLINE, NativeScriptEngine.NAME, "custom", params)).get();
Map<String, Object> data = client().prepareGet("test", "type", "1").get().getSource();
assertThat(data, hasKey("foo"));

View File

@ -34,17 +34,14 @@ import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.MergePolicyConfig;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;
@ -76,12 +73,12 @@ public class UpdateIT extends ESIntegTestCase {
public static class PutFieldValuesScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
public ScriptEngine getScriptEngine(Settings settings) {
return new PutFieldValuesScriptEngine();
}
}
public static class PutFieldValuesScriptEngine implements ScriptEngineService {
public static class PutFieldValuesScriptEngine implements ScriptEngine {
public static final String NAME = "put_values";
@ -149,12 +146,12 @@ public class UpdateIT extends ESIntegTestCase {
public static class FieldIncrementScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
public ScriptEngine getScriptEngine(Settings settings) {
return new FieldIncrementScriptEngine();
}
}
public static class FieldIncrementScriptEngine implements ScriptEngineService {
public static class FieldIncrementScriptEngine implements ScriptEngine {
public static final String NAME = "field_inc";
@ -215,12 +212,12 @@ public class UpdateIT extends ESIntegTestCase {
public static class ScriptedUpsertScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
public ScriptEngine getScriptEngine(Settings settings) {
return new ScriptedUpsertScriptEngine();
}
}
public static class ScriptedUpsertScriptEngine implements ScriptEngineService {
public static class ScriptedUpsertScriptEngine implements ScriptEngine {
public static final String NAME = "scripted_upsert";
@ -282,12 +279,12 @@ public class UpdateIT extends ESIntegTestCase {
public static class ExtractContextInSourceScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
public ScriptEngine getScriptEngine(Settings settings) {
return new ExtractContextInSourceScriptEngine();
}
}
public static class ExtractContextInSourceScriptEngine implements ScriptEngineService {
public static class ExtractContextInSourceScriptEngine implements ScriptEngine {
public static final String NAME = "extract_ctx";

View File

@ -302,50 +302,46 @@ PUT icu_sample
--------------------------------------------------
// CONSOLE
[[analysis-icu-collation]]
==== ICU Collation Token Filter
[WARNING]
======
This token filter has been deprecated since Lucene 5.0. Please use
<<analysis-icu-collation-keyword-field, ICU Collation Keyword Field>>.
======
[[analysis-icu-collation-keyword-field]]
==== ICU Collation Keyword Field
Collations are used for sorting documents in a language-specific word order.
The `icu_collation` token filter is available to all indices and defaults to
using the
{defguide}/sorting-collations.html#uca[DUCET collation],
The `icu_collation_keyword` field type is available to all indices and will encode
the terms directly as bytes in a doc values field and a single indexed token just
like a standard {ref}/keyword.html[Keyword Field].
Defaults to using {defguide}/sorting-collations.html#uca[DUCET collation],
which is a best-effort attempt at language-neutral sorting.
Below is an example of how to set up a field for sorting German names in
``phonebook'' order:
[source,js]
--------------------------------------------------
PUT /my_index
--------------------------
PUT my_index
{
"settings": {
"analysis": {
"filter": {
"german_phonebook": {
"type": "icu_collation",
"language": "de",
"country": "DE",
"variant": "@collation=phonebook"
}
},
"analyzer": {
"german_phonebook": {
"tokenizer": "keyword",
"filter": [ "german_phonebook" ]
}
}
}
},
"mappings": {
"user": {
"properties": {
"name": { <1>
"name": { <1>
"type": "text",
"fields": {
"sort": { <2>
"type": "text",
"fielddata": true,
"analyzer": "german_phonebook"
"sort": { <2>
"type": "icu_collation_keyword",
"index": false,
"language": "de",
"country": "DE",
"variant": "@collation=phonebook"
}
}
}
@ -364,15 +360,47 @@ GET _search <3>
"sort": "name.sort"
}
--------------------------------------------------
--------------------------
// CONSOLE
<1> The `name` field uses the `standard` analyzer, and so supports full-text queries.
<2> The `name.sort` field uses the `keyword` analyzer to preserve the name as
a single token, and applies the `german_phonebook` token filter to index
the value in German phonebook sort order.
<2> The `name.sort` field is an `icu_collation_keyword` field that preserves the name as
a single-token doc value, and applies the German ``phonebook'' sort order.
<3> An example query which searches the `name` field and sorts on the `name.sort` field.
==== Parameters for ICU Collation Keyword Fields
The following parameters are accepted by `icu_collation_keyword` fields:
[horizontal]
`doc_values`::
Should the field be stored on disk in a column-stride fashion, so that it
can later be used for sorting, aggregations, or scripting? Accepts `true`
(default) or `false`.
`index`::
Should the field be searchable? Accepts `true` (default) or `false`.
`null_value`::
Accepts a string value which is substituted for any explicit `null`
values. Defaults to `null`, which means the field is treated as missing.
`store`::
Whether the field value should be stored and retrievable separately from
the {ref}/mapping-source-field.html[`_source`] field. Accepts `true` or `false`
(default).
`fields`::
Multi-fields allow the same string value to be indexed in multiple ways for
different purposes, such as one field for search and a multi-field for
sorting and aggregations.
===== Collation options
`strength`::
@ -404,14 +432,14 @@ Possible values: `shifted` or `non-ignorable`. Sets the alternate handling for
strength `quaternary` to be either shifted or non-ignorable. Which boils down
to ignoring punctuation and whitespace.
`caseLevel`::
`case_level`::
Possible values: `true` or `false` (default). Whether case level sorting is
required. When strength is set to `primary` this will ignore accent
differences.
`caseFirst`::
`case_first`::
Possible values: `lower` or `upper`. Useful to control which case is sorted
first when case is not ignored for strength `tertiary`. The default depends on
@ -424,11 +452,11 @@ according to their numeric representation. For example the value `egg-9` is
sorted before the value `egg-21`.
`variableTop`::
`variable_top`::
Single character or contraction. Controls what is variable for `alternate`.
`hiraganaQuaternaryMode`::
`hiragana_quaternary_mode`::
Possible values: `true` or `false`. Distinguishing between Katakana and
Hiragana characters in `quaternary` strength.

View File

@ -28,6 +28,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.emptyMap;
@ -41,7 +42,7 @@ public class InternalMatrixStats extends InternalAggregation implements MatrixSt
private final MatrixStatsResults results;
/** per shard ctor */
protected InternalMatrixStats(String name, long count, RunningStats multiFieldStatsResults, MatrixStatsResults results,
InternalMatrixStats(String name, long count, RunningStats multiFieldStatsResults, MatrixStatsResults results,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
super(name, pipelineAggregators, metaData);
assert count >= 0;
@ -138,6 +139,10 @@ public class InternalMatrixStats extends InternalAggregation implements MatrixSt
return results.getCorrelation(fieldX, fieldY);
}
MatrixStatsResults getResults() {
return results;
}
static class Fields {
public static final String FIELDS = "fields";
public static final String NAME = "name";
@ -238,4 +243,16 @@ public class InternalMatrixStats extends InternalAggregation implements MatrixSt
return new InternalMatrixStats(name, results.getDocCount(), runningStats, results, pipelineAggregators(), getMetaData());
}
@Override
protected int doHashCode() {
return Objects.hash(stats, results);
}
@Override
protected boolean doEquals(Object obj) {
InternalMatrixStats other = (InternalMatrixStats) obj;
return Objects.equals(this.stats, other.stats) &&
Objects.equals(this.results, other.results);
}
}

View File

@ -27,6 +27,7 @@ import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
/**
* Descriptive stats gathered per shard. Coordinating node computes final pearson product coefficient
@ -228,4 +229,18 @@ class MatrixStatsResults implements Writeable {
correlation.put(rowName, corRow);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MatrixStatsResults that = (MatrixStatsResults) o;
return Objects.equals(results, that.results) &&
Objects.equals(correlation, that.correlation);
}
@Override
public int hashCode() {
return Objects.hash(results, correlation);
}
}

View File

@ -28,6 +28,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
/**
* Descriptive stats gathered per shard. Coordinating node computes final correlation and covariance stats
@ -53,11 +54,11 @@ public class RunningStats implements Writeable, Cloneable {
/** covariance values */
protected HashMap<String, HashMap<String, Double>> covariances;
public RunningStats() {
RunningStats() {
init();
}
public RunningStats(final String[] fieldNames, final double[] fieldVals) {
RunningStats(final String[] fieldNames, final double[] fieldVals) {
if (fieldVals != null && fieldVals.length > 0) {
init();
this.add(fieldNames, fieldVals);
@ -309,4 +310,24 @@ public class RunningStats implements Writeable, Cloneable {
throw new ElasticsearchException("Error trying to create a copy of RunningStats");
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RunningStats that = (RunningStats) o;
return docCount == that.docCount &&
Objects.equals(fieldSum, that.fieldSum) &&
Objects.equals(counts, that.counts) &&
Objects.equals(means, that.means) &&
Objects.equals(variances, that.variances) &&
Objects.equals(skewness, that.skewness) &&
Objects.equals(kurtosis, that.kurtosis) &&
Objects.equals(covariances, that.covariances);
}
@Override
public int hashCode() {
return Objects.hash(docCount, fieldSum, counts, means, variances, skewness, kurtosis, covariances);
}
}

View File

@ -22,15 +22,12 @@ import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
import java.util.ArrayList;
import java.util.HashMap;
import static org.hamcrest.Matchers.equalTo;
public abstract class BaseMatrixStatsTestCase extends ESTestCase {
protected final int numObs = atLeast(10000);
protected final ArrayList<Double> fieldA = new ArrayList<>(numObs);
protected final ArrayList<Double> fieldB = new ArrayList<>(numObs);
protected final MultiPassStats actualStats = new MultiPassStats();
protected final MultiPassStats actualStats = new MultiPassStats(fieldAKey, fieldBKey);
protected static final String fieldAKey = "fieldA";
protected static final String fieldBKey = "fieldB";
@ -47,123 +44,4 @@ public abstract class BaseMatrixStatsTestCase extends ESTestCase {
actualStats.computeStats(fieldA, fieldB);
}
static class MultiPassStats {
long count;
HashMap<String, Double> means = new HashMap<>();
HashMap<String, Double> variances = new HashMap<>();
HashMap<String, Double> skewness = new HashMap<>();
HashMap<String, Double> kurtosis = new HashMap<>();
HashMap<String, HashMap<String, Double>> covariances = new HashMap<>();
HashMap<String, HashMap<String, Double>> correlations = new HashMap<>();
@SuppressWarnings("unchecked")
void computeStats(final ArrayList<Double> fieldA, final ArrayList<Double> fieldB) {
// set count
count = fieldA.size();
double meanA = 0d;
double meanB = 0d;
// compute mean
for (int n = 0; n < count; ++n) {
// fieldA
meanA += fieldA.get(n);
meanB += fieldB.get(n);
}
means.put(fieldAKey, meanA/count);
means.put(fieldBKey, meanB/count);
// compute variance, skewness, and kurtosis
double dA;
double dB;
double skewA = 0d;
double skewB = 0d;
double kurtA = 0d;
double kurtB = 0d;
double varA = 0d;
double varB = 0d;
double cVar = 0d;
for (int n = 0; n < count; ++n) {
dA = fieldA.get(n) - means.get(fieldAKey);
varA += dA * dA;
skewA += dA * dA * dA;
kurtA += dA * dA * dA * dA;
dB = fieldB.get(n) - means.get(fieldBKey);
varB += dB * dB;
skewB += dB * dB * dB;
kurtB += dB * dB * dB * dB;
cVar += dA * dB;
}
variances.put(fieldAKey, varA / (count - 1));
final double stdA = Math.sqrt(variances.get(fieldAKey));
variances.put(fieldBKey, varB / (count - 1));
final double stdB = Math.sqrt(variances.get(fieldBKey));
skewness.put(fieldAKey, skewA / ((count - 1) * variances.get(fieldAKey) * stdA));
skewness.put(fieldBKey, skewB / ((count - 1) * variances.get(fieldBKey) * stdB));
kurtosis.put(fieldAKey, kurtA / ((count - 1) * variances.get(fieldAKey) * variances.get(fieldAKey)));
kurtosis.put(fieldBKey, kurtB / ((count - 1) * variances.get(fieldBKey) * variances.get(fieldBKey)));
// compute covariance
final HashMap<String, Double> fieldACovar = new HashMap<>(2);
fieldACovar.put(fieldAKey, 1d);
cVar /= count - 1;
fieldACovar.put(fieldBKey, cVar);
covariances.put(fieldAKey, fieldACovar);
final HashMap<String, Double> fieldBCovar = new HashMap<>(2);
fieldBCovar.put(fieldAKey, cVar);
fieldBCovar.put(fieldBKey, 1d);
covariances.put(fieldBKey, fieldBCovar);
// compute correlation
final HashMap<String, Double> fieldACorr = new HashMap<>();
fieldACorr.put(fieldAKey, 1d);
double corr = covariances.get(fieldAKey).get(fieldBKey);
corr /= stdA * stdB;
fieldACorr.put(fieldBKey, corr);
correlations.put(fieldAKey, fieldACorr);
final HashMap<String, Double> fieldBCorr = new HashMap<>();
fieldBCorr.put(fieldAKey, corr);
fieldBCorr.put(fieldBKey, 1d);
correlations.put(fieldBKey, fieldBCorr);
}
public void assertNearlyEqual(MatrixStatsResults stats) {
assertThat(count, equalTo(stats.getDocCount()));
assertThat(count, equalTo(stats.getFieldCount(fieldAKey)));
assertThat(count, equalTo(stats.getFieldCount(fieldBKey)));
// means
assertTrue(nearlyEqual(means.get(fieldAKey), stats.getMean(fieldAKey), 1e-7));
assertTrue(nearlyEqual(means.get(fieldBKey), stats.getMean(fieldBKey), 1e-7));
// variances
assertTrue(nearlyEqual(variances.get(fieldAKey), stats.getVariance(fieldAKey), 1e-7));
assertTrue(nearlyEqual(variances.get(fieldBKey), stats.getVariance(fieldBKey), 1e-7));
// skewness (multi-pass is more susceptible to round-off error so we need to slightly relax the tolerance)
assertTrue(nearlyEqual(skewness.get(fieldAKey), stats.getSkewness(fieldAKey), 1e-4));
assertTrue(nearlyEqual(skewness.get(fieldBKey), stats.getSkewness(fieldBKey), 1e-4));
// kurtosis (multi-pass is more susceptible to round-off error so we need to slightly relax the tolerance)
assertTrue(nearlyEqual(kurtosis.get(fieldAKey), stats.getKurtosis(fieldAKey), 1e-4));
assertTrue(nearlyEqual(kurtosis.get(fieldBKey), stats.getKurtosis(fieldBKey), 1e-4));
// covariances
assertTrue(nearlyEqual(covariances.get(fieldAKey).get(fieldBKey), stats.getCovariance(fieldAKey, fieldBKey), 1e-7));
assertTrue(nearlyEqual(covariances.get(fieldBKey).get(fieldAKey), stats.getCovariance(fieldBKey, fieldAKey), 1e-7));
// correlation
assertTrue(nearlyEqual(correlations.get(fieldAKey).get(fieldBKey), stats.getCorrelation(fieldAKey, fieldBKey), 1e-7));
assertTrue(nearlyEqual(correlations.get(fieldBKey).get(fieldAKey), stats.getCorrelation(fieldBKey, fieldAKey), 1e-7));
}
}
private static boolean nearlyEqual(double a, double b, double epsilon) {
final double absA = Math.abs(a);
final double absB = Math.abs(b);
final double diff = Math.abs(a - b);
if (a == b) { // shortcut, handles infinities
return true;
} else if (a == 0 || b == 0 || diff < Double.MIN_NORMAL) {
// a or b is zero or both are extremely close to it
// relative error is less meaningful here
return diff < (epsilon * Double.MIN_NORMAL);
} else { // use relative error
return diff / Math.min((absA + absB), Double.MAX_VALUE) < epsilon;
}
}
}

View File

@ -0,0 +1,103 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.matrix.stats;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * Serialization round-trip and reduce tests for {@link InternalMatrixStats}.
 * The reduce test distributes random observations of two fields across simulated
 * shards and compares the reduced single-pass result against an exact multi-pass
 * reference computation ({@link MultiPassStats}).
 */
public class InternalMatrixStatsTests extends InternalAggregationTestCase<InternalMatrixStats> {

    @Override
    protected InternalMatrixStats createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
                                                     Map<String, Object> metaData) {
        // Feed a single random observation over a random number of fields into the running stats.
        int numFields = randomInt(128);
        String[] fieldNames = new String[numFields];
        double[] fieldValues = new double[numFields];
        for (int i = 0; i < numFields; i++) {
            fieldNames[i] = Integer.toString(i);
            fieldValues[i] = randomDouble();
        }
        RunningStats runningStats = new RunningStats();
        runningStats.add(fieldNames, fieldValues);
        // The derived results are only computed on the coordinating node, so they may legitimately be null here.
        MatrixStatsResults matrixStatsResults = randomBoolean() ? new MatrixStatsResults(runningStats) : null;
        // NOTE(review): the supplied `name` parameter is ignored in favor of the literal "_name" — confirm intended.
        return new InternalMatrixStats("_name", 1L, runningStats, matrixStatsResults, Collections.emptyList(), Collections.emptyMap());
    }

    @Override
    protected Writeable.Reader<InternalMatrixStats> instanceReader() {
        return InternalMatrixStats::new;
    }

    @Override
    public void testReduceRandom() {
        int numValues = 10000;
        int numShards = randomIntBetween(1, 20);
        // Integer division already truncates toward zero for non-negative operands;
        // the previous (int) Math.floor(...) wrapper was a no-op applied after the division.
        int valuesPerShard = numValues / numShards;

        List<Double> aValues = new ArrayList<>();
        List<Double> bValues = new ArrayList<>();

        RunningStats runningStats = new RunningStats();
        List<InternalAggregation> shardResults = new ArrayList<>();

        int valuePerShardCounter = 0;
        for (int i = 0; i < numValues; i++) {
            double valueA = randomDouble();
            aValues.add(valueA);
            double valueB = randomDouble();
            bValues.add(valueB);

            runningStats.add(new String[]{"a", "b"}, new double[]{valueA, valueB});
            if (++valuePerShardCounter == valuesPerShard) {
                // Shard is "full": snapshot its running stats as a shard-level aggregation and start a new one.
                shardResults.add(new InternalMatrixStats("_name", 1L, runningStats, null, Collections.emptyList(), Collections.emptyMap()));
                runningStats = new RunningStats();
                valuePerShardCounter = 0;
            }
        }
        // Flush any remainder that did not fill a whole shard.
        if (valuePerShardCounter != 0) {
            shardResults.add(new InternalMatrixStats("_name", 1L, runningStats, null, Collections.emptyList(), Collections.emptyMap()));
        }

        // Exact reference computed over the complete data set in multiple passes.
        MultiPassStats multiPassStats = new MultiPassStats("a", "b");
        multiPassStats.computeStats(aValues, bValues);

        ScriptService mockScriptService = mockScriptService();
        MockBigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService());
        InternalAggregation.ReduceContext context =
                new InternalAggregation.ReduceContext(bigArrays, mockScriptService, true);
        InternalMatrixStats reduced = (InternalMatrixStats) shardResults.get(0).reduce(shardResults, context);
        multiPassStats.assertNearlyEqual(reduced.getResults());
    }

    @Override
    protected void assertReduced(InternalMatrixStats reduced, List<InternalMatrixStats> inputs) {
        // testReduceRandom above replaces the generic reduce assertion; this hook must not be reached.
        throw new UnsupportedOperationException();
    }
}

View File

@ -0,0 +1,155 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.matrix.stats;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Exact descriptive statistics for two fields, computed in multiple passes over the
 * complete data set. Serves as a reference implementation for validating the
 * single-pass streaming results held in {@link MatrixStatsResults}.
 */
class MultiPassStats {

    private final String fieldAKey;
    private final String fieldBKey;

    private long count;
    private final Map<String, Double> means = new HashMap<>();
    private final Map<String, Double> variances = new HashMap<>();
    private final Map<String, Double> skewness = new HashMap<>();
    private final Map<String, Double> kurtosis = new HashMap<>();
    private final Map<String, HashMap<String, Double>> covariances = new HashMap<>();
    private final Map<String, HashMap<String, Double>> correlations = new HashMap<>();

    MultiPassStats(String fieldAName, String fieldBName) {
        this.fieldAKey = fieldAName;
        this.fieldBKey = fieldBName;
    }

    /**
     * Computes count, mean, variance, skewness, kurtosis, covariance and correlation
     * for the two parallel value lists. Both lists are expected to have the same size.
     */
    // NOTE(review): removed an unnecessary @SuppressWarnings("unchecked") — this method
    // performs no unchecked operations.
    void computeStats(final List<Double> fieldA, final List<Double> fieldB) {
        // set count
        count = fieldA.size();
        double meanA = 0d;
        double meanB = 0d;

        // compute mean
        for (int n = 0; n < count; ++n) {
            // fieldA
            meanA += fieldA.get(n);
            meanB += fieldB.get(n);
        }
        means.put(fieldAKey, meanA / count);
        means.put(fieldBKey, meanB / count);

        // compute variance, skewness, and kurtosis
        double dA;
        double dB;
        double skewA = 0d;
        double skewB = 0d;
        double kurtA = 0d;
        double kurtB = 0d;
        double varA = 0d;
        double varB = 0d;
        double cVar = 0d;
        for (int n = 0; n < count; ++n) {
            dA = fieldA.get(n) - means.get(fieldAKey);
            varA += dA * dA;
            skewA += dA * dA * dA;
            kurtA += dA * dA * dA * dA;
            dB = fieldB.get(n) - means.get(fieldBKey);
            varB += dB * dB;
            skewB += dB * dB * dB;
            kurtB += dB * dB * dB * dB;
            cVar += dA * dB;
        }
        // sample (n-1) normalization, matching the streaming implementation
        variances.put(fieldAKey, varA / (count - 1));
        final double stdA = Math.sqrt(variances.get(fieldAKey));
        variances.put(fieldBKey, varB / (count - 1));
        final double stdB = Math.sqrt(variances.get(fieldBKey));
        skewness.put(fieldAKey, skewA / ((count - 1) * variances.get(fieldAKey) * stdA));
        skewness.put(fieldBKey, skewB / ((count - 1) * variances.get(fieldBKey) * stdB));
        kurtosis.put(fieldAKey, kurtA / ((count - 1) * variances.get(fieldAKey) * variances.get(fieldAKey)));
        kurtosis.put(fieldBKey, kurtB / ((count - 1) * variances.get(fieldBKey) * variances.get(fieldBKey)));

        // compute covariance (diagonal entries are fixed at 1)
        final HashMap<String, Double> fieldACovar = new HashMap<>(2);
        fieldACovar.put(fieldAKey, 1d);
        cVar /= count - 1;
        fieldACovar.put(fieldBKey, cVar);
        covariances.put(fieldAKey, fieldACovar);
        final HashMap<String, Double> fieldBCovar = new HashMap<>(2);
        fieldBCovar.put(fieldAKey, cVar);
        fieldBCovar.put(fieldBKey, 1d);
        covariances.put(fieldBKey, fieldBCovar);

        // compute correlation from the covariance and the two standard deviations
        final HashMap<String, Double> fieldACorr = new HashMap<>();
        fieldACorr.put(fieldAKey, 1d);
        double corr = covariances.get(fieldAKey).get(fieldBKey);
        corr /= stdA * stdB;
        fieldACorr.put(fieldBKey, corr);
        correlations.put(fieldAKey, fieldACorr);
        final HashMap<String, Double> fieldBCorr = new HashMap<>();
        fieldBCorr.put(fieldAKey, corr);
        fieldBCorr.put(fieldBKey, 1d);
        correlations.put(fieldBKey, fieldBCorr);
    }

    /**
     * Asserts that the given streaming results agree with this multi-pass reference
     * within per-statistic tolerances.
     */
    void assertNearlyEqual(MatrixStatsResults stats) {
        assertEquals(count, stats.getDocCount());
        assertEquals(count, stats.getFieldCount(fieldAKey));
        assertEquals(count, stats.getFieldCount(fieldBKey));
        // means
        assertTrue(nearlyEqual(means.get(fieldAKey), stats.getMean(fieldAKey), 1e-7));
        assertTrue(nearlyEqual(means.get(fieldBKey), stats.getMean(fieldBKey), 1e-7));
        // variances
        assertTrue(nearlyEqual(variances.get(fieldAKey), stats.getVariance(fieldAKey), 1e-7));
        assertTrue(nearlyEqual(variances.get(fieldBKey), stats.getVariance(fieldBKey), 1e-7));
        // skewness (multi-pass is more susceptible to round-off error so we need to slightly relax the tolerance)
        assertTrue(nearlyEqual(skewness.get(fieldAKey), stats.getSkewness(fieldAKey), 1e-4));
        assertTrue(nearlyEqual(skewness.get(fieldBKey), stats.getSkewness(fieldBKey), 1e-4));
        // kurtosis (multi-pass is more susceptible to round-off error so we need to slightly relax the tolerance)
        assertTrue(nearlyEqual(kurtosis.get(fieldAKey), stats.getKurtosis(fieldAKey), 1e-4));
        assertTrue(nearlyEqual(kurtosis.get(fieldBKey), stats.getKurtosis(fieldBKey), 1e-4));
        // covariances
        assertTrue(nearlyEqual(covariances.get(fieldAKey).get(fieldBKey), stats.getCovariance(fieldAKey, fieldBKey), 1e-7));
        assertTrue(nearlyEqual(covariances.get(fieldBKey).get(fieldAKey), stats.getCovariance(fieldBKey, fieldAKey), 1e-7));
        // correlation
        assertTrue(nearlyEqual(correlations.get(fieldAKey).get(fieldBKey), stats.getCorrelation(fieldAKey, fieldBKey), 1e-7));
        assertTrue(nearlyEqual(correlations.get(fieldBKey).get(fieldAKey), stats.getCorrelation(fieldBKey, fieldAKey), 1e-7));
    }

    /**
     * Relative-error comparison of two doubles: handles the exact-equality/infinity
     * shortcut, values at or near zero (absolute comparison against a denormal-scaled
     * epsilon), and the general case via relative difference.
     */
    private static boolean nearlyEqual(double a, double b, double epsilon) {
        final double absA = Math.abs(a);
        final double absB = Math.abs(b);
        final double diff = Math.abs(a - b);
        if (a == b) { // shortcut, handles infinities
            return true;
        } else if (a == 0 || b == 0 || diff < Double.MIN_NORMAL) {
            // a or b is zero or both are extremely close to it
            // relative error is less meaningful here
            return diff < (epsilon * Double.MIN_NORMAL);
        } else { // use relative error
            return diff / Math.min((absA + absB), Double.MAX_VALUE) < epsilon;
        }
    }
}

View File

@ -22,14 +22,12 @@ package org.elasticsearch.script.expression;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptEngine;
public class ExpressionPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new ExpressionScriptEngineService(settings);
public ScriptEngine getScriptEngine(Settings settings) {
return new ExpressionScriptEngine(settings);
}
}

View File

@ -39,7 +39,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ClassPermission;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;
@ -56,11 +56,11 @@ import java.util.Map;
* Provides the infrastructure for Lucene expressions as a scripting language for Elasticsearch. Only
* {@link SearchScript}s are supported.
*/
public class ExpressionScriptEngineService extends AbstractComponent implements ScriptEngineService {
public class ExpressionScriptEngine extends AbstractComponent implements ScriptEngine {
public static final String NAME = "expression";
public ExpressionScriptEngineService(Settings settings) {
public ExpressionScriptEngine(Settings settings) {
super(settings);
}

View File

@ -32,14 +32,14 @@ import java.text.ParseException;
import java.util.Collections;
public class ExpressionTests extends ESSingleNodeTestCase {
ExpressionScriptEngineService service;
ExpressionScriptEngine service;
SearchLookup lookup;
@Override
public void setUp() throws Exception {
super.setUp();
IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double");
service = new ExpressionScriptEngineService(Settings.EMPTY);
service = new ExpressionScriptEngine(Settings.EMPTY);
lookup = new SearchLookup(index.mapperService(), index.fieldData(), null);
}

View File

@ -444,15 +444,15 @@ public class MoreExpressionTests extends ESIntegTestCase {
.addAggregation(
AggregationBuilders.stats("int_agg").field("x")
.script(new Script(ScriptType.INLINE,
ExpressionScriptEngineService.NAME, "_value * 3", Collections.emptyMap())))
ExpressionScriptEngine.NAME, "_value * 3", Collections.emptyMap())))
.addAggregation(
AggregationBuilders.stats("double_agg").field("y")
.script(new Script(ScriptType.INLINE,
ExpressionScriptEngineService.NAME, "_value - 1.1", Collections.emptyMap())))
ExpressionScriptEngine.NAME, "_value - 1.1", Collections.emptyMap())))
.addAggregation(
AggregationBuilders.stats("const_agg").field("x") // specifically to test a script w/o _value
.script(new Script(ScriptType.INLINE,
ExpressionScriptEngineService.NAME, "3.0", Collections.emptyMap()))
ExpressionScriptEngine.NAME, "3.0", Collections.emptyMap()))
);
SearchResponse rsp = req.get();
@ -487,7 +487,7 @@ public class MoreExpressionTests extends ESIntegTestCase {
.addAggregation(
AggregationBuilders.terms("term_agg").field("text")
.script(
new Script(ScriptType.INLINE, ExpressionScriptEngineService.NAME, "_value", Collections.emptyMap())));
new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "_value", Collections.emptyMap())));
String message;
try {
@ -577,7 +577,7 @@ public class MoreExpressionTests extends ESIntegTestCase {
UpdateRequestBuilder urb = client().prepareUpdate().setIndex("test_index");
urb.setType("doc");
urb.setId("1");
urb.setScript(new Script(ScriptType.INLINE, ExpressionScriptEngineService.NAME, "0", Collections.emptyMap()));
urb.setScript(new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "0", Collections.emptyMap()));
urb.get();
fail("Expression scripts should not be allowed to run as update scripts.");
} catch (Exception e) {
@ -609,7 +609,7 @@ public class MoreExpressionTests extends ESIntegTestCase {
.subAggregation(sum("fourSum").field("four"))
.subAggregation(bucketScript("totalSum",
new Script(ScriptType.INLINE,
ExpressionScriptEngineService.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()),
ExpressionScriptEngine.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()),
"twoSum", "threeSum", "fourSum")))
.execute().actionGet();

View File

@ -52,14 +52,14 @@ public class StoredExpressionTests extends ESIntegTestCase {
public void testAllOpsDisabledIndexedScripts() throws IOException {
client().admin().cluster().preparePutStoredScript()
.setLang(ExpressionScriptEngineService.NAME)
.setLang(ExpressionScriptEngine.NAME)
.setId("script1")
.setContent(new BytesArray("{\"script\":\"2\"}"), XContentType.JSON)
.get();
client().prepareIndex("test", "scriptTest", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON).get();
try {
client().prepareUpdate("test", "scriptTest", "1")
.setScript(new Script(ScriptType.STORED, ExpressionScriptEngineService.NAME, "script1", Collections.emptyMap())).get();
.setScript(new Script(ScriptType.STORED, ExpressionScriptEngine.NAME, "script1", Collections.emptyMap())).get();
fail("update script should have been rejected");
} catch(Exception e) {
assertThat(e.getMessage(), containsString("failed to execute script"));

View File

@ -19,6 +19,7 @@
package org.elasticsearch.script.mustache;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.CompositeIndicesRequest;
@ -34,6 +35,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
public class MultiSearchTemplateRequest extends ActionRequest implements CompositeIndicesRequest {
private int maxConcurrentSearchRequests = 0;
private List<SearchTemplateRequest> requests = new ArrayList<>();
private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpenAndForbidClosed();
@ -56,6 +58,26 @@ public class MultiSearchTemplateRequest extends ActionRequest implements Composi
return this;
}
/**
* Returns the amount of search requests specified in this multi search requests are allowed to be ran concurrently.
*/
public int maxConcurrentSearchRequests() {
return maxConcurrentSearchRequests;
}
/**
* Sets how many search requests specified in this multi search requests are allowed to be ran concurrently.
*/
public MultiSearchTemplateRequest maxConcurrentSearchRequests(int maxConcurrentSearchRequests) {
if (maxConcurrentSearchRequests < 1) {
throw new IllegalArgumentException("maxConcurrentSearchRequests must be positive");
}
this.maxConcurrentSearchRequests = maxConcurrentSearchRequests;
return this;
}
public List<SearchTemplateRequest> requests() {
return this.requests;
}
@ -90,12 +112,18 @@ public class MultiSearchTemplateRequest extends ActionRequest implements Composi
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
if (in.getVersion().onOrAfter(Version.V_5_5_0_UNRELEASED)) {
maxConcurrentSearchRequests = in.readVInt();
}
requests = in.readStreamableList(SearchTemplateRequest::new);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getVersion().onOrAfter(Version.V_5_5_0_UNRELEASED)) {
out.writeVInt(maxConcurrentSearchRequests);
}
out.writeStreamableList(requests);
}
}

View File

@ -30,10 +30,6 @@ public class MultiSearchTemplateRequestBuilder
super(client, action, new MultiSearchTemplateRequest());
}
public MultiSearchTemplateRequestBuilder(ElasticsearchClient client) {
this(client, MultiSearchTemplateAction.INSTANCE);
}
public MultiSearchTemplateRequestBuilder add(SearchTemplateRequest request) {
if (request.getRequest().indicesOptions() == IndicesOptions.strictExpandOpenAndForbidClosed()
&& request().indicesOptions() != IndicesOptions.strictExpandOpenAndForbidClosed()) {
@ -58,4 +54,12 @@ public class MultiSearchTemplateRequestBuilder
request().indicesOptions(indicesOptions);
return this;
}
/**
* Sets how many search requests specified in this multi search requests are allowed to be ran concurrently.
*/
public MultiSearchTemplateRequestBuilder setMaxConcurrentSearchRequests(int maxConcurrentSearchRequests) {
request().maxConcurrentSearchRequests(maxConcurrentSearchRequests);
return this;
}
}

View File

@ -33,7 +33,7 @@ import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import java.util.Arrays;
import java.util.List;
@ -44,8 +44,8 @@ import static java.util.Collections.singletonList;
public class MustachePlugin extends Plugin implements ScriptPlugin, ActionPlugin, SearchPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new MustacheScriptEngineService();
public ScriptEngine getScriptEngine(Settings settings) {
return new MustacheScriptEngine();
}
@Override

View File

@ -34,7 +34,7 @@ import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.GeneralScriptException;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;
@ -53,8 +53,8 @@ import java.util.Map;
* process: First compile the string representing the template, the resulting
* {@link Mustache} object can then be re-used for subsequent executions.
*/
public final class MustacheScriptEngineService implements ScriptEngineService {
private static final Logger logger = ESLoggerFactory.getLogger(MustacheScriptEngineService.class);
public final class MustacheScriptEngine implements ScriptEngine {
private static final Logger logger = ESLoggerFactory.getLogger(MustacheScriptEngine.class);
public static final String NAME = "mustache";

View File

@ -70,6 +70,10 @@ public class RestMultiSearchTemplateAction extends BaseRestHandler {
*/
public static MultiSearchTemplateRequest parseRequest(RestRequest restRequest, boolean allowExplicitIndex) throws IOException {
MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest();
if (restRequest.hasParam("max_concurrent_searches")) {
multiRequest.maxConcurrentSearchRequests(restRequest.paramAsInt("max_concurrent_searches", 0));
}
RestMultiSearchAction.parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex,
(searchRequest, bytes) -> {
try {

View File

@ -20,59 +20,81 @@
package org.elasticsearch.script.mustache;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.TransportMultiSearchAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.script.mustache.TransportSearchTemplateAction.convert;
public class TransportMultiSearchTemplateAction extends HandledTransportAction<MultiSearchTemplateRequest, MultiSearchTemplateResponse> {
private final TransportSearchTemplateAction searchTemplateAction;
private final ScriptService scriptService;
private final NamedXContentRegistry xContentRegistry;
private final TransportMultiSearchAction multiSearchAction;
@Inject
public TransportMultiSearchTemplateAction(Settings settings, ThreadPool threadPool, TransportService transportService,
ActionFilters actionFilters, IndexNameExpressionResolver resolver,
TransportSearchTemplateAction searchTemplateAction) {
ScriptService scriptService, NamedXContentRegistry xContentRegistry,
TransportMultiSearchAction multiSearchAction) {
super(settings, MultiSearchTemplateAction.NAME, threadPool, transportService, actionFilters, resolver,
MultiSearchTemplateRequest::new);
this.searchTemplateAction = searchTemplateAction;
this.scriptService = scriptService;
this.xContentRegistry = xContentRegistry;
this.multiSearchAction = multiSearchAction;
}
@Override
protected void doExecute(MultiSearchTemplateRequest request, ActionListener<MultiSearchTemplateResponse> listener) {
final AtomicArray<MultiSearchTemplateResponse.Item> responses = new AtomicArray<>(request.requests().size());
final AtomicInteger counter = new AtomicInteger(responses.length());
for (int i = 0; i < responses.length(); i++) {
final int index = i;
searchTemplateAction.execute(request.requests().get(i), new ActionListener<SearchTemplateResponse>() {
@Override
public void onResponse(SearchTemplateResponse searchTemplateResponse) {
responses.set(index, new MultiSearchTemplateResponse.Item(searchTemplateResponse, null));
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
@Override
public void onFailure(Exception e) {
responses.set(index, new MultiSearchTemplateResponse.Item(null, e));
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
private void finishHim() {
MultiSearchTemplateResponse.Item[] items = responses.toArray(new MultiSearchTemplateResponse.Item[responses.length()]);
listener.onResponse(new MultiSearchTemplateResponse(items));
}
});
List<Integer> originalSlots = new ArrayList<>();
MultiSearchRequest multiSearchRequest = new MultiSearchRequest();
multiSearchRequest.indicesOptions(request.indicesOptions());
if (request.maxConcurrentSearchRequests() != 0) {
multiSearchRequest.maxConcurrentSearchRequests(request.maxConcurrentSearchRequests());
}
MultiSearchTemplateResponse.Item[] items = new MultiSearchTemplateResponse.Item[request.requests().size()];
for (int i = 0; i < items.length; i++) {
SearchTemplateRequest searchTemplateRequest = request.requests().get(i);
SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse();
SearchRequest searchRequest;
try {
searchRequest = convert(searchTemplateRequest, searchTemplateResponse, scriptService, xContentRegistry);
} catch (Exception e) {
items[i] = new MultiSearchTemplateResponse.Item(null, e);
continue;
}
items[i] = new MultiSearchTemplateResponse.Item(searchTemplateResponse, null);
if (searchRequest != null) {
multiSearchRequest.add(searchRequest);
originalSlots.add(i);
}
}
multiSearchAction.execute(multiSearchRequest, ActionListener.wrap(r -> {
for (int i = 0; i < r.getResponses().length; i++) {
MultiSearchResponse.Item item = r.getResponses()[i];
int originalSlot = originalSlots.get(i);
if (item.isFailure()) {
items[originalSlot] = new MultiSearchTemplateResponse.Item(null, item.getFailure());
} else {
items[originalSlot].getResponse().setResponse(item.getResponse());
}
}
listener.onResponse(new MultiSearchTemplateResponse(items));
}, listener::onFailure));
}
}

View File

@ -19,7 +19,6 @@
package org.elasticsearch.script.mustache;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
@ -35,22 +34,22 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.template.CompiledTemplate;
import org.elasticsearch.template.CompiledTemplate;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.Collections;
import static org.elasticsearch.script.ScriptContext.Standard.SEARCH;
public class TransportSearchTemplateAction extends HandledTransportAction<SearchTemplateRequest, SearchTemplateResponse> {
private static final String TEMPLATE_LANG = MustacheScriptEngineService.NAME;
private static final String TEMPLATE_LANG = MustacheScriptEngine.NAME;
private final ScriptService scriptService;
private final TransportSearchAction searchAction;
@ -72,27 +71,8 @@ public class TransportSearchTemplateAction extends HandledTransportAction<Search
protected void doExecute(SearchTemplateRequest request, ActionListener<SearchTemplateResponse> listener) {
final SearchTemplateResponse response = new SearchTemplateResponse();
try {
Script script = new Script(request.getScriptType(), TEMPLATE_LANG, request.getScript(),
request.getScriptParams() == null ? Collections.emptyMap() : request.getScriptParams());
CompiledTemplate compiledScript = scriptService.compileTemplate(script, SEARCH);
BytesReference source = compiledScript.run(script.getParams());
response.setSource(source);
if (request.isSimulate()) {
listener.onResponse(response);
return;
}
// Executes the search
SearchRequest searchRequest = request.getRequest();
//we can assume the template is always json as we convert it before compiling it
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(xContentRegistry, source)) {
SearchSourceBuilder builder = SearchSourceBuilder.searchSource();
builder.parseXContent(new QueryParseContext(parser));
builder.explain(request.isExplain());
builder.profile(request.isProfile());
searchRequest.source(builder);
SearchRequest searchRequest = convert(request, response, scriptService, xContentRegistry);
if (searchRequest != null) {
searchAction.execute(searchRequest, new ActionListener<SearchResponse>() {
@Override
public void onResponse(SearchResponse searchResponse) {
@ -109,9 +89,35 @@ public class TransportSearchTemplateAction extends HandledTransportAction<Search
listener.onFailure(t);
}
});
} else {
listener.onResponse(response);
}
} catch (Exception t) {
listener.onFailure(t);
} catch (IOException e) {
listener.onFailure(e);
}
}
static SearchRequest convert(SearchTemplateRequest searchTemplateRequest, SearchTemplateResponse response, ScriptService scriptService,
NamedXContentRegistry xContentRegistry) throws IOException {
Script script = new Script(searchTemplateRequest.getScriptType(), TEMPLATE_LANG, searchTemplateRequest.getScript(),
searchTemplateRequest.getScriptParams() == null ? Collections.emptyMap() : searchTemplateRequest.getScriptParams());
CompiledTemplate compiledScript = scriptService.compileTemplate(script, SEARCH);
BytesReference source = compiledScript.run(script.getParams());
response.setSource(source);
SearchRequest searchRequest = searchTemplateRequest.getRequest();
response.setSource(source);
if (searchTemplateRequest.isSimulate()) {
return null;
}
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(xContentRegistry, source)) {
SearchSourceBuilder builder = SearchSourceBuilder.searchSource();
builder.parseXContent(new QueryParseContext(parser));
builder.explain(searchTemplateRequest.isExplain());
builder.profile(searchTemplateRequest.isProfile());
searchRequest.source(builder);
}
return searchRequest;
}
}

View File

@ -21,11 +21,10 @@ package org.elasticsearch.script.mustache;
import com.github.mustachejava.Mustache;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.test.ESTestCase;
import java.util.Map;
@ -62,11 +61,11 @@ public class CustomMustacheFactoryTests extends ESTestCase {
}
public void testJsonEscapeEncoder() {
final ScriptEngineService engine = new MustacheScriptEngineService();
final ScriptEngine engine = new MustacheScriptEngine();
final Map<String, String> params = randomBoolean() ? singletonMap(Script.CONTENT_TYPE_OPTION, JSON_MIME_TYPE) : emptyMap();
Mustache script = (Mustache) engine.compile(null, "{\"field\": \"{{value}}\"}", params);
CompiledScript compiled = new CompiledScript(INLINE, null, MustacheScriptEngineService.NAME, script);
CompiledScript compiled = new CompiledScript(INLINE, null, MustacheScriptEngine.NAME, script);
ExecutableScript executable = engine.executable(compiled, singletonMap("value", "a \"value\""));
BytesReference result = (BytesReference) executable.run();
@ -74,11 +73,11 @@ public class CustomMustacheFactoryTests extends ESTestCase {
}
public void testDefaultEncoder() {
final ScriptEngineService engine = new MustacheScriptEngineService();
final ScriptEngine engine = new MustacheScriptEngine();
final Map<String, String> params = singletonMap(Script.CONTENT_TYPE_OPTION, PLAIN_TEXT_MIME_TYPE);
Mustache script = (Mustache) engine.compile(null, "{\"field\": \"{{value}}\"}", params);
CompiledScript compiled = new CompiledScript(INLINE, null, MustacheScriptEngineService.NAME, script);
CompiledScript compiled = new CompiledScript(INLINE, null, MustacheScriptEngine.NAME, script);
ExecutableScript executable = engine.executable(compiled, singletonMap("value", "a \"value\""));
BytesReference result = (BytesReference) executable.run();
@ -86,11 +85,11 @@ public class CustomMustacheFactoryTests extends ESTestCase {
}
public void testUrlEncoder() {
final ScriptEngineService engine = new MustacheScriptEngineService();
final ScriptEngine engine = new MustacheScriptEngine();
final Map<String, String> params = singletonMap(Script.CONTENT_TYPE_OPTION, X_WWW_FORM_URLENCODED_MIME_TYPE);
Mustache script = (Mustache) engine.compile(null, "{\"field\": \"{{value}}\"}", params);
CompiledScript compiled = new CompiledScript(INLINE, null, MustacheScriptEngineService.NAME, script);
CompiledScript compiled = new CompiledScript(INLINE, null, MustacheScriptEngine.NAME, script);
ExecutableScript executable = engine.executable(compiled, singletonMap("value", "tilde~ AND date:[2016 FROM*]"));
BytesReference result = (BytesReference) executable.run();

View File

@ -19,6 +19,7 @@
package org.elasticsearch.script.mustache;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestRequest;
@ -90,4 +91,12 @@ public class MultiSearchTemplateRequestTests extends ESTestCase {
assertEquals("{\"query\":{\"match_{{template}}\":{}}}", request.requests().get(0).getScript());
assertEquals(1, request.requests().get(0).getScriptParams().size());
}
public void testMaxConcurrentSearchRequests() {
MultiSearchTemplateRequest request = new MultiSearchTemplateRequest();
request.maxConcurrentSearchRequests(randomIntBetween(1, Integer.MAX_VALUE));
expectThrows(IllegalArgumentException.class, () ->
request.maxConcurrentSearchRequests(randomIntBetween(Integer.MIN_VALUE, 0)));
}
}

View File

@ -42,12 +42,12 @@ import static org.hamcrest.Matchers.equalTo;
* Mustache based templating test
*/
public class MustacheScriptEngineTests extends ESTestCase {
private MustacheScriptEngineService qe;
private MustacheScriptEngine qe;
private MustacheFactory factory;
@Before
public void setup() {
qe = new MustacheScriptEngineService();
qe = new MustacheScriptEngine();
factory = new CustomMustacheFactory();
}

View File

@ -26,7 +26,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matcher;
@ -55,7 +55,7 @@ import static org.hamcrest.Matchers.notNullValue;
public class MustacheTests extends ESTestCase {
private ScriptEngineService engine = new MustacheScriptEngineService();
private ScriptEngine engine = new MustacheScriptEngine();
public void testBasics() {
String template = "GET _search {\"query\": " + "{\"boosting\": {"

View File

@ -151,7 +151,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
public void testIndexedTemplateClient() throws Exception {
assertAcked(client().admin().cluster().preparePutStoredScript()
.setLang(MustacheScriptEngineService.NAME)
.setLang(MustacheScriptEngine.NAME)
.setId("testTemplate")
.setContent(new BytesArray("{" +
"\"template\":{" +
@ -164,7 +164,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
assertAcked(client().admin().cluster().preparePutStoredScript()
.setLang(MustacheScriptEngineService.NAME)
.setLang(MustacheScriptEngine.NAME)
.setId("testTemplate").setContent(new BytesArray("{" +
"\"template\":{" +
" \"query\":{" +
@ -175,7 +175,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
"}"), XContentType.JSON));
GetStoredScriptResponse getResponse = client().admin().cluster()
.prepareGetStoredScript(MustacheScriptEngineService.NAME, "testTemplate").get();
.prepareGetStoredScript(MustacheScriptEngine.NAME, "testTemplate").get();
assertNotNull(getResponse.getSource());
BulkRequestBuilder bulkRequestBuilder = client().prepareBulk();
@ -197,10 +197,10 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
assertHitCount(searchResponse.getResponse(), 4);
assertAcked(client().admin().cluster()
.prepareDeleteStoredScript(MustacheScriptEngineService.NAME, "testTemplate"));
.prepareDeleteStoredScript(MustacheScriptEngine.NAME, "testTemplate"));
getResponse = client().admin().cluster()
.prepareGetStoredScript(MustacheScriptEngineService.NAME, "testTemplate").get();
.prepareGetStoredScript(MustacheScriptEngine.NAME, "testTemplate").get();
assertNull(getResponse.getSource());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SearchTemplateRequestBuilder(client())
@ -212,7 +212,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
public void testIndexedTemplate() throws Exception {
assertAcked(client().admin().cluster().preparePutStoredScript()
.setLang(MustacheScriptEngineService.NAME)
.setLang(MustacheScriptEngine.NAME)
.setId("1a")
.setContent(new BytesArray("{" +
"\"template\":{" +
@ -225,7 +225,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
), XContentType.JSON)
);
assertAcked(client().admin().cluster().preparePutStoredScript()
.setLang(MustacheScriptEngineService.NAME)
.setLang(MustacheScriptEngine.NAME)
.setId("2")
.setContent(new BytesArray("{" +
"\"template\":{" +
@ -237,7 +237,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
"}"), XContentType.JSON)
);
assertAcked(client().admin().cluster().preparePutStoredScript()
.setLang(MustacheScriptEngineService.NAME)
.setLang(MustacheScriptEngine.NAME)
.setId("3")
.setContent(new BytesArray("{" +
"\"template\":{" +
@ -313,13 +313,13 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
int iterations = randomIntBetween(2, 11);
for (int i = 1; i < iterations; i++) {
assertAcked(client().admin().cluster().preparePutStoredScript()
.setLang(MustacheScriptEngineService.NAME)
.setLang(MustacheScriptEngine.NAME)
.setId("git01")
.setContent(new BytesArray("{\"template\":{\"query\": {\"match\": {\"searchtext\": {\"query\": \"{{P_Keyword1}}\"," +
"\"type\": \"ooophrase_prefix\"}}}}}"), XContentType.JSON));
GetStoredScriptResponse getResponse = client().admin().cluster()
.prepareGetStoredScript(MustacheScriptEngineService.NAME, "git01").get();
.prepareGetStoredScript(MustacheScriptEngine.NAME, "git01").get();
assertNotNull(getResponse.getSource());
Map<String, Object> templateParams = new HashMap<>();
@ -333,7 +333,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
assertWarnings("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]");
assertAcked(client().admin().cluster().preparePutStoredScript()
.setLang(MustacheScriptEngineService.NAME)
.setLang(MustacheScriptEngine.NAME)
.setId("git01")
.setContent(new BytesArray("{\"query\": {\"match\": {\"searchtext\": {\"query\": \"{{P_Keyword1}}\"," +
"\"type\": \"phrase_prefix\"}}}}"), XContentType.JSON));
@ -351,7 +351,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
String multiQuery = "{\"query\":{\"terms\":{\"theField\":[\"{{#fieldParam}}\",\"{{.}}\",\"{{/fieldParam}}\"]}}}";
assertAcked(
client().admin().cluster().preparePutStoredScript()
.setLang(MustacheScriptEngineService.NAME)
.setLang(MustacheScriptEngine.NAME)
.setId("4")
.setContent(jsonBuilder().startObject().field("template", multiQuery).endObject().bytes(), XContentType.JSON)
);

View File

@ -72,7 +72,7 @@ public final class Location {
/** Computes the file name (mostly important for stacktraces) */
public static String computeSourceName(String scriptName, String source) {
StringBuilder fileName = new StringBuilder();
if (scriptName.equals(PainlessScriptEngineService.INLINE_NAME)) {
if (scriptName.equals(PainlessScriptEngine.INLINE_NAME)) {
// its an anonymous script, include at least a portion of the source to help identify which one it is
// but don't create stacktraces with filenames that contain newlines or huge names.

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import java.util.Arrays;
import java.util.List;
@ -40,8 +40,8 @@ public final class PainlessPlugin extends Plugin implements ScriptPlugin {
}
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new PainlessScriptEngineService(settings);
public ScriptEngine getScriptEngine(Settings settings) {
return new PainlessScriptEngine(settings);
}
@Override

View File

@ -93,12 +93,12 @@ public abstract class PainlessScript {
}
// build a name for the script:
final String name;
if (PainlessScriptEngineService.INLINE_NAME.equals(this.name)) {
if (PainlessScriptEngine.INLINE_NAME.equals(this.name)) {
name = source;
} else {
name = this.name;
}
ScriptException scriptException = new ScriptException("runtime error", t, scriptStack, name, PainlessScriptEngineService.NAME);
ScriptException scriptException = new ScriptException("runtime error", t, scriptStack, name, PainlessScriptEngine.NAME);
for (Map.Entry<String, List<String>> entry : extraMetadata.entrySet()) {
scriptException.addMetadata(entry.getKey(), entry.getValue());
}

View File

@ -27,7 +27,7 @@ import org.elasticsearch.painless.Compiler.Loader;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;
@ -46,7 +46,7 @@ import java.util.Map;
/**
* Implementation of a ScriptEngine for the Painless language.
*/
public final class PainlessScriptEngineService extends AbstractComponent implements ScriptEngineService {
public final class PainlessScriptEngine extends AbstractComponent implements ScriptEngine {
/**
* Standard name of the Painless language.
@ -71,7 +71,7 @@ public final class PainlessScriptEngineService extends AbstractComponent impleme
/**
* Default compiler settings to be used. Note that {@link CompilerSettings} is mutable but this instance shouldn't be mutated outside
* of {@link PainlessScriptEngineService#PainlessScriptEngineService(Settings)}.
* of {@link PainlessScriptEngine#PainlessScriptEngine(Settings)}.
*/
private final CompilerSettings defaultCompilerSettings = new CompilerSettings();
@ -79,7 +79,7 @@ public final class PainlessScriptEngineService extends AbstractComponent impleme
* Constructor.
* @param settings The settings to initialize the engine with.
*/
public PainlessScriptEngineService(final Settings settings) {
public PainlessScriptEngine(final Settings settings) {
super(settings);
defaultCompilerSettings.setRegexesEnabled(CompilerSettings.REGEX_ENABLED.get(settings));
}
@ -262,7 +262,7 @@ public final class PainlessScriptEngineService extends AbstractComponent impleme
break;
}
}
throw new ScriptException("compile error", t, scriptStack, scriptSource, PainlessScriptEngineService.NAME);
throw new ScriptException("compile error", t, scriptStack, scriptSource, PainlessScriptEngine.NAME);
}
// very simple heuristic: +/- 25 chars. can be improved later.

View File

@ -38,7 +38,7 @@ public class NeedsScoreTests extends ESSingleNodeTestCase {
public void testNeedsScores() {
IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double");
PainlessScriptEngineService service = new PainlessScriptEngineService(Settings.EMPTY);
PainlessScriptEngine service = new PainlessScriptEngine(Settings.EMPTY);
SearchLookup lookup = new SearchLookup(index.mapperService(), index.fieldData(), null);
Object compiled = service.compile(null, "1.2", Collections.emptyMap());

View File

@ -43,11 +43,11 @@ import static org.hamcrest.Matchers.hasSize;
* Typically just asserts the output of {@code exec()}
*/
public abstract class ScriptTestCase extends ESTestCase {
protected PainlessScriptEngineService scriptEngine;
protected PainlessScriptEngine scriptEngine;
@Before
public void setup() {
scriptEngine = new PainlessScriptEngineService(scriptEngineSettings());
scriptEngine = new PainlessScriptEngine(scriptEngineSettings());
}
/**

View File

@ -29,4 +29,4 @@ dependencies {
dependencyLicenses {
mapping from: /lucene-.*/, to: 'lucene'
}
}

View File

@ -0,0 +1,746 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.ibm.icu.text.Collator;
import com.ibm.icu.text.RawCollationKey;
import com.ibm.icu.text.RuleBasedCollator;
import com.ibm.icu.util.ULocale;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.IndexableBinaryStringTools;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.LongSupplier;
/**
 * A {@link FieldMapper} (content type {@value #CONTENT_TYPE}) that indexes a single
 * keyword-like value as an ICU collation key. The raw string is converted through a
 * frozen {@link Collator} into a binary sort key, so term matching, range queries and
 * sorting honour locale-specific collation rules instead of raw UTF-8 byte order.
 */
public class ICUCollationKeywordFieldMapper extends FieldMapper {

    public static final String CONTENT_TYPE = "icu_collation_keyword";

    /** Default, frozen field type shared by all {@value #CONTENT_TYPE} fields. */
    public static class Defaults {
        public static final MappedFieldType FIELD_TYPE = new CollationFieldType();

        static {
            FIELD_TYPE.setTokenized(false);
            FIELD_TYPE.setOmitNorms(true);
            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
            FIELD_TYPE.freeze();
        }

        public static final String NULL_VALUE = null;
    }

    /**
     * Field type carrying the configured {@link Collator}. Search-time values are
     * translated into collation keys by {@link #indexedValueForSearch(Object)} before
     * being compared against the indexed terms.
     */
    public static final class CollationFieldType extends StringFieldType {
        private Collator collator = null;

        public CollationFieldType() {
            setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
            setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
        }

        protected CollationFieldType(CollationFieldType ref) {
            super(ref);
            this.collator = ref.collator;
        }

        public CollationFieldType clone() {
            return new CollationFieldType(this);
        }

        @Override
        public boolean equals(Object o) {
            // super.equals already rejects nulls and differing classes.
            return super.equals(o) && Objects.equals(collator, ((CollationFieldType) o).collator);
        }

        @Override
        public void checkCompatibility(MappedFieldType otherFT, List<String> conflicts, boolean strict) {
            super.checkCompatibility(otherFT, conflicts, strict);
            CollationFieldType other = (CollationFieldType) otherFT;
            // Changing the collator would make previously indexed keys unreadable.
            if (!Objects.equals(collator, other.collator)) {
                conflicts.add("mapper [" + name() + "] has different [collator]");
            }
        }

        @Override
        public int hashCode() {
            return 31 * super.hashCode() + Objects.hashCode(collator);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        public Collator collator() {
            return collator;
        }

        /**
         * Sets the collator. Freezes it first if necessary so the stored instance is
         * immutable and thread-safe.
         */
        public void setCollator(Collator collator) {
            checkIfFrozen();
            this.collator = collator.isFrozen() ? collator : collator.freeze();
        }

        @Override
        public Query nullValueQuery() {
            if (nullValue() == null) {
                return null;
            }
            return termQuery(nullValue(), null);
        }

        @Override
        public IndexFieldData.Builder fielddataBuilder() {
            failIfNoDocValues();
            return new DocValuesIndexFieldData.Builder();
        }

        /**
         * Converts a search-time value into the binary collation key that was indexed.
         *
         * @throws IllegalStateException if no collator has been configured
         */
        @Override
        protected BytesRef indexedValueForSearch(Object value) {
            if (value == null) {
                return null;
            }

            if (value instanceof BytesRef) {
                value = ((BytesRef) value).utf8ToString();
            }

            if (collator != null) {
                RawCollationKey key = collator.getRawCollationKey(value.toString(), null);
                return new BytesRef(key.bytes, 0, key.size);
            } else {
                throw new IllegalStateException("collator is null");
            }
        }

        // Fuzzy, prefix and regexp queries operate on the original characters, which are
        // not recoverable from a collation key, so they are unsupported.
        @Override
        public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions,
                                boolean transpositions) {
            throw new UnsupportedOperationException();
        }

        @Override
        public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) {
            throw new UnsupportedOperationException();
        }

        @Override
        public Query regexpQuery(String value, int flags, int maxDeterminizedStates,
                                 MultiTermQuery.RewriteMethod method, QueryShardContext context) {
            throw new UnsupportedOperationException();
        }

        /**
         * Doc value format that round-trips binary collation keys through a string
         * representation using {@link IndexableBinaryStringTools}. Declared {@code final}:
         * it is a shared constant (registered as a named writeable by the plugin) and must
         * not be reassigned.
         */
        public static final DocValueFormat COLLATE_FORMAT = new DocValueFormat() {
            @Override
            public String getWriteableName() {
                return "collate";
            }

            @Override
            public void writeTo(StreamOutput out) throws IOException {
                // Stateless singleton: nothing to serialize beyond the writeable name.
            }

            @Override
            public String format(long value) {
                throw new UnsupportedOperationException();
            }

            @Override
            public String format(double value) {
                throw new UnsupportedOperationException();
            }

            @Override
            public String format(BytesRef value) {
                int encodedLength = IndexableBinaryStringTools.getEncodedLength(value.bytes, value.offset, value.length);
                char[] encoded = new char[encodedLength];
                IndexableBinaryStringTools.encode(value.bytes, value.offset, value.length, encoded, 0, encodedLength);
                return new String(encoded, 0, encodedLength);
            }

            @Override
            public long parseLong(String value, boolean roundUp, LongSupplier now) {
                throw new UnsupportedOperationException();
            }

            @Override
            public double parseDouble(String value, boolean roundUp, LongSupplier now) {
                throw new UnsupportedOperationException();
            }

            @Override
            public BytesRef parseBytesRef(String value) {
                char[] encoded = value.toCharArray();
                int decodedLength = IndexableBinaryStringTools.getDecodedLength(encoded, 0, encoded.length);
                byte[] decoded = new byte[decodedLength];
                IndexableBinaryStringTools.decode(encoded, 0, encoded.length, decoded, 0, decodedLength);
                return new BytesRef(decoded);
            }
        };

        @Override
        public DocValueFormat docValueFormat(final String format, final DateTimeZone timeZone) {
            return COLLATE_FORMAT;
        }
    }

    /**
     * Builder that collects the collation options from the mapping and materialises a
     * frozen {@link Collator} via {@link #buildCollator()}.
     */
    public static class Builder extends FieldMapper.Builder<Builder, ICUCollationKeywordFieldMapper> {
        private String rules = null;
        private String language = null;
        private String country = null;
        private String variant = null;
        private String strength = null;
        private String decomposition = null;
        private String alternate = null;
        private boolean caseLevel = false;
        private String caseFirst = null;
        private boolean numeric = false;
        private String variableTop = null;
        private boolean hiraganaQuaternaryMode = false;
        // NOTE(review): declared but not read by build(); presumably the superclass
        // builder handles null_value — confirm against FieldMapper.Builder.
        private String nullValue = Defaults.NULL_VALUE;

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
            builder = this;
        }

        @Override
        public CollationFieldType fieldType() {
            return (CollationFieldType) super.fieldType();
        }

        /**
         * Collation keys carry no positional information, so anything beyond
         * docs+freqs is rejected.
         */
        @Override
        public Builder indexOptions(IndexOptions indexOptions) {
            if (indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) > 0) {
                throw new IllegalArgumentException("The [" + CONTENT_TYPE + "] field does not support positions, got [index_options]="
                    + indexOptionToString(indexOptions));
            }

            return super.indexOptions(indexOptions);
        }

        public String rules() {
            return rules;
        }

        public Builder rules(final String rules) {
            this.rules = rules;
            return this;
        }

        public String language() {
            return language;
        }

        public Builder language(final String language) {
            this.language = language;
            return this;
        }

        public String country() {
            return country;
        }

        public Builder country(final String country) {
            this.country = country;
            return this;
        }

        public String variant() {
            return variant;
        }

        public Builder variant(final String variant) {
            this.variant = variant;
            return this;
        }

        public String strength() {
            return strength;
        }

        public Builder strength(final String strength) {
            this.strength = strength;
            return this;
        }

        public String decomposition() {
            return decomposition;
        }

        public Builder decomposition(final String decomposition) {
            this.decomposition = decomposition;
            return this;
        }

        public String alternate() {
            return alternate;
        }

        public Builder alternate(final String alternate) {
            this.alternate = alternate;
            return this;
        }

        public boolean caseLevel() {
            return caseLevel;
        }

        public Builder caseLevel(final boolean caseLevel) {
            this.caseLevel = caseLevel;
            return this;
        }

        public String caseFirst() {
            return caseFirst;
        }

        public Builder caseFirst(final String caseFirst) {
            this.caseFirst = caseFirst;
            return this;
        }

        public boolean numeric() {
            return numeric;
        }

        public Builder numeric(final boolean numeric) {
            this.numeric = numeric;
            return this;
        }

        public String variableTop() {
            return variableTop;
        }

        public Builder variableTop(final String variableTop) {
            this.variableTop = variableTop;
            return this;
        }

        public boolean hiraganaQuaternaryMode() {
            return hiraganaQuaternaryMode;
        }

        public Builder hiraganaQuaternaryMode(final boolean hiraganaQuaternaryMode) {
            this.hiraganaQuaternaryMode = hiraganaQuaternaryMode;
            return this;
        }

        /**
         * Builds a frozen {@link Collator} from the configured options. Explicit
         * {@code rules} take precedence over the language/country/variant locale;
         * with neither set, the default locale's collator is used.
         *
         * @throws IllegalArgumentException on unparsable rules or invalid option values
         */
        public Collator buildCollator() {
            Collator collator;
            if (rules != null) {
                try {
                    collator = new RuleBasedCollator(rules);
                } catch (Exception e) {
                    throw new IllegalArgumentException("Failed to parse collation rules", e);
                }
            } else {
                if (language != null) {
                    ULocale locale;
                    if (country != null) {
                        if (variant != null) {
                            locale = new ULocale(language, country, variant);
                        } else {
                            locale = new ULocale(language, country);
                        }
                    } else {
                        locale = new ULocale(language);
                    }
                    collator = Collator.getInstance(locale);
                } else {
                    collator = Collator.getInstance();
                }
            }

            // set the strength flag, otherwise it will be the default.
            if (strength != null) {
                if (strength.equalsIgnoreCase("primary")) {
                    collator.setStrength(Collator.PRIMARY);
                } else if (strength.equalsIgnoreCase("secondary")) {
                    collator.setStrength(Collator.SECONDARY);
                } else if (strength.equalsIgnoreCase("tertiary")) {
                    collator.setStrength(Collator.TERTIARY);
                } else if (strength.equalsIgnoreCase("quaternary")) {
                    collator.setStrength(Collator.QUATERNARY);
                } else if (strength.equalsIgnoreCase("identical")) {
                    collator.setStrength(Collator.IDENTICAL);
                } else {
                    throw new IllegalArgumentException("Invalid strength: " + strength);
                }
            }

            // set the decomposition flag, otherwise it will be the default.
            if (decomposition != null) {
                if (decomposition.equalsIgnoreCase("no")) {
                    collator.setDecomposition(Collator.NO_DECOMPOSITION);
                } else if (decomposition.equalsIgnoreCase("canonical")) {
                    collator.setDecomposition(Collator.CANONICAL_DECOMPOSITION);
                } else {
                    throw new IllegalArgumentException("Invalid decomposition: " + decomposition);
                }
            }

            // expert options: concrete subclasses are always a RuleBasedCollator
            RuleBasedCollator rbc = (RuleBasedCollator) collator;
            if (alternate != null) {
                if (alternate.equalsIgnoreCase("shifted")) {
                    rbc.setAlternateHandlingShifted(true);
                } else if (alternate.equalsIgnoreCase("non-ignorable")) {
                    rbc.setAlternateHandlingShifted(false);
                } else {
                    throw new IllegalArgumentException("Invalid alternate: " + alternate);
                }
            }

            if (caseLevel) {
                rbc.setCaseLevel(true);
            }

            if (caseFirst != null) {
                if (caseFirst.equalsIgnoreCase("lower")) {
                    rbc.setLowerCaseFirst(true);
                } else if (caseFirst.equalsIgnoreCase("upper")) {
                    rbc.setUpperCaseFirst(true);
                } else {
                    throw new IllegalArgumentException("Invalid caseFirst: " + caseFirst);
                }
            }

            if (numeric) {
                rbc.setNumericCollation(true);
            }

            if (variableTop != null) {
                rbc.setVariableTop(variableTop);
            }

            if (hiraganaQuaternaryMode) {
                rbc.setHiraganaQuaternary(true);
            }

            // freeze so thread-safe
            return collator.freeze();
        }

        @Override
        public ICUCollationKeywordFieldMapper build(BuilderContext context) {
            final Collator collator = buildCollator();
            fieldType().setCollator(collator);
            setupFieldType(context);
            return new ICUCollationKeywordFieldMapper(name, fieldType, defaultFieldType, context.indexSettings(),
                multiFieldsBuilder.build(this, context), copyTo, rules, language, country, variant, strength, decomposition,
                alternate, caseLevel, caseFirst, numeric, variableTop, hiraganaQuaternaryMode, collator);
        }
    }

    /**
     * Parses the mapping definition into a {@link Builder}. Recognised entries are
     * removed from {@code node}; leftovers are reported by the generic mapping parser.
     */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext)
            throws MapperParsingException {
            Builder builder = new Builder(name);
            TypeParsers.parseField(builder, name, node, parserContext);
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext(); ) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = entry.getKey();
                Object fieldNode = entry.getValue();
                switch (fieldName) {
                    case "null_value":
                        if (fieldNode == null) {
                            throw new MapperParsingException("Property [null_value] cannot be null.");
                        }
                        builder.nullValue(fieldNode.toString());
                        iterator.remove();
                        break;
                    case "norms":
                        builder.omitNorms(!XContentMapValues.nodeBooleanValue(fieldNode, "norms"));
                        iterator.remove();
                        break;
                    case "rules":
                        builder.rules(XContentMapValues.nodeStringValue(fieldNode, null));
                        iterator.remove();
                        break;
                    case "language":
                        builder.language(XContentMapValues.nodeStringValue(fieldNode, null));
                        iterator.remove();
                        break;
                    case "country":
                        builder.country(XContentMapValues.nodeStringValue(fieldNode, null));
                        iterator.remove();
                        break;
                    case "variant":
                        builder.variant(XContentMapValues.nodeStringValue(fieldNode, null));
                        iterator.remove();
                        break;
                    case "strength":
                        builder.strength(XContentMapValues.nodeStringValue(fieldNode, null));
                        iterator.remove();
                        break;
                    case "decomposition":
                        builder.decomposition(XContentMapValues.nodeStringValue(fieldNode, null));
                        iterator.remove();
                        break;
                    case "alternate":
                        builder.alternate(XContentMapValues.nodeStringValue(fieldNode, null));
                        iterator.remove();
                        break;
                    case "case_level":
                        builder.caseLevel(XContentMapValues.nodeBooleanValue(fieldNode, false));
                        iterator.remove();
                        break;
                    case "case_first":
                        builder.caseFirst(XContentMapValues.nodeStringValue(fieldNode, null));
                        iterator.remove();
                        break;
                    case "numeric":
                        builder.numeric(XContentMapValues.nodeBooleanValue(fieldNode, false));
                        iterator.remove();
                        break;
                    case "variable_top":
                        builder.variableTop(XContentMapValues.nodeStringValue(fieldNode, null));
                        iterator.remove();
                        break;
                    case "hiragana_quaternary_mode":
                        builder.hiraganaQuaternaryMode(XContentMapValues.nodeBooleanValue(fieldNode, false));
                        iterator.remove();
                        break;
                    default:
                        // leave unrecognised entries for the caller to validate
                        break;
                }
            }

            return builder;
        }
    }

    // Original mapping options, retained verbatim so doMerge can detect conflicting
    // updates and doXContentBody can serialize the mapping back out.
    private final String rules;
    private final String language;
    private final String country;
    private final String variant;
    private final String strength;
    private final String decomposition;
    private final String alternate;
    private final boolean caseLevel;
    private final String caseFirst;
    private final boolean numeric;
    private final String variableTop;
    private final boolean hiraganaQuaternaryMode;
    private final Collator collator;

    protected ICUCollationKeywordFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                             Settings indexSettings, MultiFields multiFields, CopyTo copyTo, String rules, String language,
                                             String country, String variant,
                                             String strength, String decomposition, String alternate, boolean caseLevel, String caseFirst,
                                             boolean numeric, String variableTop, boolean hiraganaQuaternaryMode, Collator collator) {
        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
        // the Builder always freezes the collator before handing it over
        assert collator.isFrozen();
        this.rules = rules;
        this.language = language;
        this.country = country;
        this.variant = variant;
        this.strength = strength;
        this.decomposition = decomposition;
        this.alternate = alternate;
        this.caseLevel = caseLevel;
        this.caseFirst = caseFirst;
        this.numeric = numeric;
        this.variableTop = variableTop;
        this.hiraganaQuaternaryMode = hiraganaQuaternaryMode;
        this.collator = collator;
    }

    @Override
    public CollationFieldType fieldType() {
        return (CollationFieldType) super.fieldType();
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    /**
     * Rejects any mapping update that changes a collation option: the indexed keys
     * were produced with the old collator and would no longer match.
     */
    @Override
    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
        super.doMerge(mergeWith, updateAllTypes);

        List<String> conflicts = new ArrayList<>();
        ICUCollationKeywordFieldMapper icuMergeWith = (ICUCollationKeywordFieldMapper) mergeWith;

        if (!Objects.equals(rules, icuMergeWith.rules)) {
            conflicts.add("Cannot update rules setting for [" + CONTENT_TYPE + "]");
        }

        if (!Objects.equals(language, icuMergeWith.language)) {
            conflicts.add("Cannot update language setting for [" + CONTENT_TYPE + "]");
        }

        if (!Objects.equals(country, icuMergeWith.country)) {
            conflicts.add("Cannot update country setting for [" + CONTENT_TYPE + "]");
        }

        if (!Objects.equals(variant, icuMergeWith.variant)) {
            conflicts.add("Cannot update variant setting for [" + CONTENT_TYPE + "]");
        }

        if (!Objects.equals(strength, icuMergeWith.strength)) {
            conflicts.add("Cannot update strength setting for [" + CONTENT_TYPE + "]");
        }

        if (!Objects.equals(decomposition, icuMergeWith.decomposition)) {
            conflicts.add("Cannot update decomposition setting for [" + CONTENT_TYPE + "]");
        }

        if (!Objects.equals(alternate, icuMergeWith.alternate)) {
            conflicts.add("Cannot update alternate setting for [" + CONTENT_TYPE + "]");
        }

        if (caseLevel != icuMergeWith.caseLevel) {
            conflicts.add("Cannot update case_level setting for [" + CONTENT_TYPE + "]");
        }

        if (!Objects.equals(caseFirst, icuMergeWith.caseFirst)) {
            conflicts.add("Cannot update case_first setting for [" + CONTENT_TYPE + "]");
        }

        if (numeric != icuMergeWith.numeric) {
            conflicts.add("Cannot update numeric setting for [" + CONTENT_TYPE + "]");
        }

        if (!Objects.equals(variableTop, icuMergeWith.variableTop)) {
            conflicts.add("Cannot update variable_top setting for [" + CONTENT_TYPE + "]");
        }

        if (hiraganaQuaternaryMode != icuMergeWith.hiraganaQuaternaryMode) {
            conflicts.add("Cannot update hiragana_quaternary_mode setting for [" + CONTENT_TYPE + "]");
        }

        if (!conflicts.isEmpty()) {
            throw new IllegalArgumentException("Can't merge because of conflicts: " + conflicts);
        }
    }

    @Override
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        super.doXContentBody(builder, includeDefaults, params);

        if (includeDefaults || fieldType().nullValue() != null) {
            builder.field("null_value", fieldType().nullValue());
        }

        if (includeDefaults || rules != null) {
            builder.field("rules", rules);
        }

        if (includeDefaults || language != null) {
            builder.field("language", language);
        }

        if (includeDefaults || country != null) {
            builder.field("country", country);
        }

        if (includeDefaults || variant != null) {
            builder.field("variant", variant);
        }

        if (includeDefaults || strength != null) {
            builder.field("strength", strength);
        }

        if (includeDefaults || decomposition != null) {
            builder.field("decomposition", decomposition);
        }

        if (includeDefaults || alternate != null) {
            builder.field("alternate", alternate);
        }

        if (includeDefaults || caseLevel) {
            builder.field("case_level", caseLevel);
        }

        if (includeDefaults || caseFirst != null) {
            builder.field("case_first", caseFirst);
        }

        if (includeDefaults || numeric) {
            builder.field("numeric", numeric);
        }

        if (includeDefaults || variableTop != null) {
            builder.field("variable_top", variableTop);
        }

        if (includeDefaults || hiraganaQuaternaryMode) {
            builder.field("hiragana_quaternary_mode", hiraganaQuaternaryMode);
        }
    }

    /**
     * Extracts the document value (falling back to the configured null_value),
     * converts it into a binary collation key and adds the indexed and/or doc-values
     * fields accordingly. A missing value with no null_value indexes nothing.
     */
    @Override
    protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
        final String value;
        if (context.externalValueSet()) {
            value = context.externalValue().toString();
        } else {
            XContentParser parser = context.parser();
            if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
                value = fieldType().nullValueAsString();
            } else {
                value = parser.textOrNull();
            }
        }

        if (value == null) {
            return;
        }

        RawCollationKey key = collator.getRawCollationKey(value, null);
        final BytesRef binaryValue = new BytesRef(key.bytes, 0, key.size);

        if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
            Field field = new Field(fieldType().name(), binaryValue, fieldType());
            fields.add(field);
        }

        if (fieldType().hasDocValues()) {
            fields.add(new SortedDocValuesField(fieldType().name(), binaryValue));
        }
    }
}

View File

@ -19,6 +19,9 @@
package org.elasticsearch.plugin.analysis.icu;
import static java.util.Collections.singletonMap;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.IcuCollationTokenFilterFactory;
import org.elasticsearch.index.analysis.IcuFoldingTokenFilterFactory;
@ -28,16 +31,20 @@ import org.elasticsearch.index.analysis.IcuTokenizerFactory;
import org.elasticsearch.index.analysis.IcuTransformTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.index.mapper.ICUCollationKeywordFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.DocValueFormat;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static java.util.Collections.singletonMap;
public class AnalysisICUPlugin extends Plugin implements AnalysisPlugin {
public class AnalysisICUPlugin extends Plugin implements AnalysisPlugin, MapperPlugin {
@Override
public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
return singletonMap("icu_normalizer", IcuNormalizerCharFilterFactory::new);
@ -57,4 +64,20 @@ public class AnalysisICUPlugin extends Plugin implements AnalysisPlugin {
public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
return singletonMap("icu_tokenizer", IcuTokenizerFactory::new);
}
@Override
public Map<String, Mapper.TypeParser> getMappers() {
return Collections.singletonMap(ICUCollationKeywordFieldMapper.CONTENT_TYPE, new ICUCollationKeywordFieldMapper.TypeParser());
}
@Override
public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
return Collections.singletonList(
new NamedWriteableRegistry.Entry(
DocValueFormat.class,
ICUCollationKeywordFieldMapper.CollationFieldType.COLLATE_FORMAT.getWriteableName(),
in -> ICUCollationKeywordFieldMapper.CollationFieldType.COLLATE_FORMAT
)
);
}
}

View File

@ -0,0 +1,145 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import com.ibm.icu.text.Collator;
import com.ibm.icu.text.RawCollationKey;
import com.ibm.icu.util.ULocale;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.ICUCollationKeywordFieldMapper.CollationFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/** Unit tests for {@link CollationFieldType} query construction. */
public class CollationFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new CollationFieldType();
    }

    public void testIsFieldWithinQuery() throws IOException {
        CollationFieldType fieldType = new CollationFieldType();
        // The current implementation ignores all arguments and should always report INTERSECTS.
        assertEquals(Relation.INTERSECTS, fieldType.isFieldWithinQuery(null,
            RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5),
            RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5),
            randomBoolean(), randomBoolean(), null, null, null));
    }

    public void testTermQuery() {
        MappedFieldType fieldType = createDefaultFieldType();
        fieldType.setName("field");
        fieldType.setIndexOptions(IndexOptions.DOCS);

        // A primary-strength Turkish collator treats dotted/dotless 'i' case pairs as equal.
        Collator turkishCollator = Collator.getInstance(new ULocale("tr"));
        turkishCollator.setStrength(Collator.PRIMARY);
        turkishCollator.freeze();
        ((CollationFieldType) fieldType).setCollator(turkishCollator);

        RawCollationKey rawKey = turkishCollator.getRawCollationKey("ı will use turkish casıng", null);
        BytesRef expectedTerm = new BytesRef(rawKey.bytes, 0, rawKey.size);
        assertEquals(new TermQuery(new Term("field", expectedTerm)),
            fieldType.termQuery("I WİLL USE TURKİSH CASING", null));

        // Querying an unindexed field must fail with a clear message.
        fieldType.setIndexOptions(IndexOptions.NONE);
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> fieldType.termQuery("bar", null));
        assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
    }

    public void testTermsQuery() {
        MappedFieldType fieldType = createDefaultFieldType();
        fieldType.setName("field");
        fieldType.setIndexOptions(IndexOptions.DOCS);

        Collator defaultCollator = Collator.getInstance().freeze();
        ((CollationFieldType) fieldType).setCollator(defaultCollator);

        RawCollationKey fooKey = defaultCollator.getRawCollationKey("foo", null);
        RawCollationKey barKey = defaultCollator.getRawCollationKey("bar", null);

        List<BytesRef> expectedTerms = new ArrayList<>();
        expectedTerms.add(new BytesRef(fooKey.bytes, 0, fooKey.size));
        expectedTerms.add(new BytesRef(barKey.bytes, 0, barKey.size));

        assertEquals(new TermInSetQuery("field", expectedTerms),
            fieldType.termsQuery(Arrays.asList("foo", "bar"), null));

        fieldType.setIndexOptions(IndexOptions.NONE);
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> fieldType.termsQuery(Arrays.asList("foo", "bar"), null));
        assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
    }

    // Regexp, fuzzy and prefix queries cannot be expressed over collation keys and are unsupported.

    public void testRegexpQuery() {
        MappedFieldType fieldType = createDefaultFieldType();
        fieldType.setName("field");
        fieldType.setIndexOptions(IndexOptions.DOCS);
        expectThrows(UnsupportedOperationException.class,
            () -> fieldType.regexpQuery("foo.*", 0, 10, null, null));
    }

    public void testFuzzyQuery() {
        MappedFieldType fieldType = createDefaultFieldType();
        fieldType.setName("field");
        fieldType.setIndexOptions(IndexOptions.DOCS);
        expectThrows(UnsupportedOperationException.class,
            () -> fieldType.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true));
    }

    public void testPrefixQuery() {
        MappedFieldType fieldType = createDefaultFieldType();
        fieldType.setName("field");
        fieldType.setIndexOptions(IndexOptions.DOCS);
        expectThrows(UnsupportedOperationException.class,
            () -> fieldType.prefixQuery("prefix", null, null));
    }

    public void testRangeQuery() {
        MappedFieldType fieldType = createDefaultFieldType();
        fieldType.setName("field");
        fieldType.setIndexOptions(IndexOptions.DOCS);

        Collator defaultCollator = Collator.getInstance().freeze();
        ((CollationFieldType) fieldType).setCollator(defaultCollator);

        RawCollationKey aKey = defaultCollator.getRawCollationKey("a", null);
        RawCollationKey bKey = defaultCollator.getRawCollationKey("b", null);

        TermRangeQuery expected = new TermRangeQuery("field", new BytesRef(aKey.bytes, 0, aKey.size),
            new BytesRef(bKey.bytes, 0, bKey.size), false, false);
        assertEquals(expected, fieldType.rangeQuery("a", "b", false, false, null));

        fieldType.setIndexOptions(IndexOptions.NONE);
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> fieldType.rangeQuery("a", "b", false, false, null));
        assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
    }
}

View File

@ -0,0 +1,443 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits;
import com.ibm.icu.text.Collator;
import com.ibm.icu.text.RuleBasedCollator;
import com.ibm.icu.util.ULocale;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Collection;
import java.util.Collections;
/**
 * Integration tests for the {@code icu_collation_keyword} field type, exercising the
 * ICU collator mapping options: language, strength, decomposition, alternate,
 * variable_top, numeric, case_level, case_first and custom tailoring rules.
 */
public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singletonList(AnalysisICUPlugin.class);
    }

    /**
     * Indexes the two equivalent values as documents "1" and "2", runs a term query for one
     * of them (picked at random) sorted on the collated field, and verifies that both
     * documents match. Since both values collate to the same key, the secondary descending
     * {@code _uid} sort determines the hit order ("2" before "1").
     */
    private void assertBothMatchAndCollateToSameValue(String index, String type, String[] equivalent) throws Exception {
        indexRandom(true,
            client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON),
            client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON)
        );

        // searching for either of the terms should return both results since they collate to the same value
        SearchRequest request = new SearchRequest()
            .indices(index)
            .types(type)
            .source(new SearchSourceBuilder()
                .fetchSource(false)
                .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
                .sort("collate")
                .sort("_uid", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
            );

        SearchResponse response = client().search(request).actionGet();
        assertNoFailures(response);
        assertHitCount(response, 2L);
        assertOrderedSearchHits(response, "2", "1");
    }

    /*
     * Turkish has some funny casing.
     * This test shows how you can solve this kind of thing easily with collation.
     * Instead of using LowerCaseFilter, use a Turkish collator with primary strength.
     * Then things will sort and match correctly.
     */
    public void testBasicUsage() throws Exception {
        String index = "foo";
        String type = "mytype";
        String[] equivalent = {"I WİLL USE TURKİSH CASING", "ı will use turkish casıng"};

        XContentBuilder builder = jsonBuilder()
            .startObject().startObject("properties")
                .startObject("collate")
                    .field("type", "icu_collation_keyword")
                    .field("language", "tr")
                    .field("strength", "primary")
                .endObject()
            .endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

        // both values should collate to same value
        assertBothMatchAndCollateToSameValue(index, type, equivalent);
    }

    /*
     * Test usage of the decomposition option for unicode normalization:
     * the first value uses a decomposed I (\u0049) plus a combining dot above (\u0307),
     * which canonical decomposition folds together with the precomposed İ of testBasicUsage.
     */
    public void testNormalization() throws Exception {
        String index = "foo";
        String type = "mytype";
        String[] equivalent = {"I W\u0049\u0307LL USE TURKİSH CASING", "ı will use turkish casıng"};

        XContentBuilder builder = jsonBuilder()
            .startObject().startObject("properties")
                .startObject("collate")
                    .field("type", "icu_collation_keyword")
                    .field("language", "tr")
                    .field("strength", "primary")
                    .field("decomposition", "canonical")
                .endObject()
            .endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

        assertBothMatchAndCollateToSameValue(index, type, equivalent);
    }

    /*
     * Test secondary strength; for English, case is not significant at that level.
     */
    public void testSecondaryStrength() throws Exception {
        String index = "foo";
        String type = "mytype";
        String[] equivalent = {"TESTING", "testing"};

        XContentBuilder builder = jsonBuilder()
            .startObject().startObject("properties")
                .startObject("collate")
                    .field("type", "icu_collation_keyword")
                    .field("language", "en")
                    .field("strength", "secondary")
                    .field("decomposition", "no")
                .endObject()
            .endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

        assertBothMatchAndCollateToSameValue(index, type, equivalent);
    }

    /*
     * Setting alternate=shifted to shift whitespace, punctuation and symbols
     * to quaternary level, so "foo-bar" and "foo bar" compare equal at primary strength.
     */
    public void testIgnorePunctuation() throws Exception {
        String index = "foo";
        String type = "mytype";
        String[] equivalent = {"foo-bar", "foo bar"};

        XContentBuilder builder = jsonBuilder()
            .startObject().startObject("properties")
                .startObject("collate")
                    .field("type", "icu_collation_keyword")
                    .field("language", "en")
                    .field("strength", "primary")
                    .field("alternate", "shifted")
                .endObject()
            .endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

        assertBothMatchAndCollateToSameValue(index, type, equivalent);
    }

    /*
     * Setting alternate=shifted and variableTop to shift whitespace, but not
     * punctuation or symbols, to quaternary level.
     */
    public void testIgnoreWhitespace() throws Exception {
        String index = "foo";
        String type = "mytype";

        XContentBuilder builder = jsonBuilder()
            .startObject().startObject("properties")
                .startObject("collate")
                    .field("type", "icu_collation_keyword")
                    .field("language", "en")
                    .field("strength", "primary")
                    .field("alternate", "shifted")
                    .field("variable_top", " ")
                    .field("index", false)
                .endObject()
            .endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

        indexRandom(true,
            client().prepareIndex(index, type, "1").setSource("{\"collate\":\"foo bar\"}", XContentType.JSON),
            client().prepareIndex(index, type, "2").setSource("{\"collate\":\"foobar\"}", XContentType.JSON),
            client().prepareIndex(index, type, "3").setSource("{\"collate\":\"foo-bar\"}", XContentType.JSON)
        );

        SearchRequest request = new SearchRequest()
            .indices(index)
            .types(type)
            .source(new SearchSourceBuilder()
                .fetchSource(false)
                .sort("collate", SortOrder.ASC)
                .sort("_uid", SortOrder.ASC) // secondary sort should kick in on docs 1 and 3 because same collate value
            );

        SearchResponse response = client().search(request).actionGet();
        assertNoFailures(response);
        assertHitCount(response, 3L);
        assertOrderedSearchHits(response, "3", "1", "2");
    }

    /*
     * Setting numeric to encode digits with numeric value, so that
     * foobar-9 sorts before foobar-10.
     */
    public void testNumerics() throws Exception {
        String index = "foo";
        String type = "mytype";

        XContentBuilder builder = jsonBuilder()
            .startObject().startObject("properties")
                .startObject("collate")
                    .field("type", "icu_collation_keyword")
                    .field("language", "en")
                    .field("numeric", true)
                    .field("index", false)
                .endObject()
            .endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

        indexRandom(true,
            client().prepareIndex(index, type, "1").setSource("{\"collate\":\"foobar-10\"}", XContentType.JSON),
            client().prepareIndex(index, type, "2").setSource("{\"collate\":\"foobar-9\"}", XContentType.JSON)
        );

        SearchRequest request = new SearchRequest()
            .indices(index)
            .types(type)
            .source(new SearchSourceBuilder()
                .fetchSource(false)
                .sort("collate", SortOrder.ASC)
            );

        SearchResponse response = client().search(request).actionGet();
        assertNoFailures(response);
        assertHitCount(response, 2L);
        assertOrderedSearchHits(response, "2", "1");
    }

    /*
     * Setting caseLevel=true to create an additional case level between
     * secondary and tertiary: accents are ignored but case still matters.
     */
    public void testIgnoreAccentsButNotCase() throws Exception {
        String index = "foo";
        String type = "mytype";

        XContentBuilder builder = jsonBuilder()
            .startObject().startObject("properties")
                .startObject("collate")
                    .field("type", "icu_collation_keyword")
                    .field("language", "en")
                    .field("strength", "primary")
                    .field("case_level", true)
                    .field("index", false)
                .endObject()
            .endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

        indexRandom(true,
            client().prepareIndex(index, type, "1").setSource("{\"collate\":\"résumé\"}", XContentType.JSON),
            client().prepareIndex(index, type, "2").setSource("{\"collate\":\"Resume\"}", XContentType.JSON),
            client().prepareIndex(index, type, "3").setSource("{\"collate\":\"resume\"}", XContentType.JSON),
            client().prepareIndex(index, type, "4").setSource("{\"collate\":\"Résumé\"}", XContentType.JSON)
        );

        // lowercase pair ("resume", "résumé") sorts before the uppercase pair; within each
        // pair the accented/unaccented values tie and _uid DESC breaks the tie
        SearchRequest request = new SearchRequest()
            .indices(index)
            .types(type)
            .source(new SearchSourceBuilder()
                .fetchSource(false)
                .sort("collate", SortOrder.ASC)
                .sort("_uid", SortOrder.DESC)
            );

        SearchResponse response = client().search(request).actionGet();
        assertNoFailures(response);
        assertHitCount(response, 4L);
        assertOrderedSearchHits(response, "3", "1", "4", "2");
    }

    /*
     * Setting caseFirst=upper to cause uppercase strings to sort
     * before lowercase ones.
     */
    public void testUpperCaseFirst() throws Exception {
        String index = "foo";
        String type = "mytype";

        XContentBuilder builder = jsonBuilder()
            .startObject().startObject("properties")
                .startObject("collate")
                    .field("type", "icu_collation_keyword")
                    .field("language", "en")
                    .field("strength", "tertiary")
                    .field("case_first", "upper")
                    .field("index", false)
                .endObject()
            .endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

        indexRandom(true,
            client().prepareIndex(index, type, "1").setSource("{\"collate\":\"resume\"}", XContentType.JSON),
            client().prepareIndex(index, type, "2").setSource("{\"collate\":\"Resume\"}", XContentType.JSON)
        );

        SearchRequest request = new SearchRequest()
            .indices(index)
            .types(type)
            .source(new SearchSourceBuilder()
                .fetchSource(false)
                .sort("collate", SortOrder.ASC)
            );

        SearchResponse response = client().search(request).actionGet();
        assertNoFailures(response);
        assertHitCount(response, 2L);
        assertOrderedSearchHits(response, "2", "1");
    }

    /*
     * For German, you might want oe to sort and match with o umlaut.
     * This is not the default, but you can make a customized ruleset to do this.
     *
     * The default is DIN 5007-1; this shows how to tailor a collator to get DIN 5007-2 behavior.
     * http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4423383
     */
    public void testCustomRules() throws Exception {
        String index = "foo";
        String type = "mytype";

        RuleBasedCollator baseCollator = (RuleBasedCollator) Collator.getInstance(new ULocale("de_DE"));
        // Tailor ae/oe/ue (and their uppercase forms) to sort with the umlauted letters.
        // NOTE: the "UE" rule previously targeted lowercase u\u0308; fixed to uppercase U\u0308.
        String din5007_2Tailorings =
            "& ae , a\u0308 & AE , A\u0308" +
            "& oe , o\u0308 & OE , O\u0308" +
            "& ue , u\u0308 & UE , U\u0308";

        RuleBasedCollator tailoredCollator = new RuleBasedCollator(baseCollator.getRules() + din5007_2Tailorings);
        String tailoredRules = tailoredCollator.getRules();

        String[] equivalent = {"Töne", "Toene"};

        XContentBuilder builder = jsonBuilder()
            .startObject().startObject("properties")
                .startObject("collate")
                    .field("type", "icu_collation_keyword")
                    .field("rules", tailoredRules)
                    .field("strength", "primary")
                .endObject()
            .endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

        assertBothMatchAndCollateToSameValue(index, type, equivalent);
    }
}

View File

@ -0,0 +1,342 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import static org.hamcrest.Matchers.equalTo;
import com.ibm.icu.text.Collator;
import com.ibm.icu.text.RawCollationKey;
import com.ibm.icu.util.ULocale;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
/**
 * Unit tests for the {@code icu_collation_keyword} field mapper: mapping parsing,
 * the Lucene fields produced when indexing a document (indexed collation key plus
 * sorted doc values), the index/store/doc_values/index_options/norms flags, and
 * rejection of collator-setting changes on mapping update.
 */
public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {

    // Name of the field type registered by the ICU analysis plugin.
    private static final String FIELD_TYPE = "icu_collation_keyword";

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return Arrays.asList(AnalysisICUPlugin.class, InternalSettingsPlugin.class);
    }

    // Recreated for every test by setup().
    IndexService indexService;
    DocumentMapperParser parser;

    @Before
    public void setup() {
        indexService = createIndex("test");
        parser = indexService.mapperService().documentMapperParser();
    }

    /**
     * With default settings the mapper emits two Lucene fields — an indexed,
     * untokenized field holding the raw ICU collation key, and a SORTED doc-values
     * field carrying the same key.
     */
    public void testDefaults() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
            .endObject().endObject().string();

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

        // Round-trips: the serialized mapping equals the input mapping.
        assertEquals(mapping, mapper.mappingSource().toString());

        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
            .field("field", "1234")
            .endObject()
            .bytes(),
            XContentType.JSON));

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(2, fields.length);

        // The stored bytes are the collation key of the input, not the input itself.
        Collator collator = Collator.getInstance();
        RawCollationKey key = collator.getRawCollationKey("1234", null);
        BytesRef expected = new BytesRef(key.bytes, 0, key.size);

        assertEquals(expected, fields[0].binaryValue());

        // First field: indexed (DOCS only), untokenized, not stored, no doc values.
        IndexableFieldType fieldType = fields[0].fieldType();
        assertThat(fieldType.omitNorms(), equalTo(true));
        assertFalse(fieldType.tokenized());
        assertFalse(fieldType.stored());
        assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS));
        assertThat(fieldType.storeTermVectors(), equalTo(false));
        assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
        assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
        assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
        assertEquals(DocValuesType.NONE, fieldType.docValuesType());

        // Second field: not indexed, SORTED doc values with the same key.
        assertEquals(expected, fields[1].binaryValue());
        fieldType = fields[1].fieldType();
        assertThat(fieldType.indexOptions(), equalTo(IndexOptions.NONE));
        assertEquals(DocValuesType.SORTED, fieldType.docValuesType());
    }

    /**
     * Without a null_value an explicit null produces no fields; with
     * null_value set, an explicit null indexes the collation key of that value
     * while an absent field still produces nothing.
     */
    public void testNullValue() throws IOException {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
            .endObject().endObject().string();

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        // No null_value configured: explicit null indexes nothing.
        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
            .nullField("field")
            .endObject()
            .bytes(),
            XContentType.JSON));
        assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));

        // Reconfigure with null_value = "1234".
        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", FIELD_TYPE)
            .field("null_value", "1234").endObject().endObject()
            .endObject().endObject().string();

        mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        // Field absent from the document: null_value does NOT apply.
        doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
            .endObject()
            .bytes(),
            XContentType.JSON));
        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(0, fields.length);

        // Field explicitly null: the null_value's collation key is indexed.
        doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
            .nullField("field")
            .endObject()
            .bytes(),
            XContentType.JSON));

        Collator collator = Collator.getInstance();
        RawCollationKey key = collator.getRawCollationKey("1234", null);
        BytesRef expected = new BytesRef(key.bytes, 0, key.size);

        fields = doc.rootDoc().getFields("field");
        assertEquals(2, fields.length);
        assertEquals(expected, fields[0].binaryValue());
    }

    /** store=true marks the indexed field as stored. */
    public void testEnableStore() throws IOException {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", FIELD_TYPE)
            .field("store", true).endObject().endObject()
            .endObject().endObject().string();

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
            .field("field", "1234")
            .endObject()
            .bytes(),
            XContentType.JSON));

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(2, fields.length);
        assertTrue(fields[0].fieldType().stored());
    }

    /** index=false leaves only the doc-values field. */
    public void testDisableIndex() throws IOException {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", FIELD_TYPE)
            .field("index", false).endObject().endObject()
            .endObject().endObject().string();

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
            .field("field", "1234")
            .endObject()
            .bytes(),
            XContentType.JSON));

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(1, fields.length);
        assertEquals(IndexOptions.NONE, fields[0].fieldType().indexOptions());
        assertEquals(DocValuesType.SORTED, fields[0].fieldType().docValuesType());
    }

    /** doc_values=false leaves only the indexed field. */
    public void testDisableDocValues() throws IOException {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", FIELD_TYPE)
            .field("doc_values", false).endObject().endObject()
            .endObject().endObject().string();

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
            .field("field", "1234")
            .endObject()
            .bytes(),
            XContentType.JSON));

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(1, fields.length);
        assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType());
    }

    /**
     * index_options up to "freqs" is accepted; "positions" and "offsets" are
     * rejected because the field type does not support positions.
     */
    public void testIndexOptions() throws IOException {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", FIELD_TYPE)
            .field("index_options", "freqs").endObject().endObject()
            .endObject().endObject().string();

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
            .field("field", "1234")
            .endObject()
            .bytes(),
            XContentType.JSON));

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(2, fields.length);
        assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions());

        // Position-bearing index options must be rejected at parse time.
        for (String indexOptions : Arrays.asList("positions", "offsets")) {
            final String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", FIELD_TYPE)
                .field("index_options", indexOptions).endObject().endObject()
                .endObject().endObject().string();
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> parser.parse("type", new CompressedXContent(mapping2)));
            assertEquals("The [" + FIELD_TYPE + "] field does not support positions, got [index_options]=" + indexOptions,
                e.getMessage());
        }
    }

    /** norms=true disables the default norm omission on the indexed field. */
    public void testEnableNorms() throws IOException {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", FIELD_TYPE)
            .field("norms", true).endObject().endObject()
            .endObject().endObject().string();

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
            .field("field", "1234")
            .endObject()
            .bytes(),
            XContentType.JSON));

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(2, fields.length);
        assertFalse(fields[0].fieldType().omitNorms());
    }

    /**
     * A Turkish primary-strength collator configured via the mapping must produce
     * the same collation key as an equivalently configured ICU collator built here,
     * for two strings that differ only in (Turkish) casing.
     */
    public void testCollator() throws IOException {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field")
            .field("type", FIELD_TYPE)
            .field("language", "tr")
            .field("strength", "primary")
            .endObject().endObject().endObject().endObject().string();

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
            .field("field", "I WİLL USE TURKİSH CASING")
            .endObject()
            .bytes(),
            XContentType.JSON));

        // Reference collator with identical settings, keyed on the lowercase variant.
        Collator collator = Collator.getInstance(new ULocale("tr"));
        collator.setStrength(Collator.PRIMARY);
        RawCollationKey key = collator.getRawCollationKey("ı will use turkish casıng", null); // should collate to same value
        BytesRef expected = new BytesRef(key.bytes, 0, key.size);

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(2, fields.length);
        assertEquals(expected, fields[0].binaryValue());

        IndexableFieldType fieldType = fields[0].fieldType();
        assertThat(fieldType.omitNorms(), equalTo(true));
        assertFalse(fieldType.tokenized());
        assertFalse(fieldType.stored());
        assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS));
        assertThat(fieldType.storeTermVectors(), equalTo(false));
        assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
        assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
        assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
        assertEquals(DocValuesType.NONE, fieldType.docValuesType());

        assertEquals(expected, fields[1].binaryValue());
        fieldType = fields[1].fieldType();
        assertThat(fieldType.indexOptions(), equalTo(IndexOptions.NONE));
        assertEquals(DocValuesType.SORTED, fieldType.docValuesType());
    }

    /**
     * Collator settings are final: a mapping update that changes language or
     * strength must be rejected with a merge conflict listing both settings.
     */
    public void testUpdateCollator() throws IOException {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field")
            .field("type", FIELD_TYPE)
            .field("language", "tr")
            .field("strength", "primary")
            .endObject().endObject().endObject().endObject().string();
        indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, randomBoolean());

        // Attempt to switch language (and implicitly drop strength) → conflict.
        String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field")
            .field("type", FIELD_TYPE)
            .field("language", "en")
            .endObject().endObject().endObject().endObject().string();
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> indexService.mapperService().merge("type",
                new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, randomBoolean()));
        assertEquals("Can't merge because of conflicts: [Cannot update language setting for [" + FIELD_TYPE
            + "], Cannot update strength setting for [" + FIELD_TYPE + "]]", e.getMessage());
    }
}

View File

@ -25,7 +25,7 @@ import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.script.mustache.MustacheScriptEngineService;
import org.elasticsearch.script.mustache.MustacheScriptEngine;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
@ -41,7 +41,7 @@ public abstract class AbstractScriptTestCase extends ESTestCase {
Settings settings = Settings.builder()
.put("path.home", createTempDir())
.build();
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(new MustacheScriptEngineService()));
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(new MustacheScriptEngine()));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);

View File

@ -24,6 +24,10 @@
"typed_keys": {
"type" : "boolean",
"description" : "Specify whether aggregation and suggester names should be prefixed by their respective types in the response"
},
"max_concurrent_searches" : {
"type" : "number",
"description" : "Controls the maximum number of concurrent searches the multi search api will execute"
}
}
},

View File

@ -175,13 +175,15 @@ public abstract class FieldTypeTestCase extends ESTestCase {
// TODO: remove this once toString is no longer final on FieldType...
protected void assertFieldTypeEquals(String property, MappedFieldType ft1, MappedFieldType ft2) {
if (ft1.equals(ft2) == false) {
fail("Expected equality, testing property " + property + "\nexpected: " + toString(ft1) + "; \nactual: " + toString(ft2) + "\n");
fail("Expected equality, testing property " + property + "\nexpected: " + toString(ft1) + "; \nactual: " + toString(ft2)
+ "\n");
}
}
protected void assertFieldTypeNotEquals(String property, MappedFieldType ft1, MappedFieldType ft2) {
if (ft1.equals(ft2)) {
fail("Expected inequality, testing property " + property + "\nfirst: " + toString(ft1) + "; \nsecond: " + toString(ft2) + "\n");
fail("Expected inequality, testing property " + property + "\nfirst: " + toString(ft1) + "; \nsecond: " + toString(ft2)
+ "\n");
}
}

View File

@ -46,7 +46,7 @@ import static java.util.Collections.emptyMap;
*
* The function is used to provide the result of the script execution and can return anything.
*/
public class MockScriptEngine implements ScriptEngineService {
public class MockScriptEngine implements ScriptEngine {
public static final String NAME = "mockscript";

View File

@ -34,7 +34,7 @@ public abstract class MockScriptPlugin extends Plugin implements ScriptPlugin {
public static final String NAME = MockScriptEngine.NAME;
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
public ScriptEngine getScriptEngine(Settings settings) {
return new MockScriptEngine(pluginScriptLang(), pluginScripts());
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.search.aggregations;
package org.elasticsearch.test;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
@ -76,16 +76,6 @@ import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggreg
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.ParsedBucketMetricValue;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.ParsedPercentilesBucket;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.ParsedStatsBucket;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ParsedExtendedStatsBucket;
import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.derivative.ParsedDerivative;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import java.io.IOException;