* Removed the `script.default_lang` setting and made `painless` the hardcoded default script language.
** The default script language is now maintained in the `Script` class.
* Added a `script.legacy.default_lang` setting that controls the default language for scripts stored inside documents (for example percolator queries). It defaults to `groovy`.
** Added `QueryParseContext#getDefaultScriptLanguage()`, which manages the default scripting language. It always returns `painless`, unless a query/search request is loaded in legacy mode, in which case it returns whatever is configured in the `script.legacy.default_lang` setting.
** In the aggregation parsing code, added `XContentParseContext`, which, like `QueryParseContext`, carries the default scripting language. Most aggregation parsers don't have access to a `QueryParseContext`; this covers scripts used inside aggregations.
* The `lang` script field is now always serialized (`toXContent`).

Closes #20122
This commit is contained in:
parent 0d7dfcd798
commit 245882cde3
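The net effect on `Script` can be illustrated with a small hedged sketch (not part of the diff itself; it assumes the modified `Script` class below and the `ScriptService.ScriptType` enum used elsewhere in this commit):

```java
// Sketch only: a script built without an explicit language now carries the
// hardcoded default instead of a null lang.
Script script = new Script("doc['price'].value * 2", ScriptService.ScriptType.INLINE, null, null);

// lang was null, so the constructor substitutes Script.DEFAULT_SCRIPT_LANG:
String lang = script.getLang();   // "painless"
// ...and toXContent(...) now always emits the "lang" field as well.
```

The diff follows.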
@@ -25,8 +25,11 @@ import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptSettings;

import java.io.IOException;
import java.util.Objects;

@@ -42,11 +45,18 @@ public class QueryParseContext implements ParseFieldMatcherSupplier {
private final XContentParser parser;
private final IndicesQueriesRegistry indicesQueriesRegistry;
private final ParseFieldMatcher parseFieldMatcher;
+ private final String defaultScriptLanguage;

public QueryParseContext(IndicesQueriesRegistry registry, XContentParser parser, ParseFieldMatcher parseFieldMatcher) {
+ this(Script.DEFAULT_SCRIPT_LANG, registry, parser, parseFieldMatcher);
}

+ public QueryParseContext(String defaultScriptLanguage, IndicesQueriesRegistry registry, XContentParser parser,
+ ParseFieldMatcher parseFieldMatcher) {
this.indicesQueriesRegistry = Objects.requireNonNull(registry, "indices queries registry cannot be null");
this.parser = Objects.requireNonNull(parser, "parser cannot be null");
this.parseFieldMatcher = Objects.requireNonNull(parseFieldMatcher, "parse field matcher cannot be null");
+ this.defaultScriptLanguage = defaultScriptLanguage;
}

public XContentParser parser() {

@@ -127,4 +137,12 @@ public class QueryParseContext implements ParseFieldMatcherSupplier {
public ParseFieldMatcher getParseFieldMatcher() {
return parseFieldMatcher;
}

+ /**
+ * Returns the default scripting language, that should be used if scripts don't specify the script language
+ * explicitly.
+ */
+ public String getDefaultScriptLanguage() {
+ return defaultScriptLanguage;
+ }
}

@@ -28,6 +28,7 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptService;
+ import org.elasticsearch.script.ScriptSettings;

/**
* Context object used to rewrite {@link QueryBuilder} instances into simplified version.

@@ -101,9 +102,18 @@ public class QueryRewriteContext implements ParseFieldMatcherSupplier {

/**
* Returns a new {@link QueryParseContext} that wraps the provided parser, using the ParseFieldMatcher settings that
- * are configured in the index settings
+ * are configured in the index settings. The default script language will always default to Painless.
*/
public QueryParseContext newParseContext(XContentParser parser) {
return new QueryParseContext(indicesQueriesRegistry, parser, indexSettings.getParseFieldMatcher());
}

+ /**
+ * Returns a new {@link QueryParseContext} like {@link #newParseContext(XContentParser)} with the only diffence, that
+ * the default script language will default to what has been set in the 'script.legacy.default_lang' setting.
+ */
+ public QueryParseContext newParseContextWithLegacyScriptLanguage(XContentParser parser) {
+ String defaultScriptLanguage = ScriptSettings.getLegacyDefaultLang(indexSettings.getNodeSettings());
+ return new QueryParseContext(defaultScriptLanguage, indicesQueriesRegistry, parser, indexSettings.getParseFieldMatcher());
+ }
}

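A hedged sketch of how a caller might pick between the two factory methods. The helper, the `fromLegacyIndex` flag, and the surrounding request-parsing code are hypothetical and not part of this excerpt; only the two factory methods and `parseInnerQueryBuilder()` come from the diff:

```java
// Hypothetical caller: choose the parse context depending on whether the source
// being parsed comes from a pre-5.0 ("legacy") index, e.g. a stored percolator query.
static Optional<QueryBuilder> parseQuery(QueryRewriteContext rewriteContext, XContentParser parser, boolean fromLegacyIndex)
        throws IOException {
    QueryParseContext parseContext = fromLegacyIndex
            ? rewriteContext.newParseContextWithLegacyScriptLanguage(parser) // default lang from script.legacy.default_lang
            : rewriteContext.newParseContext(parser);                        // default lang is always "painless"
    return parseContext.parseInnerQueryBuilder();
}
```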
@@ -106,7 +106,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
- script = Script.parse(parser, parseContext.getParseFieldMatcher());
+ script = Script.parse(parser, parseContext.getParseFieldMatcher(), parseContext.getDefaultScriptLanguage());
} else {
throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]");
}

@@ -116,7 +116,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
- script = Script.parse(parser, parseContext.getParseFieldMatcher());
+ script = Script.parse(parser, parseContext.getParseFieldMatcher(), parseContext.getDefaultScriptLanguage());
} else {
throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]");
}

@@ -115,7 +115,7 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder<ScriptScore
currentFieldName = parser.currentName();
} else {
if (parseContext.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
- script = Script.parse(parser, parseContext.getParseFieldMatcher());
+ script = Script.parse(parser, parseContext.getParseFieldMatcher(), parseContext.getDefaultScriptLanguage());
} else {
throw new ParsingException(parser.getTokenLocation(), NAME + " query does not support [" + currentFieldName + "]");
}

@@ -44,6 +44,7 @@ import java.util.Objects;
public final class Script implements ToXContent, Writeable {

public static final ScriptType DEFAULT_TYPE = ScriptType.INLINE;
+ public static final String DEFAULT_SCRIPT_LANG = "painless";

private String script;
private ScriptType type;

@@ -60,7 +61,7 @@ public final class Script implements ToXContent, Writeable {
this(script, ScriptType.INLINE, null, null);
}

- public Script(String script, ScriptType type, @Nullable String lang, @Nullable Map<String, ?> params) {
+ public Script(String script, ScriptType type, String lang, @Nullable Map<String, ?> params) {
this(script, type, lang, params, null);
}

@@ -78,14 +79,14 @@ public final class Script implements ToXContent, Writeable {
* when serializing the script back to xcontent.
*/
@SuppressWarnings("unchecked")
- public Script(String script, ScriptType type, @Nullable String lang, @Nullable Map<String, ?> params,
+ public Script(String script, ScriptType type, String lang, @Nullable Map<String, ?> params,
@Nullable XContentType contentType) {
if (contentType != null && type != ScriptType.INLINE) {
throw new IllegalArgumentException("The parameter contentType only makes sense for inline scripts");
}
this.script = Objects.requireNonNull(script);
this.type = Objects.requireNonNull(type);
- this.lang = lang;
+ this.lang = lang == null ? DEFAULT_SCRIPT_LANG : lang;
this.params = (Map<String, Object>) params;
this.contentType = contentType;
}

@@ -135,7 +136,7 @@ public final class Script implements ToXContent, Writeable {
* @return The type of script -- inline, stored, or file.
*/
public ScriptType getType() {
- return type == null ? DEFAULT_TYPE : type;
+ return type;
}

/**

@@ -196,7 +197,7 @@ public final class Script implements ToXContent, Writeable {
token = parser.nextToken();
}
if (token == XContentParser.Token.VALUE_STRING) {
- return new Script(parser.text());
+ return new Script(parser.text(), ScriptType.INLINE, lang, null);
}
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("expected a string value or an object, but found [{}] instead", token);

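A short hedged sketch of what the `Script.parse` change means for callers; `parser`, `parseFieldMatcher`, and `parseContext` are assumed to come from the surrounding query-parsing code shown elsewhere in this diff:

```java
// Fragment: the three-argument overload threads the default language through
// instead of relying on the removed node-level script.default_lang setting.
Script script = Script.parse(parser, parseFieldMatcher, parseContext.getDefaultScriptLanguage());
// A plain string script ("script": "doc['f'].value") is now turned into
// new Script(parser.text(), ScriptType.INLINE, lang, null) rather than being
// left without a language.
```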
@@ -92,8 +92,6 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
public static final Setting<Integer> SCRIPT_MAX_COMPILATIONS_PER_MINUTE =
Setting.intSetting("script.max_compilations_per_minute", 15, 0, Property.Dynamic, Property.NodeScope);

- private final String defaultLang;

private final Collection<ScriptEngineService> scriptEngines;
private final Map<String, ScriptEngineService> scriptEnginesByLang;
private final Map<String, ScriptEngineService> scriptEnginesByExt;

@@ -131,8 +129,6 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
this.scriptContextRegistry = scriptContextRegistry;
int cacheMaxSize = SCRIPT_CACHE_SIZE_SETTING.get(settings);

- this.defaultLang = scriptSettings.getDefaultScriptLanguageSetting().get(settings);

CacheBuilder<CacheKey, CompiledScript> cacheBuilder = CacheBuilder.builder();
if (cacheMaxSize >= 0) {
cacheBuilder.setMaximumWeight(cacheMaxSize);

@@ -222,11 +218,6 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
}

String lang = script.getLang();

- if (lang == null) {
- lang = defaultLang;
- }

ScriptEngineService scriptEngineService = getScriptEngineServiceForLang(lang);
if (canExecuteScript(lang, script.getType(), scriptContext) == false) {
throw new IllegalStateException("scripts of type [" + script.getType() + "], operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are disabled");

@@ -285,7 +276,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
throw new IllegalArgumentException("The parameter script (Script) must not be null.");
}

- String lang = script.getLang() == null ? defaultLang : script.getLang();
+ String lang = script.getLang();
ScriptType type = script.getType();
//script.getScript() could return either a name or code for a script,
//but we check for a file script name first and an indexed script name second

@@ -364,9 +355,8 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
}

private String validateScriptLanguage(String scriptLang) {
- if (scriptLang == null) {
- scriptLang = defaultLang;
- } else if (scriptEnginesByLang.containsKey(scriptLang) == false) {
+ Objects.requireNonNull(scriptLang);
+ if (scriptEnginesByLang.containsKey(scriptLang) == false) {
throw new IllegalArgumentException("script_lang not supported [" + scriptLang + "]");
}
return scriptLang;

@@ -32,7 +32,16 @@ import java.util.function.Function;

public class ScriptSettings {

public static final String DEFAULT_LANG = "painless";
+ static final String LEGACY_DEFAULT_LANG = "groovy";

+ /**
+ * The default script language to use for scripts that are stored in documents that have no script lang set explicitly.
+ * This setting is legacy setting and only applies for indices created on ES versions prior to version 5.0
+ *
+ * This constant will be removed in the next major release.
+ */
+ @Deprecated
+ public static final String LEGACY_SCRIPT_SETTING = "script.legacy.default_lang";

private static final Map<ScriptService.ScriptType, Setting<Boolean>> SCRIPT_TYPE_SETTING_MAP;

@@ -49,7 +58,7 @@ public class ScriptSettings {

private final Map<ScriptContext, Setting<Boolean>> scriptContextSettingMap;
private final List<Setting<Boolean>> scriptLanguageSettings;
- private final Setting<String> defaultScriptLanguageSetting;
+ private final Setting<String> defaultLegacyScriptLanguageSetting;

public ScriptSettings(ScriptEngineRegistry scriptEngineRegistry, ScriptContextRegistry scriptContextRegistry) {
Map<ScriptContext, Setting<Boolean>> scriptContextSettingMap = contextSettings(scriptContextRegistry);

@@ -58,8 +67,8 @@ public class ScriptSettings {
List<Setting<Boolean>> scriptLanguageSettings = languageSettings(SCRIPT_TYPE_SETTING_MAP, scriptContextSettingMap, scriptEngineRegistry, scriptContextRegistry);
this.scriptLanguageSettings = Collections.unmodifiableList(scriptLanguageSettings);

- this.defaultScriptLanguageSetting = new Setting<>("script.default_lang", DEFAULT_LANG, setting -> {
- if (!DEFAULT_LANG.equals(setting) && !scriptEngineRegistry.getRegisteredLanguages().containsKey(setting)) {
+ this.defaultLegacyScriptLanguageSetting = new Setting<>(LEGACY_SCRIPT_SETTING, LEGACY_DEFAULT_LANG, setting -> {
+ if (!LEGACY_DEFAULT_LANG.equals(setting) && !scriptEngineRegistry.getRegisteredLanguages().containsKey(setting)) {
throw new IllegalArgumentException("unregistered default language [" + setting + "]");
}
return setting;

@@ -160,7 +169,7 @@ public class ScriptSettings {
settings.addAll(SCRIPT_TYPE_SETTING_MAP.values());
settings.addAll(scriptContextSettingMap.values());
settings.addAll(scriptLanguageSettings);
- settings.add(defaultScriptLanguageSetting);
+ settings.add(defaultLegacyScriptLanguageSetting);
return settings;
}

@@ -168,7 +177,11 @@ public class ScriptSettings {
return scriptLanguageSettings;
}

- public Setting<String> getDefaultScriptLanguageSetting() {
-     return defaultScriptLanguageSetting;
+ public Setting<String> getDefaultLegacyScriptLanguageSetting() {
+     return defaultLegacyScriptLanguageSetting;
}

+ public static String getLegacyDefaultLang(Settings settings) {
+     return settings.get(LEGACY_SCRIPT_SETTING, ScriptSettings.LEGACY_DEFAULT_LANG);
+ }
}

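A hedged sketch of how the legacy default is resolved from node settings. The settings object below is hypothetical; only `getLegacyDefaultLang`, the setting name, and the `groovy` fallback come from this diff:

```java
// Hypothetical node settings for illustration.
Settings nodeSettings = Settings.builder()
        .put("script.legacy.default_lang", "groovy")   // ScriptSettings.LEGACY_SCRIPT_SETTING
        .build();

// Falls back to "groovy" (LEGACY_DEFAULT_LANG) when the setting is absent.
String legacyDefault = ScriptSettings.getLegacyDefaultLang(nodeSettings);
// QueryRewriteContext#newParseContextWithLegacyScriptLanguage(...) uses this value
// as the QueryParseContext default script language.
```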
@@ -19,11 +19,11 @@
package org.elasticsearch.search.aggregations.bucket.geogrid;

import org.elasticsearch.common.ParseField;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.GeoPointValuesSourceParser;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -65,16 +65,17 @@ public class GeoHashGridParser extends GeoPointValuesSourceParser {
}

@Override
- protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
- ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
+ protected boolean token(String aggregationName, String currentFieldName, Token token,
+ XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
+ XContentParser parser = context.getParser();
if (token == XContentParser.Token.VALUE_NUMBER || token == XContentParser.Token.VALUE_STRING) {
- if (parseFieldMatcher.match(currentFieldName, GeoHashGridParams.FIELD_PRECISION)) {
+ if (context.matchField(currentFieldName, GeoHashGridParams.FIELD_PRECISION)) {
otherOptions.put(GeoHashGridParams.FIELD_PRECISION, parser.intValue());
return true;
- } else if (parseFieldMatcher.match(currentFieldName, GeoHashGridParams.FIELD_SIZE)) {
+ } else if (context.matchField(currentFieldName, GeoHashGridParams.FIELD_SIZE)) {
otherOptions.put(GeoHashGridParams.FIELD_SIZE, parser.intValue());
return true;
- } else if (parseFieldMatcher.match(currentFieldName, GeoHashGridParams.FIELD_SHARD_SIZE)) {
+ } else if (context.matchField(currentFieldName, GeoHashGridParams.FIELD_SHARD_SIZE)) {
otherOptions.put(GeoHashGridParams.FIELD_SHARD_SIZE, parser.intValue());
return true;
}

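The aggregation parsers below all follow this same pattern. The new `XContentParseContext` class itself is not part of this excerpt; judging only by the calls the parsers make (`getParser()`, `getParseFieldMatcher()`, `matchField(...)`, `getDefaultScriptLanguage()`), it presumably looks roughly like the following sketch, which may differ from the real class in detail:

```java
// Hedged sketch of the XContentParseContext the refactored parsers receive.
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;

public class XContentParseContext {

    private final XContentParser parser;
    private final ParseFieldMatcher parseFieldMatcher;
    private final String defaultScriptLanguage;

    public XContentParseContext(XContentParser parser, ParseFieldMatcher parseFieldMatcher, String defaultScriptLanguage) {
        this.parser = parser;
        this.parseFieldMatcher = parseFieldMatcher;
        this.defaultScriptLanguage = defaultScriptLanguage;
    }

    public XContentParser getParser() {
        return parser;
    }

    public ParseFieldMatcher getParseFieldMatcher() {
        return parseFieldMatcher;
    }

    public String getDefaultScriptLanguage() {
        return defaultScriptLanguage;
    }

    /** Shorthand the parsers use instead of calling parseFieldMatcher.match(...) directly. */
    public boolean matchField(String fieldName, ParseField field) {
        return parseFieldMatcher.match(fieldName, field);
    }
}
```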
@@ -19,11 +19,11 @@
package org.elasticsearch.search.aggregations.bucket.histogram;

import org.elasticsearch.common.ParseField;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -79,10 +79,11 @@ public class DateHistogramParser extends NumericValuesSourceParser {
}

@Override
- protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
- ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
+ protected boolean token(String aggregationName, String currentFieldName, Token token,
+ XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
+ XContentParser parser = context.getParser();
if (token.isValue()) {
- if (parseFieldMatcher.match(currentFieldName, Histogram.INTERVAL_FIELD)) {
+ if (context.matchField(currentFieldName, Histogram.INTERVAL_FIELD)) {
if (token == XContentParser.Token.VALUE_STRING) {
otherOptions.put(Histogram.INTERVAL_FIELD, new DateHistogramInterval(parser.text()));
return true;

@@ -90,13 +91,13 @@ public class DateHistogramParser extends NumericValuesSourceParser {
otherOptions.put(Histogram.INTERVAL_FIELD, parser.longValue());
return true;
}
- } else if (parseFieldMatcher.match(currentFieldName, Histogram.MIN_DOC_COUNT_FIELD)) {
+ } else if (context.matchField(currentFieldName, Histogram.MIN_DOC_COUNT_FIELD)) {
otherOptions.put(Histogram.MIN_DOC_COUNT_FIELD, parser.longValue());
return true;
- } else if (parseFieldMatcher.match(currentFieldName, Histogram.KEYED_FIELD)) {
+ } else if (context.matchField(currentFieldName, Histogram.KEYED_FIELD)) {
otherOptions.put(Histogram.KEYED_FIELD, parser.booleanValue());
return true;
- } else if (parseFieldMatcher.match(currentFieldName, Histogram.OFFSET_FIELD)) {
+ } else if (context.matchField(currentFieldName, Histogram.OFFSET_FIELD)) {
if (token == XContentParser.Token.VALUE_STRING) {
otherOptions.put(Histogram.OFFSET_FIELD,
DateHistogramAggregationBuilder.parseStringOffset(parser.text()));

@@ -109,7 +110,7 @@ public class DateHistogramParser extends NumericValuesSourceParser {
return false;
}
} else if (token == XContentParser.Token.START_OBJECT) {
- if (parseFieldMatcher.match(currentFieldName, Histogram.ORDER_FIELD)) {
+ if (context.matchField(currentFieldName, Histogram.ORDER_FIELD)) {
InternalOrder order = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {

@@ -127,9 +128,10 @@ public class DateHistogramParser extends NumericValuesSourceParser {
}
otherOptions.put(Histogram.ORDER_FIELD, order);
return true;
- } else if (parseFieldMatcher.match(currentFieldName, ExtendedBounds.EXTENDED_BOUNDS_FIELD)) {
+ } else if (context.matchField(currentFieldName, ExtendedBounds.EXTENDED_BOUNDS_FIELD)) {
try {
- otherOptions.put(ExtendedBounds.EXTENDED_BOUNDS_FIELD, ExtendedBounds.PARSER.apply(parser, () -> parseFieldMatcher));
+ otherOptions.put(ExtendedBounds.EXTENDED_BOUNDS_FIELD,
+ ExtendedBounds.PARSER.apply(parser, context::getParseFieldMatcher));
} catch (Exception e) {
throw new ParsingException(parser.getTokenLocation(), "Error parsing [{}]", e, aggregationName);
}

@@ -19,13 +19,13 @@
package org.elasticsearch.search.aggregations.bucket.histogram;

import org.elasticsearch.common.ParseField;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -85,26 +85,27 @@ public class HistogramParser extends NumericValuesSourceParser {
}

@Override
- protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
- ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
+ protected boolean token(String aggregationName, String currentFieldName, Token token,
+ XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
+ XContentParser parser = context.getParser();
if (token.isValue()) {
- if (parseFieldMatcher.match(currentFieldName, Histogram.INTERVAL_FIELD)) {
+ if (context.matchField(currentFieldName, Histogram.INTERVAL_FIELD)) {
otherOptions.put(Histogram.INTERVAL_FIELD, parser.doubleValue());
return true;
- } else if (parseFieldMatcher.match(currentFieldName, Histogram.MIN_DOC_COUNT_FIELD)) {
+ } else if (context.matchField(currentFieldName, Histogram.MIN_DOC_COUNT_FIELD)) {
otherOptions.put(Histogram.MIN_DOC_COUNT_FIELD, parser.longValue());
return true;
- } else if (parseFieldMatcher.match(currentFieldName, Histogram.KEYED_FIELD)) {
+ } else if (context.matchField(currentFieldName, Histogram.KEYED_FIELD)) {
otherOptions.put(Histogram.KEYED_FIELD, parser.booleanValue());
return true;
- } else if (parseFieldMatcher.match(currentFieldName, Histogram.OFFSET_FIELD)) {
+ } else if (context.matchField(currentFieldName, Histogram.OFFSET_FIELD)) {
otherOptions.put(Histogram.OFFSET_FIELD, parser.doubleValue());
return true;
} else {
return false;
}
} else if (token == XContentParser.Token.START_OBJECT) {
- if (parseFieldMatcher.match(currentFieldName, Histogram.ORDER_FIELD)) {
+ if (context.matchField(currentFieldName, Histogram.ORDER_FIELD)) {
InternalOrder order = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {

@@ -122,8 +123,8 @@ public class HistogramParser extends NumericValuesSourceParser {
}
otherOptions.put(Histogram.ORDER_FIELD, order);
return true;
- } else if (parseFieldMatcher.match(currentFieldName, Histogram.EXTENDED_BOUNDS_FIELD)) {
- double[] bounds = EXTENDED_BOUNDS_PARSER.apply(parser, () -> parseFieldMatcher);
+ } else if (context.matchField(currentFieldName, Histogram.EXTENDED_BOUNDS_FIELD)) {
+ double[] bounds = EXTENDED_BOUNDS_PARSER.apply(parser, context::getParseFieldMatcher);
otherOptions.put(Histogram.EXTENDED_BOUNDS_FIELD, bounds);
return true;
} else {

@@ -19,9 +19,9 @@
package org.elasticsearch.search.aggregations.bucket.missing;

import org.elasticsearch.common.ParseField;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -35,8 +35,8 @@ public class MissingParser extends AnyValuesSourceParser {
}

@Override
- protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
- ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
+ protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
+ XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
return false;
}

@@ -24,6 +24,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -65,20 +66,21 @@ public class RangeParser extends NumericValuesSourceParser {
}

@Override
- protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
- ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
+ protected boolean token(String aggregationName, String currentFieldName, Token token,
+ XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
+ XContentParser parser = context.getParser();
if (token == XContentParser.Token.START_ARRAY) {
- if (parseFieldMatcher.match(currentFieldName, RangeAggregator.RANGES_FIELD)) {
+ if (context.matchField(currentFieldName, RangeAggregator.RANGES_FIELD)) {
List<Range> ranges = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
- Range range = parseRange(parser, parseFieldMatcher);
+ Range range = parseRange(parser, context.getParseFieldMatcher());
ranges.add(range);
}
otherOptions.put(RangeAggregator.RANGES_FIELD, ranges);
return true;
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
- if (parseFieldMatcher.match(currentFieldName, RangeAggregator.KEYED_FIELD)) {
+ if (context.matchField(currentFieldName, RangeAggregator.KEYED_FIELD)) {
boolean keyed = parser.booleanValue();
otherOptions.put(RangeAggregator.KEYED_FIELD, keyed);
return true;

@@ -19,7 +19,6 @@
package org.elasticsearch.search.aggregations.bucket.range.geodistance;

import org.elasticsearch.common.ParseField;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;

@@ -30,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.GeoPointValuesSourceParser;
import org.elasticsearch.search.aggregations.support.GeoPointParser;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -110,28 +110,29 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser {
}

@Override
- protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
- ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
- if (geoPointParser.token(aggregationName, currentFieldName, token, parser, parseFieldMatcher, otherOptions)) {
+ protected boolean token(String aggregationName, String currentFieldName, Token token,
+ XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
+ XContentParser parser = context.getParser();
+ if (geoPointParser.token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
return true;
} else if (token == XContentParser.Token.VALUE_STRING) {
- if (parseFieldMatcher.match(currentFieldName, UNIT_FIELD)) {
+ if (context.matchField(currentFieldName, UNIT_FIELD)) {
DistanceUnit unit = DistanceUnit.fromString(parser.text());
otherOptions.put(UNIT_FIELD, unit);
return true;
- } else if (parseFieldMatcher.match(currentFieldName, DISTANCE_TYPE_FIELD)) {
+ } else if (context.matchField(currentFieldName, DISTANCE_TYPE_FIELD)) {
GeoDistance distanceType = GeoDistance.fromString(parser.text());
otherOptions.put(DISTANCE_TYPE_FIELD, distanceType);
return true;
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
- if (parseFieldMatcher.match(currentFieldName, RangeAggregator.KEYED_FIELD)) {
+ if (context.matchField(currentFieldName, RangeAggregator.KEYED_FIELD)) {
boolean keyed = parser.booleanValue();
otherOptions.put(RangeAggregator.KEYED_FIELD, keyed);
return true;
}
} else if (token == XContentParser.Token.START_ARRAY) {
- if (parseFieldMatcher.match(currentFieldName, RangeAggregator.RANGES_FIELD)) {
+ if (context.matchField(currentFieldName, RangeAggregator.RANGES_FIELD)) {
List<Range> ranges = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String fromAsStr = null;

@@ -144,17 +145,17 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser {
if (token == XContentParser.Token.FIELD_NAME) {
toOrFromOrKey = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
- if (parseFieldMatcher.match(toOrFromOrKey, Range.FROM_FIELD)) {
+ if (context.matchField(toOrFromOrKey, Range.FROM_FIELD)) {
from = parser.doubleValue();
- } else if (parseFieldMatcher.match(toOrFromOrKey, Range.TO_FIELD)) {
+ } else if (context.matchField(toOrFromOrKey, Range.TO_FIELD)) {
to = parser.doubleValue();
}
} else if (token == XContentParser.Token.VALUE_STRING) {
- if (parseFieldMatcher.match(toOrFromOrKey, Range.KEY_FIELD)) {
+ if (context.matchField(toOrFromOrKey, Range.KEY_FIELD)) {
key = parser.text();
- } else if (parseFieldMatcher.match(toOrFromOrKey, Range.FROM_FIELD)) {
+ } else if (context.matchField(toOrFromOrKey, Range.FROM_FIELD)) {
fromAsStr = parser.text();
- } else if (parseFieldMatcher.match(toOrFromOrKey, Range.TO_FIELD)) {
+ } else if (context.matchField(toOrFromOrKey, Range.TO_FIELD)) {
toAsStr = parser.text();
}
}

@@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.BytesValuesSourceParser;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder.Range;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

@@ -102,21 +103,22 @@ public class IpRangeParser extends BytesValuesSourceParser {

@Override
protected boolean token(String aggregationName, String currentFieldName,
- Token token, XContentParser parser,
- ParseFieldMatcher parseFieldMatcher,
- Map<ParseField, Object> otherOptions) throws IOException {
- if (parseFieldMatcher.match(currentFieldName, RangeAggregator.RANGES_FIELD)) {
+ Token token,
+ XContentParseContext context,
+ Map<ParseField, Object> otherOptions) throws IOException {
+ XContentParser parser = context.getParser();
+ if (context.matchField(currentFieldName, RangeAggregator.RANGES_FIELD)) {
if (parser.currentToken() != Token.START_ARRAY) {
throw new ParsingException(parser.getTokenLocation(), "[ranges] must be passed as an array, but got a " + token);
}
List<Range> ranges = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
- Range range = parseRange(parser, parseFieldMatcher);
+ Range range = parseRange(parser, context.getParseFieldMatcher());
ranges.add(range);
}
otherOptions.put(RangeAggregator.RANGES_FIELD, ranges);
return true;
- } else if (parseFieldMatcher.match(parser.currentName(), RangeAggregator.KEYED_FIELD)) {
+ } else if (context.matchField(parser.currentName(), RangeAggregator.KEYED_FIELD)) {
otherOptions.put(RangeAggregator.KEYED_FIELD, parser.booleanValue());
return true;
}

@@ -20,9 +20,9 @@ package org.elasticsearch.search.aggregations.bucket.sampler;

import org.elasticsearch.common.ParseField;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -57,20 +57,21 @@ public class DiversifiedSamplerParser extends AnyValuesSourceParser {
}

@Override
- protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
- ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
+ protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
+ XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
+ XContentParser parser = context.getParser();
if (token == XContentParser.Token.VALUE_NUMBER) {
- if (parseFieldMatcher.match(currentFieldName, SamplerAggregator.SHARD_SIZE_FIELD)) {
+ if (context.matchField(currentFieldName, SamplerAggregator.SHARD_SIZE_FIELD)) {
int shardSize = parser.intValue();
otherOptions.put(SamplerAggregator.SHARD_SIZE_FIELD, shardSize);
return true;
- } else if (parseFieldMatcher.match(currentFieldName, SamplerAggregator.MAX_DOCS_PER_VALUE_FIELD)) {
+ } else if (context.matchField(currentFieldName, SamplerAggregator.MAX_DOCS_PER_VALUE_FIELD)) {
int maxDocsPerValue = parser.intValue();
otherOptions.put(SamplerAggregator.MAX_DOCS_PER_VALUE_FIELD, maxDocsPerValue);
return true;
}
} else if (token == XContentParser.Token.VALUE_STRING) {
- if (parseFieldMatcher.match(currentFieldName, SamplerAggregator.EXECUTION_HINT_FIELD)) {
+ if (context.matchField(currentFieldName, SamplerAggregator.EXECUTION_HINT_FIELD)) {
String executionHint = parser.text();
otherOptions.put(SamplerAggregator.EXECUTION_HINT_FIELD, executionHint);
return true;

@@ -19,7 +19,6 @@
package org.elasticsearch.search.aggregations.bucket.significant;

import org.elasticsearch.common.ParseField;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.ParseFieldRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;

@@ -33,6 +32,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.AbstractTermsParser;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -81,17 +81,18 @@ public class SignificantTermsParser extends AbstractTermsParser {
}

@Override
- public boolean parseSpecial(String aggregationName, XContentParser parser, ParseFieldMatcher parseFieldMatcher, Token token,
- String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException {
+ public boolean parseSpecial(String aggregationName, XContentParseContext context, Token token,
+ String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException {
if (token == XContentParser.Token.START_OBJECT) {
SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserRegistry
- .lookupReturningNullIfNotFound(currentFieldName, parseFieldMatcher);
+ .lookupReturningNullIfNotFound(currentFieldName, context.getParseFieldMatcher());
if (significanceHeuristicParser != null) {
- SignificanceHeuristic significanceHeuristic = significanceHeuristicParser.parse(parser, parseFieldMatcher);
+ SignificanceHeuristic significanceHeuristic = significanceHeuristicParser.parse(context);
otherOptions.put(SignificantTermsAggregationBuilder.HEURISTIC, significanceHeuristic);
return true;
- } else if (parseFieldMatcher.match(currentFieldName, SignificantTermsAggregationBuilder.BACKGROUND_FILTER)) {
- QueryParseContext queryParseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
+ } else if (context.matchField(currentFieldName, SignificantTermsAggregationBuilder.BACKGROUND_FILTER)) {
+ QueryParseContext queryParseContext = new QueryParseContext(context.getDefaultScriptLanguage(), queriesRegistry,
+ context.getParser(), context.getParseFieldMatcher());
Optional<QueryBuilder> filter = queryParseContext.parseInnerQueryBuilder();
if (filter.isPresent()) {
otherOptions.put(SignificantTermsAggregationBuilder.BACKGROUND_FILTER, filter.get());

@@ -22,12 +22,12 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;

import org.elasticsearch.ElasticsearchParseException;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardException;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;

import java.io.IOException;

@@ -113,13 +113,13 @@ public class GND extends NXYSignificanceHeuristic {
}

@Override
- public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
- throws IOException, QueryShardException {
+ public SignificanceHeuristic parse(XContentParseContext context) throws IOException, QueryShardException {
+ XContentParser parser = context.getParser();
String givenName = parser.currentName();
boolean backgroundIsSuperset = true;
XContentParser.Token token = parser.nextToken();
while (!token.equals(XContentParser.Token.END_OBJECT)) {
- if (parseFieldMatcher.match(parser.currentName(), BACKGROUND_IS_SUPERSET)) {
+ if (context.matchField(parser.currentName(), BACKGROUND_IS_SUPERSET)) {
parser.nextToken();
backgroundIsSuperset = parser.booleanValue();
} else {

@@ -22,12 +22,12 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;

import org.elasticsearch.ElasticsearchParseException;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardException;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;

import java.io.IOException;

@@ -104,8 +104,9 @@ public class JLHScore extends SignificanceHeuristic {
return builder;
}

- public static SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
+ public static SignificanceHeuristic parse(XContentParseContext context)
throws IOException, QueryShardException {
+ XContentParser parser = context.getParser();
// move to the closing bracket
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
throw new ElasticsearchParseException(

@@ -23,12 +23,12 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardException;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;

import java.io.IOException;

@@ -152,17 +152,18 @@ public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic {
public abstract static class NXYParser implements SignificanceHeuristicParser {

@Override
- public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
+ public SignificanceHeuristic parse(XContentParseContext context)
throws IOException, QueryShardException {
+ XContentParser parser = context.getParser();
String givenName = parser.currentName();
boolean includeNegatives = false;
boolean backgroundIsSuperset = true;
XContentParser.Token token = parser.nextToken();
while (!token.equals(XContentParser.Token.END_OBJECT)) {
- if (parseFieldMatcher.match(parser.currentName(), INCLUDE_NEGATIVES_FIELD)) {
+ if (context.matchField(parser.currentName(), INCLUDE_NEGATIVES_FIELD)) {
parser.nextToken();
includeNegatives = parser.booleanValue();
- } else if (parseFieldMatcher.match(parser.currentName(), BACKGROUND_IS_SUPERSET)) {
+ } else if (context.matchField(parser.currentName(), BACKGROUND_IS_SUPERSET)) {
parser.nextToken();
backgroundIsSuperset = parser.booleanValue();
} else {

@@ -22,12 +22,12 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;

import org.elasticsearch.ElasticsearchParseException;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardException;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;

import java.io.IOException;

@@ -56,8 +56,9 @@ public class PercentageScore extends SignificanceHeuristic {
return builder;
}

- public static SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
+ public static SignificanceHeuristic parse(XContentParseContext context)
throws IOException, QueryShardException {
+ XContentParser parser = context.getParser();
// move to the closing bracket
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken());

@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;

import org.elasticsearch.ElasticsearchParseException;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLoggerFactory;

@@ -35,6 +34,7 @@ import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.InternalAggregation;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;

@@ -146,8 +146,9 @@ public class ScriptHeuristic extends SignificanceHeuristic {
return Objects.equals(script, other.script);
}

- public static SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
+ public static SignificanceHeuristic parse(XContentParseContext context)
throws IOException, QueryShardException {
+ XContentParser parser = context.getParser();
String heuristicName = parser.currentName();
Script script = null;
XContentParser.Token token;

@@ -156,8 +157,8 @@ public class ScriptHeuristic extends SignificanceHeuristic {
if (token.equals(XContentParser.Token.FIELD_NAME)) {
currentFieldName = parser.currentName();
} else {
- if (parseFieldMatcher.match(currentFieldName, ScriptField.SCRIPT)) {
- script = Script.parse(parser, parseFieldMatcher);
+ if (context.matchField(currentFieldName, ScriptField.SCRIPT)) {
+ script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else {
throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown object [{}]", heuristicName, currentFieldName);
}

@@ -20,9 +20,9 @@

package org.elasticsearch.search.aggregations.bucket.significant.heuristics;

- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;

import java.io.IOException;

@@ -31,6 +31,5 @@ import java.io.IOException;
*/
@FunctionalInterface
public interface SignificanceHeuristicParser {
- SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException,
-     ParsingException;
+ SignificanceHeuristic parse(XContentParseContext context) throws IOException, ParsingException;
}

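Because the interface is a `@FunctionalInterface` with a single `parse(XContentParseContext)` method, the static heuristic parsers above can presumably be registered as method references. A hedged sketch (the registry wiring is hypothetical and not part of this excerpt; `context` is assumed to come from the surrounding aggregation parsing code):

```java
// Hypothetical wiring: JLHScore.parse(XContentParseContext) now matches the
// functional interface directly, so a method reference suffices.
SignificanceHeuristicParser jlhParser = JLHScore::parse;
SignificanceHeuristic heuristic = jlhParser.parse(context);
```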
@@ -20,13 +20,13 @@
package org.elasticsearch.search.aggregations.bucket.terms;

import org.elasticsearch.common.ParseField;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

@@ -89,47 +89,48 @@ public abstract class AbstractTermsParser extends AnyValuesSourceParser {
Map<ParseField, Object> otherOptions);

@Override
- protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
- ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
- if (incExcParser.token(currentFieldName, token, parser, parseFieldMatcher, otherOptions)) {
+ protected boolean token(String aggregationName, String currentFieldName, Token token,
+ XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
+ XContentParser parser = context.getParser();
+ if (incExcParser.token(currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
return true;
} else if (token == XContentParser.Token.VALUE_STRING) {
- if (parseFieldMatcher.match(currentFieldName, EXECUTION_HINT_FIELD_NAME)) {
+ if (context.matchField(currentFieldName, EXECUTION_HINT_FIELD_NAME)) {
otherOptions.put(EXECUTION_HINT_FIELD_NAME, parser.text());
return true;
- } else if (parseFieldMatcher.match(currentFieldName, SubAggCollectionMode.KEY)) {
- otherOptions.put(SubAggCollectionMode.KEY, SubAggCollectionMode.parse(parser.text(), parseFieldMatcher));
+ } else if (context.matchField(currentFieldName, SubAggCollectionMode.KEY)) {
+ otherOptions.put(SubAggCollectionMode.KEY, SubAggCollectionMode.parse(parser.text(), context.getParseFieldMatcher()));
return true;
- } else if (parseFieldMatcher.match(currentFieldName, REQUIRED_SIZE_FIELD_NAME)) {
+ } else if (context.matchField(currentFieldName, REQUIRED_SIZE_FIELD_NAME)) {
otherOptions.put(REQUIRED_SIZE_FIELD_NAME, parser.intValue());
return true;
- } else if (parseSpecial(aggregationName, parser, parseFieldMatcher, token, currentFieldName, otherOptions)) {
+ } else if (parseSpecial(aggregationName, context, token, currentFieldName, otherOptions)) {
return true;
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
- if (parseFieldMatcher.match(currentFieldName, REQUIRED_SIZE_FIELD_NAME)) {
+ if (context.matchField(currentFieldName, REQUIRED_SIZE_FIELD_NAME)) {
otherOptions.put(REQUIRED_SIZE_FIELD_NAME, parser.intValue());
return true;
- } else if (parseFieldMatcher.match(currentFieldName, SHARD_SIZE_FIELD_NAME)) {
+ } else if (context.matchField(currentFieldName, SHARD_SIZE_FIELD_NAME)) {
otherOptions.put(SHARD_SIZE_FIELD_NAME, parser.intValue());
return true;
- } else if (parseFieldMatcher.match(currentFieldName, MIN_DOC_COUNT_FIELD_NAME)) {
+ } else if (context.matchField(currentFieldName, MIN_DOC_COUNT_FIELD_NAME)) {
otherOptions.put(MIN_DOC_COUNT_FIELD_NAME, parser.longValue());
return true;
- } else if (parseFieldMatcher.match(currentFieldName, SHARD_MIN_DOC_COUNT_FIELD_NAME)) {
+ } else if (context.matchField(currentFieldName, SHARD_MIN_DOC_COUNT_FIELD_NAME)) {
otherOptions.put(SHARD_MIN_DOC_COUNT_FIELD_NAME, parser.longValue());
return true;
- } else if (parseSpecial(aggregationName, parser, parseFieldMatcher, token, currentFieldName, otherOptions)) {
+ } else if (parseSpecial(aggregationName, context, token, currentFieldName, otherOptions)) {
return true;
}
- } else if (parseSpecial(aggregationName, parser, parseFieldMatcher, token, currentFieldName, otherOptions)) {
+ } else if (parseSpecial(aggregationName, context, token, currentFieldName, otherOptions)) {
return true;
}
return false;
}

- public abstract boolean parseSpecial(String aggregationName, XContentParser parser, ParseFieldMatcher parseFieldMatcher,
-     XContentParser.Token token, String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException;
+ public abstract boolean parseSpecial(String aggregationName, XContentParseContext context,
+     Token token, String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException;

protected abstract TermsAggregator.BucketCountThresholds getDefaultBucketCountThresholds();

@@ -19,7 +19,6 @@
package org.elasticsearch.search.aggregations.bucket.terms;

import org.elasticsearch.common.ParseField;
- import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;

@@ -27,6 +26,7 @@ import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
+ import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -75,15 +75,16 @@ public class TermsParser extends AbstractTermsParser {
}

@Override
- public boolean parseSpecial(String aggregationName, XContentParser parser, ParseFieldMatcher parseFieldMatcher, Token token,
- String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException {
+ public boolean parseSpecial(String aggregationName, XContentParseContext context, Token token,
+ String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException {
+ XContentParser parser = context.getParser();
if (token == XContentParser.Token.START_OBJECT) {
- if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) {
+ if (context.matchField(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) {
otherOptions.put(TermsAggregationBuilder.ORDER_FIELD, Collections.singletonList(parseOrderParam(aggregationName, parser)));
return true;
}
} else if (token == XContentParser.Token.START_ARRAY) {
- if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) {
+ if (context.matchField(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) {
List<OrderElement> orderElements = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) {

@@ -98,7 +99,7 @@ public class TermsParser extends AbstractTermsParser {
return true;
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
- if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR)) {
+ if (context.matchField(currentFieldName, TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR)) {
otherOptions.put(TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR, parser.booleanValue());
return true;
}

@@ -19,9 +19,9 @@
package org.elasticsearch.search.aggregations.metrics.avg;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -38,8 +38,8 @@ public class AvgParser extends NumericValuesSourceParser {
}

@Override
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
return false;
}
@@ -20,10 +20,9 @@
package org.elasticsearch.search.aggregations.metrics.cardinality;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -51,13 +50,13 @@ public class CardinalityParser extends AnyValuesSourceParser {
}

@Override
protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
protected boolean token(String aggregationName, String currentFieldName, Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
if (token.isValue()) {
if (parseFieldMatcher.match(currentFieldName, CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD)) {
otherOptions.put(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD, parser.longValue());
if (context.matchField(currentFieldName, CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD)) {
otherOptions.put(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD, context.getParser().longValue());
return true;
} else if (parseFieldMatcher.match(currentFieldName, REHASH)) {
} else if (context.matchField(currentFieldName, REHASH)) {
// ignore
return true;
}
@@ -20,10 +20,10 @@
package org.elasticsearch.search.aggregations.metrics.geobounds;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.GeoPointValuesSourceParser;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -48,11 +48,11 @@ public class GeoBoundsParser extends GeoPointValuesSourceParser {
}

@Override
protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
protected boolean token(String aggregationName, String currentFieldName, Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
if (token == XContentParser.Token.VALUE_BOOLEAN) {
if (parseFieldMatcher.match(currentFieldName, GeoBoundsAggregator.WRAP_LONGITUDE_FIELD)) {
otherOptions.put(GeoBoundsAggregator.WRAP_LONGITUDE_FIELD, parser.booleanValue());
if (context.matchField(currentFieldName, GeoBoundsAggregator.WRAP_LONGITUDE_FIELD)) {
otherOptions.put(GeoBoundsAggregator.WRAP_LONGITUDE_FIELD, context.getParser().booleanValue());
return true;
}
}
@@ -20,10 +20,9 @@
package org.elasticsearch.search.aggregations.metrics.geocentroid;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.GeoPointValuesSourceParser;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -40,8 +39,8 @@ public class GeoCentroidParser extends GeoPointValuesSourceParser {
}

@Override
protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
protected boolean token(String aggregationName, String currentFieldName, Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
return false;
}
@@ -19,9 +19,9 @@
package org.elasticsearch.search.aggregations.metrics.max;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -38,8 +38,8 @@ public class MaxParser extends NumericValuesSourceParser {
}

@Override
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
return false;
}
@@ -19,10 +19,9 @@
package org.elasticsearch.search.aggregations.metrics.min;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -39,8 +38,8 @@ public class MinParser extends NumericValuesSourceParser {
}

@Override
protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
protected boolean token(String aggregationName, String currentFieldName, Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
return false;
}
@@ -21,10 +21,10 @@ package org.elasticsearch.search.aggregations.metrics.percentiles;

import com.carrotsearch.hppc.DoubleArrayList;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

@@ -45,10 +45,11 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse
}

@Override
protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
protected boolean token(String aggregationName, String currentFieldName, Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
XContentParser parser = context.getParser();
if (token == XContentParser.Token.START_ARRAY) {
if (parseFieldMatcher.match(currentFieldName, keysField())) {
if (context.matchField(currentFieldName, keysField())) {
DoubleArrayList values = new DoubleArrayList(10);
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
double value = parser.doubleValue();

@@ -61,7 +62,7 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse
return false;
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if (parseFieldMatcher.match(currentFieldName, KEYED_FIELD)) {
if (context.matchField(currentFieldName, KEYED_FIELD)) {
boolean keyed = parser.booleanValue();
otherOptions.put(KEYED_FIELD, keyed);
return true;

@@ -80,7 +81,7 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (parseFieldMatcher.match(currentFieldName, COMPRESSION_FIELD)) {
if (context.matchField(currentFieldName, COMPRESSION_FIELD)) {
double compression = parser.doubleValue();
otherOptions.put(COMPRESSION_FIELD, compression);
} else {

@@ -96,7 +97,7 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (parseFieldMatcher.match(currentFieldName, NUMBER_SIGNIFICANT_DIGITS_FIELD)) {
if (context.matchField(currentFieldName, NUMBER_SIGNIFICANT_DIGITS_FIELD)) {
int numberOfSignificantValueDigits = parser.intValue();
otherOptions.put(NUMBER_SIGNIFICANT_DIGITS_FIELD, numberOfSignificantValueDigits);
} else {
@@ -232,13 +232,13 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.VALUE_STRING) {
if (context.getParseFieldMatcher().match(currentFieldName, INIT_SCRIPT_FIELD)) {
initScript = Script.parse(parser, context.getParseFieldMatcher());
initScript = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else if (context.getParseFieldMatcher().match(currentFieldName, MAP_SCRIPT_FIELD)) {
mapScript = Script.parse(parser, context.getParseFieldMatcher());
mapScript = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else if (context.getParseFieldMatcher().match(currentFieldName, COMBINE_SCRIPT_FIELD)) {
combineScript = Script.parse(parser, context.getParseFieldMatcher());
combineScript = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else if (context.getParseFieldMatcher().match(currentFieldName, REDUCE_SCRIPT_FIELD)) {
reduceScript = Script.parse(parser, context.getParseFieldMatcher());
reduceScript = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else if (token == XContentParser.Token.START_OBJECT &&
context.getParseFieldMatcher().match(currentFieldName, PARAMS_FIELD)) {
params = parser.map();
@@ -19,9 +19,9 @@
package org.elasticsearch.search.aggregations.metrics.stats;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -38,8 +38,8 @@ public class StatsParser extends NumericValuesSourceParser {
}

@Override
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
return false;
}
@@ -19,9 +19,9 @@
package org.elasticsearch.search.aggregations.metrics.stats.extended;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -38,11 +38,11 @@ public class ExtendedStatsParser extends NumericValuesSourceParser {
}

@Override
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
if (parseFieldMatcher.match(currentFieldName, ExtendedStatsAggregator.SIGMA_FIELD)) {
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
if (context.matchField(currentFieldName, ExtendedStatsAggregator.SIGMA_FIELD)) {
if (token.isValue()) {
otherOptions.put(ExtendedStatsAggregator.SIGMA_FIELD, parser.doubleValue());
otherOptions.put(ExtendedStatsAggregator.SIGMA_FIELD, context.getParser().doubleValue());
return true;
}
}
@@ -19,9 +19,9 @@
package org.elasticsearch.search.aggregations.metrics.sum;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -38,8 +38,8 @@ public class SumParser extends NumericValuesSourceParser {
}

@Override
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
return false;
}
@@ -622,7 +622,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.SCRIPT_FIELD)) {
script = Script.parse(parser, context.getParseFieldMatcher());
script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else if (context.getParseFieldMatcher().match(currentFieldName,
SearchSourceBuilder.IGNORE_FAILURE_FIELD)) {
ignoreFailure = parser.booleanValue();

@@ -633,7 +633,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.SCRIPT_FIELD)) {
script = Script.parse(parser, context.getParseFieldMatcher());
script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else {
throw new ParsingException(parser.getTokenLocation(),
"Unknown key for a " + token + " in [" + currentFieldName + "].",
@@ -19,9 +19,9 @@
package org.elasticsearch.search.aggregations.metrics.valuecount;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

@@ -40,8 +40,8 @@ public class ValueCountParser extends AnyValuesSourceParser {
}

@Override
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
return false;
}
@@ -179,7 +179,7 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr
} else if (context.getParseFieldMatcher().match(currentFieldName, GAP_POLICY)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
} else if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, context.getParseFieldMatcher());
script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else {
throw new ParsingException(parser.getTokenLocation(),
"Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "].");

@@ -201,7 +201,7 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, context.getParseFieldMatcher());
script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else if (context.getParseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
Map<String, Object> map = parser.map();
bucketsPathsMap = new HashMap<>();

@@ -260,4 +260,4 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr
public String getWriteableName() {
return NAME;
}
}
}
@@ -142,7 +142,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg
} else if (context.getParseFieldMatcher().match(currentFieldName, GAP_POLICY)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
} else if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, context.getParseFieldMatcher());
script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else {
throw new ParsingException(parser.getTokenLocation(),
"Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "].");

@@ -164,7 +164,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, context.getParseFieldMatcher());
script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else if (context.getParseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
Map<String, Object> map = parser.map();
bucketsPathsMap = new HashMap<>();

@@ -219,4 +219,4 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg
public String getWriteableName() {
return NAME;
}
}
}
@@ -20,7 +20,6 @@
package org.elasticsearch.search.aggregations.support;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;

@@ -95,6 +94,8 @@ public abstract class AbstractValuesSourceParser<VS extends ValuesSource>
Object missing = null;
DateTimeZone timezone = null;
Map<ParseField, Object> otherOptions = new HashMap<>();
XContentParseContext parserContext =
new XContentParseContext(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());

XContentParser.Token token;
String currentFieldName = null;

@@ -126,22 +127,22 @@ public abstract class AbstractValuesSourceParser<VS extends ValuesSource>
+ valueType + "]. It can only work on value of type ["
+ targetValueType + "]");
}
} else if (!token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
} else if (!token(aggregationName, currentFieldName, token, parserContext, otherOptions)) {
throw new ParsingException(parser.getTokenLocation(),
"Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
}
} else if (!token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
} else if (!token(aggregationName, currentFieldName, token, parserContext, otherOptions)) {
throw new ParsingException(parser.getTokenLocation(),
"Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
}
} else if (scriptable && token == XContentParser.Token.START_OBJECT) {
if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, context.getParseFieldMatcher());
} else if (!token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else if (!token(aggregationName, currentFieldName, token, parserContext, otherOptions)) {
throw new ParsingException(parser.getTokenLocation(),
"Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
}
} else if (!token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
} else if (!token(aggregationName, currentFieldName, token, parserContext, otherOptions)) {
throw new ParsingException(parser.getTokenLocation(),
"Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
}

@@ -184,8 +185,7 @@ public abstract class AbstractValuesSourceParser<VS extends ValuesSource>
* the target type of the final value output by the aggregation
* @param otherOptions
* a {@link Map} containing the extra options parsed by the
* {@link #token(String, String, org.elasticsearch.common.xcontent.XContentParser.Token,
* XContentParser, ParseFieldMatcher, Map)}
* {@link #token(String, String, XContentParser.Token, XContentParseContext, Map)}
* method
* @return the created factory
*/

@@ -203,10 +203,8 @@ public abstract class AbstractValuesSourceParser<VS extends ValuesSource>
* the name of the current field being parsed
* @param token
* the current token for the parser
* @param parser
* the parser
* @param parseFieldMatcher
* the {@link ParseFieldMatcher} to use to match field names
* @param context
* the query context
* @param otherOptions
* a {@link Map} of options to be populated by successive calls
* to this method which will then be passed to the

@@ -217,6 +215,6 @@ public abstract class AbstractValuesSourceParser<VS extends ValuesSource>
* @throws IOException
* if an error occurs whilst parsing
*/
protected abstract boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException;
protected abstract boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException;
}
@@ -0,0 +1,65 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.support;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;

/**
 * A minimal context for parsing xcontent into aggregation builders.
 * Only a minimal set of dependencies and settings are available.
 */
public final class XContentParseContext {

    private final XContentParser parser;

    private final ParseFieldMatcher parseFieldMatcher;

    private final String defaultScriptLanguage;

    public XContentParseContext(XContentParser parser, ParseFieldMatcher parseFieldMatcher, String defaultScriptLanguage) {
        this.parser = parser;
        this.parseFieldMatcher = parseFieldMatcher;
        this.defaultScriptLanguage = defaultScriptLanguage;
    }

    public XContentParser getParser() {
        return parser;
    }

    public ParseFieldMatcher getParseFieldMatcher() {
        return parseFieldMatcher;
    }

    public String getDefaultScriptLanguage() {
        return defaultScriptLanguage;
    }

    /**
     * Returns whether the parse field we're looking for matches with the found field name.
     *
     * Helper that delegates to {@link ParseFieldMatcher#match(String, ParseField)}.
     */
    public boolean matchField(String fieldName, ParseField parseField) {
        return parseFieldMatcher.match(fieldName, parseField);
    }
}
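For orientation, here is a minimal sketch (not part of this commit; the `FOO_FIELD` name and its enclosing parser class are hypothetical) of how a values-source parser callback can consume this context instead of receiving the parser and the field matcher as separate arguments, mirroring the parsers changed above:

    // Hypothetical token() callback built on the new XContentParseContext.
    private static final ParseField FOO_FIELD = new ParseField("foo");

    protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
                            XContentParseContext context, Map<ParseField, Object> otherOptions) throws IOException {
        if (token.isValue() && context.matchField(currentFieldName, FOO_FIELD)) {
            // The underlying parser and the ParseFieldMatcher are both reached through the single context object.
            otherOptions.put(FOO_FIELD, context.getParser().longValue());
            return true;
        }
        return false;
    }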
@@ -1273,7 +1273,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (context.getParseFieldMatcher().match(currentFieldName, SCRIPT_FIELD)) {
script = Script.parse(parser, context.getParseFieldMatcher());
script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else if (context.getParseFieldMatcher().match(currentFieldName, IGNORE_FAILURE_FIELD)) {
ignoreFailure = parser.booleanValue();
} else {

@@ -1282,7 +1282,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (context.getParseFieldMatcher().match(currentFieldName, SCRIPT_FIELD)) {
script = Script.parse(parser, context.getParseFieldMatcher());
script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
} else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName
+ "].", parser.getTokenLocation());
@@ -244,7 +244,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
currentName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseField.match(currentName, ScriptField.SCRIPT)) {
script = Script.parse(parser, parseField);
script = Script.parse(parser, parseField, context.getDefaultScriptLanguage());
} else if (parseField.match(currentName, NESTED_FILTER_FIELD)) {
nestedFilter = context.parseInnerQueryBuilder();
} else {

@@ -260,7 +260,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
} else if (parseField.match(currentName, NESTED_PATH_FIELD)) {
nestedPath = parser.text();
} else if (parseField.match(currentName, ScriptField.SCRIPT)) {
script = Script.parse(parser, parseField);
script = Script.parse(parser, parseField, context.getDefaultScriptLanguage());
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] failed to parse field [" + currentName + "]");
}
@@ -60,14 +60,6 @@ import static org.hamcrest.Matchers.nullValue;

public class BulkWithUpdatesIT extends ESIntegTestCase {

@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.put("script.default_lang", CustomScriptPlugin.NAME)
.build();
}

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(CustomScriptPlugin.class);

@@ -557,6 +549,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
" \"script\" : {" +
" \"inline\" : \"ctx._source.field2 = 'value2'\"" +
" }," +
" \"lang\" : \"" + CustomScriptPlugin.NAME + "\"," +
" \"upsert\" : {" +
" \"field1\" : \"value1'\"" +
" }" +

@@ -589,7 +582,9 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
assertThat(bulkResponse.getItems().length, equalTo(3));
assertThat(bulkResponse.getItems()[0].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[2].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[2].isFailed(), equalTo(true));
assertThat(bulkResponse.getItems()[2].getFailure().getCause().getCause().getMessage(),
equalTo("script_lang not supported [painless]"));

client().admin().indices().prepareRefresh("test").get();
@@ -55,7 +55,7 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(script, notNullValue());
assertThat(script.getScript(), equalTo("script1"));
assertThat(script.getType(), equalTo(ScriptType.INLINE));
assertThat(script.getLang(), nullValue());
assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG));
Map<String, Object> params = script.getParams();
assertThat(params, nullValue());

@@ -67,7 +67,7 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(script, notNullValue());
assertThat(script.getScript(), equalTo("script1"));
assertThat(script.getType(), equalTo(ScriptType.INLINE));
assertThat(script.getLang(), nullValue());
assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG));
params = script.getParams();
assertThat(params, nullValue());

@@ -79,7 +79,7 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(script, notNullValue());
assertThat(script.getScript(), equalTo("script1"));
assertThat(script.getType(), equalTo(ScriptType.INLINE));
assertThat(script.getLang(), nullValue());
assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG));
params = script.getParams();
assertThat(params, notNullValue());
assertThat(params.size(), equalTo(1));

@@ -92,7 +92,7 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(script, notNullValue());
assertThat(script.getScript(), equalTo("script1"));
assertThat(script.getType(), equalTo(ScriptType.INLINE));
assertThat(script.getLang(), nullValue());
assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG));
params = script.getParams();
assertThat(params, notNullValue());
assertThat(params.size(), equalTo(1));

@@ -107,7 +107,7 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(script, notNullValue());
assertThat(script.getScript(), equalTo("script1"));
assertThat(script.getType(), equalTo(ScriptType.INLINE));
assertThat(script.getLang(), nullValue());
assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG));
params = script.getParams();
assertThat(params, notNullValue());
assertThat(params.size(), equalTo(1));

@@ -124,7 +124,7 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(script, notNullValue());
assertThat(script.getScript(), equalTo("script1"));
assertThat(script.getType(), equalTo(ScriptType.INLINE));
assertThat(script.getLang(), nullValue());
assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG));
params = script.getParams();
assertThat(params, notNullValue());
assertThat(params.size(), equalTo(1));
@@ -0,0 +1,63 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;

import static java.util.Collections.emptyList;

public class QueryRewriteContextTests extends ESTestCase {

    public void testNewParseContextWithLegacyScriptLanguage() throws Exception {
        String defaultLegacyScriptLanguage = randomAsciiOfLength(4);
        IndexMetaData.Builder indexMetadata = new IndexMetaData.Builder("index");
        indexMetadata.settings(Settings.builder().put("index.version.created", Version.CURRENT)
                .put("index.number_of_shards", 1)
                .put("index.number_of_replicas", 1)
        );
        IndicesQueriesRegistry indicesQueriesRegistry = new SearchModule(Settings.EMPTY, false, emptyList()).getQueryParserRegistry();
        IndexSettings indexSettings = new IndexSettings(indexMetadata.build(),
                Settings.builder().put(ScriptSettings.LEGACY_SCRIPT_SETTING, defaultLegacyScriptLanguage).build());
        QueryRewriteContext queryRewriteContext =
                new QueryRewriteContext(indexSettings, null, null, indicesQueriesRegistry, null, null, null);

        // verify that the default script language in the query parse context is equal to the defaultLegacyScriptLanguage variable:
        QueryParseContext queryParseContext =
                queryRewriteContext.newParseContextWithLegacyScriptLanguage(XContentHelper.createParser(new BytesArray("{}")));
        assertEquals(defaultLegacyScriptLanguage, queryParseContext.getDefaultScriptLanguage());

        // verify that the script query's script language is equal to the defaultLegacyScriptLanguage variable:
        XContentParser parser = XContentHelper.createParser(new BytesArray("{\"script\" : {\"script\": \"return true\"}}"));
        queryParseContext = queryRewriteContext.newParseContextWithLegacyScriptLanguage(parser);
        ScriptQueryBuilder queryBuilder = (ScriptQueryBuilder) queryParseContext.parseInnerQueryBuilder().get();
        assertEquals(defaultLegacyScriptLanguage, queryBuilder.script().getLang());
    }

}
@@ -45,6 +45,7 @@ import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

import static org.hamcrest.CoreMatchers.containsString;

@@ -86,7 +87,9 @@ public class ScriptServiceTests extends ESTestCase {
resourceWatcherService = new ResourceWatcherService(baseSettings, null);
scriptEngineService = new TestEngineService();
dangerousScriptEngineService = new TestDangerousEngineService();
scriptEnginesByLangMap = ScriptModesTests.buildScriptEnginesByLangMap(Collections.singleton(scriptEngineService));
TestEngineService defaultScriptServiceEngine = new TestEngineService(Script.DEFAULT_SCRIPT_LANG) {};
scriptEnginesByLangMap = ScriptModesTests.buildScriptEnginesByLangMap(
new HashSet<>(Arrays.asList(scriptEngineService, defaultScriptServiceEngine)));
//randomly register custom script contexts
int randomInt = randomIntBetween(0, 3);
//prevent duplicates using map

@@ -103,7 +106,8 @@ public class ScriptServiceTests extends ESTestCase {
String context = plugin + "_" + operation;
contexts.put(context, new ScriptContext.Plugin(plugin, operation));
}
scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(scriptEngineService, dangerousScriptEngineService));
scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(scriptEngineService, dangerousScriptEngineService,
defaultScriptServiceEngine));
scriptContextRegistry = new ScriptContextRegistry(contexts.values());
scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
scriptContexts = scriptContextRegistry.scriptContexts().toArray(new ScriptContext[scriptContextRegistry.scriptContexts().size()]);

@@ -406,12 +410,11 @@ public class ScriptServiceTests extends ESTestCase {

public void testDefaultLanguage() throws IOException {
Settings.Builder builder = Settings.builder();
builder.put("script.default_lang", "test");
builder.put("script.inline", "true");
buildScriptService(builder.build());
CompiledScript script = scriptService.compile(new Script("1 + 1", ScriptType.INLINE, null, null),
randomFrom(scriptContexts), Collections.emptyMap());
assertEquals(script.lang(), "test");
assertEquals(script.lang(), Script.DEFAULT_SCRIPT_LANG);
}

public void testStoreScript() throws Exception {

@@ -509,14 +512,24 @@ public class ScriptServiceTests extends ESTestCase {

public static final String NAME = "test";

private final String name;

public TestEngineService() {
this(NAME);
}

public TestEngineService(String name) {
this.name = name;
}

@Override
public String getType() {
return NAME;
return name;
}

@Override
public String getExtension() {
return NAME;
return name;
}

@Override
@@ -34,32 +34,33 @@ import static org.hamcrest.Matchers.equalTo;

public class ScriptSettingsTests extends ESTestCase {

public void testDefaultLanguageIsPainless() {
public void testDefaultLegacyLanguageIsPainless() {
ScriptEngineRegistry scriptEngineRegistry =
new ScriptEngineRegistry(Collections.singletonList(new CustomScriptEngineService()));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
assertThat(scriptSettings.getDefaultScriptLanguageSetting().get(Settings.EMPTY), equalTo("painless"));
assertThat(scriptSettings.getDefaultLegacyScriptLanguageSetting().get(Settings.EMPTY),
equalTo(ScriptSettings.LEGACY_DEFAULT_LANG));
}

public void testCustomDefaultLanguage() {
public void testCustomLegacyDefaultLanguage() {
ScriptEngineRegistry scriptEngineRegistry =
new ScriptEngineRegistry(Collections.singletonList(new CustomScriptEngineService()));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
String defaultLanguage = CustomScriptEngineService.NAME;
Settings settings = Settings.builder().put("script.default_lang", defaultLanguage).build();
assertThat(scriptSettings.getDefaultScriptLanguageSetting().get(settings), equalTo(defaultLanguage));
Settings settings = Settings.builder().put(ScriptSettings.LEGACY_SCRIPT_SETTING, defaultLanguage).build();
assertThat(scriptSettings.getDefaultLegacyScriptLanguageSetting().get(settings), equalTo(defaultLanguage));
}

public void testInvalidDefaultLanguage() {
public void testInvalidLegacyDefaultLanguage() {
ScriptEngineRegistry scriptEngineRegistry =
new ScriptEngineRegistry(Collections.singletonList(new CustomScriptEngineService()));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
Settings settings = Settings.builder().put("script.default_lang", "C++").build();
Settings settings = Settings.builder().put(ScriptSettings.LEGACY_SCRIPT_SETTING, "C++").build();
try {
scriptSettings.getDefaultScriptLanguageSetting().get(settings);
scriptSettings.getDefaultLegacyScriptLanguageSetting().get(settings);
fail("should have seen unregistered default language");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("unregistered default language [C++]"));
@@ -20,12 +20,10 @@ package org.elasticsearch.search.aggregations.bucket;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.plugins.Plugin;

@@ -49,6 +47,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.support.XContentParseContext;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods;

@@ -172,7 +171,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
@Override
public List<SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() {
return singletonList(new SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>(SimpleHeuristic.NAME,
SimpleHeuristic::new, SimpleHeuristic::parse));
SimpleHeuristic::new, (context) -> SimpleHeuristic.parse(context)));
}

@Override

@@ -239,9 +238,9 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
return subsetFreq / subsetSize > supersetFreq / supersetSize ? 2.0 : 1.0;
}

public static SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
public static SignificanceHeuristic parse(XContentParseContext context)
throws IOException, QueryShardException {
parser.nextToken();
context.getParser().nextToken();
return new SimpleHeuristic();
}
}
@@ -182,7 +182,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(context, null);
assertEquals("doc['field_name'].value * factor", builder.script().getScript());
assertNull(builder.script().getLang());
assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang());
assertEquals(1.1, builder.script().getParams().get("factor"));
assertEquals(ScriptType.INLINE, builder.script().getType());
assertEquals(ScriptSortType.NUMBER, builder.type());

@@ -208,7 +208,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(context, null);
assertEquals("doc['field_name'].value", builder.script().getScript());
assertNull(builder.script().getLang());
assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang());
assertNull(builder.script().getParams());
assertEquals(ScriptType.INLINE, builder.script().getType());
assertEquals(ScriptSortType.NUMBER, builder.type());
@@ -24,6 +24,10 @@ Instead a <<percolator,percolator field type>> must be configured prior to index
Indices with a `.percolator` type created on a version before 5.0.0 can still be used,
but new indices no longer accept the `.percolator` type.

However, it is strongly recommended to reindex any indices containing percolator queries created prior to
upgrading to Elasticsearch 5. By doing this, the `percolate` query can use the terms that the `percolator`
field type extracted from the percolator queries and potentially execute many times faster.

==== Percolate document mapping

The `percolate` query no longer modifies the mappings. Before the percolate API

@@ -53,6 +57,22 @@ The percolate stats have been removed. This is because the percolator no longer
The percolator no longer accepts percolator queries containing `range` queries with ranges that are based on the current
time (using `now`).

==== Percolator queries containing scripts

Percolator queries that contain scripts (for example, a `script` query or a `function_score` query script function) that
have no explicit language specified will use the Painless scripting language from version 5.0 and up.

Scripts with no explicit language set in percolator queries stored in indices created prior to version 5.0
will use the language that has been configured in the `script.legacy.default_lang` setting. This setting defaults to
the Groovy scripting language, which was the default for versions prior to 5.0. If your default scripting language was
different, set the `script.legacy.default_lang` setting to the language you used before.

In order to make use of the new `percolator` field type, all percolator queries should be reindexed into a new index.
When reindexing percolator queries with scripts that have no explicit language defined into a new index, one of the
following two things should be done in order to make the scripts work (a sketch of the second option follows this list):
* (Recommended approach) While reindexing the percolator documents, migrate the scripts to the Painless scripting language.
* Or add the `lang` parameter on the script and set it to the language these scripts were written in.
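
As a minimal Java client sketch of the second option (illustrative only; the script body, field name, and query shape are made up, and it assumes the original scripts were written in Groovy):

-----------------------------------
// Re-register a percolator query whose script was written in Groovy,
// setting the language explicitly so it keeps working after the reindex.
Script groovyScript = new Script("doc['price'].value > 10", ScriptType.INLINE, "groovy", null);
QueryBuilder percolatorQuery = QueryBuilders.scriptQuery(groovyScript);
-----------------------------------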

==== Java client

The percolator is no longer part of the core elasticsearch dependency. It has moved to the percolator module.
@@ -9,8 +9,6 @@ to help make the transition between languages as simple as possible.

Documentation for Painless can be found at <<modules-scripting-painless,Painless Scripting Language>>

It is also possible to set the default language back to Groovy using the following setting: `script.default_lang: groovy`

One common difference to note between Groovy and Painless is the use of parameters -- all parameters in Painless
must be prefixed with `params.` now. The following example shows the difference:

@@ -48,6 +46,12 @@ Painless (`my_modifer` is prefixed with `params`):
}
-----------------------------------

The `script.default_lang` setting has been removed. It is no longer possible to set the default scripting language. If a
language other than `painless` is used, it should be specified explicitly on the script itself.

For scripts with no explicit language defined that are part of already stored percolator queries, the default language
can be controlled with the `script.legacy.default_lang` setting.
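
As a rough illustration (an assumption about usage, not text from this change), the same legacy default can also be supplied programmatically wherever node `Settings` are built, for example in a test fixture:

-----------------------------------
// Hypothetical: pin the legacy default script language for stored percolator queries to Groovy.
Settings settings = Settings.builder()
        .put("script.legacy.default_lang", "groovy")
        .build();
-----------------------------------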

==== Removed 1.x script and template syntax

The deprecated 1.x syntax of defining inline scripts / templates and referring to file or index base scripts / templates
@ -515,7 +515,8 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
|
|||
currentFieldName = sourceParser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if ("query".equals(currentFieldName)) {
|
||||
return parseQuery(context, mapUnmappedFieldsAsString, sourceParser);
|
||||
QueryParseContext queryParseContext = context.newParseContextWithLegacyScriptLanguage(sourceParser);
|
||||
return parseQuery(context, mapUnmappedFieldsAsString, queryParseContext, sourceParser);
|
||||
} else {
|
||||
sourceParser.skipChildren();
|
||||
}
|
||||
|
|
|
@@ -270,7 +270,9 @@ public class PercolatorFieldMapper extends FieldMapper {
        }

        XContentParser parser = context.parser();
        QueryBuilder queryBuilder = parseQueryBuilder(queryShardContext.newParseContext(parser), parser.getTokenLocation());
        QueryBuilder queryBuilder = parseQueryBuilder(
                queryShardContext.newParseContext(parser), parser.getTokenLocation()
        );
        verifyQuery(queryBuilder);
        // Fetching of terms, shapes and indexed scripts happen during this rewrite:
        queryBuilder = queryBuilder.rewrite(queryShardContext);

@@ -312,7 +314,12 @@ public class PercolatorFieldMapper extends FieldMapper {
    }

    public static Query parseQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, XContentParser parser) throws IOException {
        return toQuery(context, mapUnmappedFieldsAsString, parseQueryBuilder(context.newParseContext(parser), parser.getTokenLocation()));
        return parseQuery(context, mapUnmappedFieldsAsString, context.newParseContext(parser), parser);
    }

    public static Query parseQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, QueryParseContext queryParseContext,
                                   XContentParser parser) throws IOException {
        return toQuery(context, mapUnmappedFieldsAsString, parseQueryBuilder(queryParseContext, parser.getTokenLocation()));
    }

    static Query toQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, QueryBuilder queryBuilder) throws IOException {

@@ -21,6 +21,7 @@ package org.elasticsearch.percolator;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.Version;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MappingMetaData;

@@ -28,20 +29,26 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.function.Function;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.scriptQuery;
import static org.elasticsearch.percolator.PercolatorTestUtil.preparePercolate;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0)
@LuceneTestCase.SuppressFileSystems("ExtrasFS")

@@ -52,7 +59,7 @@ public class PercolatorBackwardsCompatibilityTests extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(PercolatorPlugin.class);
        return Arrays.asList(PercolatorPlugin.class, FoolMeScriptLang.class);
    }

    @Override

@@ -81,25 +88,43 @@ public class PercolatorBackwardsCompatibilityTests extends ESIntegTestCase {
            .setTypes(".percolator")
            .addSort("_uid", SortOrder.ASC)
            .get();
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(4L));
        assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
        assertThat(searchResponse.getHits().getAt(1).id(), equalTo("2"));
        assertThat(searchResponse.getHits().getAt(2).id(), equalTo("3"));
        assertThat(searchResponse.getHits().getAt(3).id(), equalTo("4"));
        assertThat(XContentMapValues.extractValue("query.script.script.inline",
            searchResponse.getHits().getAt(3).sourceAsMap()), equalTo("return true"));
        // we don't upgrade the script definitions so that they include explicitly the lang,
        // because we read / parse the query at search time.
        assertThat(XContentMapValues.extractValue("query.script.script.lang",
            searchResponse.getHits().getAt(3).sourceAsMap()), nullValue());

        // verify percolate response
        PercolateResponse percolateResponse = preparePercolate(client())
            .setIndices(INDEX_NAME)
            .setDocumentType("message")
            .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}"))
            .get();

        assertThat(percolateResponse.getCount(), equalTo(1L));
        assertThat(percolateResponse.getMatches().length, equalTo(1));
        assertThat(percolateResponse.getMatches()[0].getId().string(), equalTo("4"));

        percolateResponse = preparePercolate(client())
            .setIndices(INDEX_NAME)
            .setDocumentType("message")
            .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("message", "the quick brown fox jumps over the lazy dog"))
            .get();

        assertThat(percolateResponse.getCount(), equalTo(2L));
        assertThat(percolateResponse.getMatches().length, equalTo(2));
        assertThat(percolateResponse.getCount(), equalTo(3L));
        assertThat(percolateResponse.getMatches().length, equalTo(3));
        assertThat(percolateResponse.getMatches()[0].getId().string(), equalTo("1"));
        assertThat(percolateResponse.getMatches()[1].getId().string(), equalTo("2"));
        assertThat(percolateResponse.getMatches()[2].getId().string(), equalTo("4"));

        // add an extra query and verify the results
        client().prepareIndex(INDEX_NAME, ".percolator", "4")
        client().prepareIndex(INDEX_NAME, ".percolator", "5")
            .setSource(jsonBuilder().startObject().field("query", matchQuery("message", "fox jumps")).endObject())
            .get();
        refresh();

@@ -110,8 +135,8 @@ public class PercolatorBackwardsCompatibilityTests extends ESIntegTestCase {
            .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("message", "the quick brown fox jumps over the lazy dog"))
            .get();

        assertThat(percolateResponse.getCount(), equalTo(3L));
        assertThat(percolateResponse.getMatches().length, equalTo(3));
        assertThat(percolateResponse.getCount(), equalTo(4L));
        assertThat(percolateResponse.getMatches().length, equalTo(4));
        assertThat(percolateResponse.getMatches()[0].getId().string(), equalTo("1"));
        assertThat(percolateResponse.getMatches()[1].getId().string(), equalTo("2"));
        assertThat(percolateResponse.getMatches()[2].getId().string(), equalTo("4"));

@@ -131,4 +156,19 @@ public class PercolatorBackwardsCompatibilityTests extends ESIntegTestCase {
        ensureGreen(INDEX_NAME);
    }

    // Fool the script service into thinking these scripts use the groovy script language,
    // so that a script with no lang defined is implicitly run against the legacy language:
    public static class FoolMeScriptLang extends MockScriptPlugin {

        @Override
        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
            return Collections.singletonMap("return true", (vars) -> true);
        }

        @Override
        public String pluginScriptLang() {
            return "groovy";
        }
    }

}

@@ -36,10 +36,14 @@ import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;

@@ -61,6 +65,8 @@ import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder;
import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;

@@ -72,6 +78,8 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

@@ -100,7 +108,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return pluginList(InternalSettingsPlugin.class, PercolatorPlugin.class);
        return pluginList(InternalSettingsPlugin.class, PercolatorPlugin.class, FoolMeScriptPlugin.class);
    }

    @Before

@@ -493,4 +501,71 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
        DocumentMapper defaultMapper = parser2x.parse("type1", new CompressedXContent(mapping));
        assertEquals(mapping, defaultMapper.mappingSource().string());
    }

    public void testImplicitlySetDefaultScriptLang() throws Exception {
        addQueryMapping();
        XContentBuilder query = jsonBuilder();
        query.startObject();
        query.startObject("script");
        if (randomBoolean()) {
            query.field("script", "return true");
        } else {
            query.startObject("script");
            query.field("inline", "return true");
            query.endObject();
        }
        query.endObject();
        query.endObject();

        ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1",
            XContentFactory.jsonBuilder().startObject()
                .rawField(fieldName, new BytesArray(query.string()))
                .endObject().bytes());
        BytesRef querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
        Map<String, Object> parsedQuery = XContentHelper.convertToMap(new BytesArray(querySource), true).v2();
        assertEquals(Script.DEFAULT_SCRIPT_LANG, XContentMapValues.extractValue("script.script.lang", parsedQuery));

        query = jsonBuilder();
        query.startObject();
        query.startObject("function_score");
        query.startArray("functions");
        query.startObject();
        query.startObject("script_score");
        if (randomBoolean()) {
            query.field("script", "return true");
        } else {
            query.startObject("script");
            query.field("inline", "return true");
            query.endObject();
        }
        query.endObject();
        query.endObject();
        query.endArray();
        query.endObject();
        query.endObject();

        doc = mapperService.documentMapper(typeName).parse("test", typeName, "1",
            XContentFactory.jsonBuilder().startObject()
                .rawField(fieldName, new BytesArray(query.string()))
                .endObject().bytes());
        querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
        parsedQuery = XContentHelper.convertToMap(new BytesArray(querySource), true).v2();
        assertEquals(Script.DEFAULT_SCRIPT_LANG,
            ((List) XContentMapValues.extractValue("function_score.functions.script_score.script.lang", parsedQuery)).get(0));
    }

    // Just so that we store scripts in percolator queries, but not really execute these scripts.
    public static class FoolMeScriptPlugin extends MockScriptPlugin {

        @Override
        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
            return Collections.singletonMap("return true", (vars) -> true);
        }

        @Override
        public String pluginScriptLang() {
            return Script.DEFAULT_SCRIPT_LANG;
        }
    }

}

Binary file not shown.