commit f927a235b3
Merge branch 'master' into feature/rank-eval
@@ -19,6 +19,8 @@
 package org.elasticsearch.cluster.metadata;
 
 import com.carrotsearch.hppc.cursors.ObjectCursor;
+import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.apache.lucene.analysis.Analyzer;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.component.AbstractComponent;
@@ -26,8 +28,8 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.IndexAnalyzers;
 import org.elasticsearch.index.analysis.AnalyzerScope;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.similarity.SimilarityService;
@@ -161,7 +163,10 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
 
     IndexMetaData archiveBrokenIndexSettings(IndexMetaData indexMetaData) {
         final Settings settings = indexMetaData.getSettings();
-        final Settings upgrade = indexScopedSettings.archiveUnknownOrBrokenSettings(settings);
+        final Settings upgrade = indexScopedSettings.archiveUnknownOrInvalidSettings(
+            settings,
+            e -> logger.warn("{} ignoring unknown index setting: [{}] with value [{}]; archiving", indexMetaData.getIndex(), e.getKey(), e.getValue()),
+            (e, ex) -> logger.warn((Supplier<?>) () -> new ParameterizedMessage("{} ignoring invalid index setting: [{}] with value [{}]; archiving", indexMetaData.getIndex(), e.getKey(), e.getValue()), ex));
         if (upgrade != settings) {
             return IndexMetaData.builder(indexMetaData).settings(upgrade).build();
         } else {
@@ -498,11 +498,21 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
     }
 
     /**
-     * Archives broken or unknown settings. Any setting that is not recognized or fails
-     * validation will be archived. This means the setting is prefixed with {@value ARCHIVED_SETTINGS_PREFIX}
-     * and remains in the settings object. This can be used to detect broken settings via APIs.
+     * Archives invalid or unknown settings. Any setting that is not recognized or fails validation
+     * will be archived. This means the setting is prefixed with {@value ARCHIVED_SETTINGS_PREFIX}
+     * and remains in the settings object. This can be used to detect invalid settings via APIs.
+     *
+     * @param settings        the {@link Settings} instance to scan for unknown or invalid settings
+     * @param unknownConsumer callback on unknown settings (consumer receives unknown key and its
+     *                        associated value)
+     * @param invalidConsumer callback on invalid settings (consumer receives invalid key, its
+     *                        associated value and an exception)
+     * @return a {@link Settings} instance with the unknown or invalid settings archived
      */
-    public Settings archiveUnknownOrBrokenSettings(Settings settings) {
+    public Settings archiveUnknownOrInvalidSettings(
+        final Settings settings,
+        final Consumer<Map.Entry<String, String>> unknownConsumer,
+        final BiConsumer<Map.Entry<String, String>, IllegalArgumentException> invalidConsumer) {
         Settings.Builder builder = Settings.builder();
         boolean changed = false;
         for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) {
@@ -516,10 +526,10 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
                     builder.put(entry.getKey(), entry.getValue());
                 } else {
                     changed = true;
-                    logger.warn("found unknown setting: {} value: {} - archiving", entry.getKey(), entry.getValue());
+                    unknownConsumer.accept(entry);
                     /*
-                     * We put them back in here such that tools can check from the outside if there are any indices with broken
-                     * settings. The setting can remain there but we want users to be aware that some of their setting are broken and
+                     * We put them back in here such that tools can check from the outside if there are any indices with invalid
+                     * settings. The setting can remain there but we want users to be aware that some of their setting are invalid and
                      * they can research why and what they need to do to replace them.
                      */
                     builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
@@ -527,12 +537,10 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
                 }
             } catch (IllegalArgumentException ex) {
                 changed = true;
-                logger.warn(
-                    (Supplier<?>) () -> new ParameterizedMessage(
-                        "found invalid setting: {} value: {} - archiving", entry.getKey(), entry.getValue()), ex);
+                invalidConsumer.accept(entry, ex);
                 /*
-                 * We put them back in here such that tools can check from the outside if there are any indices with broken settings. The
-                 * setting can remain there but we want users to be aware that some of their setting are broken and they can research why
+                 * We put them back in here such that tools can check from the outside if there are any indices with invalid settings. The
+                 * setting can remain there but we want users to be aware that some of their setting are invalid and they can research why
                  * and what they need to do to replace them.
                  */
                 builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
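The hunks above replace direct logging inside AbstractScopedSettings with caller-supplied callbacks: the method only archives unknown or invalid settings, and the caller decides how each case is reported. For reference, a minimal standalone sketch of that callback shape, written against plain JDK types rather than the Elasticsearch Settings API; every name in it is illustrative:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.BiConsumer;
    import java.util.function.Consumer;
    import java.util.function.Predicate;

    // Illustrative only: a stripped-down analogue of the archiving logic above,
    // with the Elasticsearch Settings machinery replaced by a plain Map.
    public final class ArchiveSketch {

        static final String ARCHIVED_PREFIX = "archived.";

        static Map<String, String> archiveUnknownOrInvalid(
                Map<String, String> settings,
                Predicate<String> isKnown,
                Consumer<Map.Entry<String, String>> validate,   // throws IllegalArgumentException for bad values
                Consumer<Map.Entry<String, String>> unknownConsumer,
                BiConsumer<Map.Entry<String, String>, IllegalArgumentException> invalidConsumer) {
            Map<String, String> result = new HashMap<>();
            for (Map.Entry<String, String> entry : settings.entrySet()) {
                if (isKnown.test(entry.getKey()) == false) {
                    unknownConsumer.accept(entry);                        // report, then archive under a prefix
                    result.put(ARCHIVED_PREFIX + entry.getKey(), entry.getValue());
                    continue;
                }
                try {
                    validate.accept(entry);
                    result.put(entry.getKey(), entry.getValue());         // known and valid: keep as-is
                } catch (IllegalArgumentException ex) {
                    invalidConsumer.accept(entry, ex);                    // report with the exception, then archive
                    result.put(ARCHIVED_PREFIX + entry.getKey(), entry.getValue());
                }
            }
            return result;
        }

        public static void main(String[] args) {
            Map<String, String> input = Map.of("index.refresh_interval", "-200", "index.bogus", "foo");
            Map<String, String> out = archiveUnknownOrInvalid(
                input,
                key -> key.equals("index.refresh_interval"),
                e -> { if (e.getValue().startsWith("-")) throw new IllegalArgumentException("negative interval"); },
                e -> System.out.println("unknown setting [" + e.getKey() + "]; archiving"),
                (e, ex) -> System.out.println("invalid setting [" + e.getKey() + "]: " + ex.getMessage() + "; archiving"));
            System.out.println(out);   // both entries end up under the archived. prefix
        }
    }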
@@ -21,7 +21,6 @@ package org.elasticsearch.gateway;
 
 import com.carrotsearch.hppc.ObjectFloatHashMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
-
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -38,6 +37,7 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.indices.IndicesService;
 
 import java.util.Arrays;
+import java.util.Map;
 import java.util.function.Supplier;
 
 public class Gateway extends AbstractComponent implements ClusterStateListener {
@@ -146,13 +146,35 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
             }
         }
         final ClusterSettings clusterSettings = clusterService.getClusterSettings();
-        metaDataBuilder.persistentSettings(clusterSettings.archiveUnknownOrBrokenSettings(metaDataBuilder.persistentSettings()));
-        metaDataBuilder.transientSettings(clusterSettings.archiveUnknownOrBrokenSettings(metaDataBuilder.transientSettings()));
+        metaDataBuilder.persistentSettings(
+            clusterSettings.archiveUnknownOrInvalidSettings(
+                metaDataBuilder.persistentSettings(),
+                e -> logUnknownSetting("persistent", e),
+                (e, ex) -> logInvalidSetting("persistent", e, ex)));
+        metaDataBuilder.transientSettings(
+            clusterSettings.archiveUnknownOrInvalidSettings(
+                metaDataBuilder.transientSettings(),
+                e -> logUnknownSetting("transient", e),
+                (e, ex) -> logInvalidSetting("transient", e, ex)));
         ClusterState.Builder builder = ClusterState.builder(clusterService.getClusterName());
         builder.metaData(metaDataBuilder);
         listener.onSuccess(builder.build());
     }
 
+    private void logUnknownSetting(String settingType, Map.Entry<String, String> e) {
+        logger.warn("ignoring unknown {} setting: [{}] with value [{}]; archiving", settingType, e.getKey(), e.getValue());
+    }
+
+    private void logInvalidSetting(String settingType, Map.Entry<String, String> e, IllegalArgumentException ex) {
+        logger.warn(
+            (org.apache.logging.log4j.util.Supplier<?>)
+                () -> new ParameterizedMessage("ignoring invalid {} setting: [{}] with value [{}]; archiving",
+                    settingType,
+                    e.getKey(),
+                    e.getValue()),
+            ex);
+    }
+
     @Override
     public void clusterChanged(final ClusterChangedEvent event) {
         // order is important, first metaState, and then shardsState
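The new logInvalidSetting helper above uses the Log4j 2 lazy-message idiom: the ParameterizedMessage is built inside a Supplier so it is only constructed when WARN is actually enabled, and the exception travels as a separate argument so its stack trace is preserved. A small sketch of that idiom, assuming log4j-api on the classpath; the class and setting names are made up:

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.apache.logging.log4j.message.ParameterizedMessage;
    import org.apache.logging.log4j.util.Supplier;

    // Illustrative only: the lazy-logging pattern used by logInvalidSetting above.
    public final class LazyWarnSketch {

        private static final Logger logger = LogManager.getLogger(LazyWarnSketch.class);

        static void warnInvalidSetting(String settingType, String key, String value, IllegalArgumentException ex) {
            // The Supplier defers building the message until the WARN level is enabled.
            Supplier<?> message = () -> new ParameterizedMessage(
                "ignoring invalid {} setting: [{}] with value [{}]; archiving", settingType, key, value);
            logger.warn(message, ex);
        }

        public static void main(String[] args) {
            warnInvalidSetting("persistent", "cluster.bogus", "-1", new IllegalArgumentException("unknown setting"));
        }
    }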
@@ -23,6 +23,7 @@ import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParseFieldMatcher;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -32,6 +33,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.script.ScriptService.ScriptType;
 
 import java.io.IOException;
@@ -189,6 +191,14 @@ public final class Script implements ToXContent, Writeable {
         return parse(parser, parseFieldMatcher, null);
     }
 
+    public static Script parse(XContentParser parser, QueryParseContext context) {
+        try {
+            return parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+        } catch (IOException e) {
+            throw new ParsingException(parser.getTokenLocation(), "Error parsing [" + ScriptField.SCRIPT.getPreferredName() + "] field", e);
+        }
+    }
+
     public static Script parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, @Nullable String lang) throws IOException {
         XContentParser.Token token = parser.currentToken();
         // If the parser hasn't yet been pushed to the first token, do it now
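The new Script.parse(XContentParser, QueryParseContext) overload wraps the checked IOException in a ParsingException so the method can be used as a method reference (it is passed as Script::parse to a parser declaration later in this commit), since such functional hooks cannot throw checked exceptions. A hypothetical plain-Java sketch of that wrapping pattern; none of these names come from the Elasticsearch code:

    import java.io.IOException;
    import java.io.StringReader;
    import java.util.function.BiFunction;

    // Illustrative only: why a checked-exception parse routine gets an unchecked wrapper.
    public final class UncheckedParseSketch {

        static final class ParsingException extends RuntimeException {     // stand-in for the ES ParsingException
            ParsingException(String message, Throwable cause) { super(message, cause); }
        }

        // A parse routine with a checked exception, like the existing Script.parse(parser, matcher, lang).
        static String parseChecked(StringReader reader) throws IOException {
            int c = reader.read();
            if (c == -1) {
                throw new IOException("unexpected end of input");
            }
            return Character.toString((char) c);
        }

        // The convenience overload: same work, checked exception wrapped, so it fits a BiFunction.
        static String parse(StringReader reader, String fieldName) {
            try {
                return parseChecked(reader);
            } catch (IOException e) {
                throw new ParsingException("Error parsing [" + fieldName + "] field", e);
            }
        }

        public static void main(String[] args) {
            BiFunction<StringReader, String, String> hook = UncheckedParseSketch::parse; // method reference now legal
            System.out.println(hook.apply(new StringReader("x"), "script"));             // prints: x
        }
    }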
@@ -21,11 +21,11 @@ package org.elasticsearch.search.sort;
 
 import org.apache.lucene.search.SortField;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@@ -39,17 +39,13 @@ import org.elasticsearch.search.MultiValueMode;
 
 import java.io.IOException;
 import java.util.Objects;
-import java.util.Optional;
 
 /**
  * A sort builder to sort based on a document field.
 */
 public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
     public static final String NAME = "field_sort";
-    public static final ParseField NESTED_PATH = new ParseField("nested_path");
-    public static final ParseField NESTED_FILTER = new ParseField("nested_filter");
     public static final ParseField MISSING = new ParseField("missing");
-    public static final ParseField ORDER = new ParseField("order");
     public static final ParseField SORT_MODE = new ParseField("mode");
     public static final ParseField UNMAPPED_TYPE = new ParseField("unmapped_type");
 
@@ -239,10 +235,10 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
             builder.field(SORT_MODE.getPreferredName(), sortMode);
         }
         if (nestedFilter != null) {
-            builder.field(NESTED_FILTER.getPreferredName(), nestedFilter, params);
+            builder.field(NESTED_FILTER_FIELD.getPreferredName(), nestedFilter, params);
         }
         if (nestedPath != null) {
-            builder.field(NESTED_PATH.getPreferredName(), nestedPath);
+            builder.field(NESTED_PATH_FIELD.getPreferredName(), nestedPath);
         }
         builder.endObject();
         builder.endObject();
@@ -327,67 +323,17 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
      * in '{ "foo": { "order" : "asc"} }'. When parsing the inner object, the field name can be passed in via this argument
      */
     public static FieldSortBuilder fromXContent(QueryParseContext context, String fieldName) throws IOException {
-        XContentParser parser = context.parser();
+        return PARSER.parse(context.parser(), new FieldSortBuilder(fieldName), context);
+    }
 
-        Optional<QueryBuilder> nestedFilter = Optional.empty();
-        String nestedPath = null;
-        Object missing = null;
-        SortOrder order = null;
-        SortMode sortMode = null;
-        String unmappedType = null;
+    private static ObjectParser<FieldSortBuilder, QueryParseContext> PARSER = new ObjectParser<>(NAME);
 
-        String currentFieldName = null;
-        XContentParser.Token token;
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            if (token == XContentParser.Token.FIELD_NAME) {
-                currentFieldName = parser.currentName();
-            } else if (token == XContentParser.Token.START_OBJECT) {
-                if (context.getParseFieldMatcher().match(currentFieldName, NESTED_FILTER)) {
-                    nestedFilter = context.parseInnerQueryBuilder();
-                } else {
-                    throw new ParsingException(parser.getTokenLocation(), "Expected " + NESTED_FILTER.getPreferredName() + " element.");
-                }
-            } else if (token.isValue()) {
-                if (context.getParseFieldMatcher().match(currentFieldName, NESTED_PATH)) {
-                    nestedPath = parser.text();
-                } else if (context.getParseFieldMatcher().match(currentFieldName, MISSING)) {
-                    missing = parser.objectText();
-                } else if (context.getParseFieldMatcher().match(currentFieldName, ORDER)) {
-                    String sortOrder = parser.text();
-                    if ("asc".equals(sortOrder)) {
-                        order = SortOrder.ASC;
-                    } else if ("desc".equals(sortOrder)) {
-                        order = SortOrder.DESC;
-                    } else {
-                        throw new ParsingException(parser.getTokenLocation(), "Sort order [{}] not supported.", sortOrder);
-                    }
-                } else if (context.getParseFieldMatcher().match(currentFieldName, SORT_MODE)) {
-                    sortMode = SortMode.fromString(parser.text());
-                } else if (context.getParseFieldMatcher().match(currentFieldName, UNMAPPED_TYPE)) {
-                    unmappedType = parser.text();
-                } else {
-                    throw new ParsingException(parser.getTokenLocation(), "Option [{}] not supported.", currentFieldName);
-                }
-            }
-        }
-
-        FieldSortBuilder builder = new FieldSortBuilder(fieldName);
-        nestedFilter.ifPresent(builder::setNestedFilter);
-        if (nestedPath != null) {
-            builder.setNestedPath(nestedPath);
-        }
-        if (missing != null) {
-            builder.missing(missing);
-        }
-        if (order != null) {
-            builder.order(order);
-        }
-        if (sortMode != null) {
-            builder.sortMode(sortMode);
-        }
-        if (unmappedType != null) {
-            builder.unmappedType(unmappedType);
-        }
-        return builder;
+    static {
+        PARSER.declareField(FieldSortBuilder::missing, p -> p.objectText(), MISSING, ValueType.VALUE);
+        PARSER.declareString(FieldSortBuilder::setNestedPath , NESTED_PATH_FIELD);
+        PARSER.declareString(FieldSortBuilder::unmappedType , UNMAPPED_TYPE);
+        PARSER.declareString((b, v) -> b.order(SortOrder.fromString(v)) , ORDER_FIELD);
+        PARSER.declareString((b, v) -> b.sortMode(SortMode.fromString(v)), SORT_MODE);
+        PARSER.declareObject(FieldSortBuilder::setNestedFilter, SortBuilder::parseNestedFilter, NESTED_FILTER_FIELD);
     }
 }
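The FieldSortBuilder change above swaps a hand-written token loop for a declarative ObjectParser: each recognized field is registered once with the setter it feeds, and unrecognized fields surface as an IllegalArgumentException from the parser (which is why the sort builder tests further down now catch IllegalArgumentException instead of ParsingException). A toy analogue of that declare-a-setter-per-field idea using only JDK types; it is not the ObjectParser API itself:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.BiConsumer;

    // Illustrative only: declarative field registration, with the XContent machinery replaced by a Map.
    public final class DeclarativeParserSketch {

        static final class FieldSort {
            String order;
            String missing;
            String unmappedType;
            @Override public String toString() {
                return "order=" + order + " missing=" + missing + " unmappedType=" + unmappedType;
            }
        }

        // One registration per recognized field, instead of a hand-written if/else chain.
        private static final Map<String, BiConsumer<FieldSort, String>> DECLARATIONS = new HashMap<>();
        static {
            DECLARATIONS.put("order", (b, v) -> b.order = v);
            DECLARATIONS.put("missing", (b, v) -> b.missing = v);
            DECLARATIONS.put("unmapped_type", (b, v) -> b.unmappedType = v);
        }

        static FieldSort parse(Map<String, String> fields) {
            FieldSort builder = new FieldSort();
            for (Map.Entry<String, String> entry : fields.entrySet()) {
                BiConsumer<FieldSort, String> setter = DECLARATIONS.get(entry.getKey());
                if (setter == null) {
                    throw new IllegalArgumentException("unknown field [" + entry.getKey() + "]");
                }
                setter.accept(builder, entry.getValue());
            }
            return builder;
        }

        public static void main(String[] args) {
            System.out.println(parse(Map.of("order", "asc", "missing", "_last")));
        }
    }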
@@ -80,8 +80,6 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
     private static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize")
             .withAllDeprecated("use validation_method instead");
     private static final ParseField SORTMODE_FIELD = new ParseField("mode", "sort_mode");
-    private static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path");
-    private static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter");
 
     private final String fieldName;
     private final List<GeoPoint> points = new ArrayList<>();
@@ -511,7 +509,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
     public SortFieldAndFormat build(QueryShardContext context) throws IOException {
         final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
         // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
-        List<GeoPoint> localPoints = new ArrayList<GeoPoint>();
+        List<GeoPoint> localPoints = new ArrayList<>();
         for (GeoPoint geoPoint : this.points) {
             localPoints.add(new GeoPoint(geoPoint));
         }
@@ -20,13 +20,10 @@
 package org.elasticsearch.search.sort;
 
 import org.apache.lucene.search.SortField;
-import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.DocValueFormat;
@@ -40,7 +37,6 @@ import java.util.Objects;
 public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> {
 
     public static final String NAME = "_score";
-    public static final ParseField ORDER_FIELD = new ParseField("order");
     private static final SortFieldAndFormat SORT_SCORE = new SortFieldAndFormat(
             new SortField(null, SortField.Type.SCORE), DocValueFormat.RAW);
     private static final SortFieldAndFormat SORT_SCORE_REVERSE = new SortFieldAndFormat(
@@ -86,26 +82,13 @@ public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> {
      * in '{ "foo": { "order" : "asc"} }'. When parsing the inner object, the field name can be passed in via this argument
     */
     public static ScoreSortBuilder fromXContent(QueryParseContext context, String fieldName) throws IOException {
-        XContentParser parser = context.parser();
-        ParseFieldMatcher matcher = context.getParseFieldMatcher();
+        return PARSER.apply(context.parser(), context);
+    }
 
-        XContentParser.Token token;
-        String currentName = parser.currentName();
-        ScoreSortBuilder result = new ScoreSortBuilder();
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            if (token == XContentParser.Token.FIELD_NAME) {
-                currentName = parser.currentName();
-            } else if (token.isValue()) {
-                if (matcher.match(currentName, ORDER_FIELD)) {
-                    result.order(SortOrder.fromString(parser.text()));
-                } else {
-                    throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] failed to parse field [" + currentName + "]");
-                }
-            } else {
-                throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] unexpected token [" + token + "]");
-            }
-        }
-        return result;
+    private static ObjectParser<ScoreSortBuilder, QueryParseContext> PARSER = new ObjectParser<>(NAME, ScoreSortBuilder::new);
+
+    static {
+        PARSER.declareString((builder, order) -> builder.order(SortOrder.fromString(order)), ORDER_FIELD);
     }
 
     @Override
@@ -26,13 +26,12 @@ import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.fielddata.FieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
@@ -57,7 +56,8 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.Locale;
 import java.util.Objects;
-import java.util.Optional;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 
 /**
  * Script sort builder allows to sort based on a custom script expression.
@@ -68,8 +68,6 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
     public static final ParseField TYPE_FIELD = new ParseField("type");
     public static final ParseField SCRIPT_FIELD = new ParseField("script");
     public static final ParseField SORTMODE_FIELD = new ParseField("mode");
-    public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path");
-    public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter");
 
     private final Script script;
 
@@ -216,6 +214,18 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
         return builder;
     }
 
+    private static ConstructingObjectParser<ScriptSortBuilder, QueryParseContext> PARSER = new ConstructingObjectParser<>(NAME,
+        a -> new ScriptSortBuilder((Script) a[0], (ScriptSortType) a[1]));
+
+    static {
+        PARSER.declareField(constructorArg(), Script::parse, ScriptField.SCRIPT, ValueType.OBJECT_OR_STRING);
+        PARSER.declareField(constructorArg(), p -> ScriptSortType.fromString(p.text()), TYPE_FIELD, ValueType.STRING);
+        PARSER.declareString((b, v) -> b.order(SortOrder.fromString(v)), ORDER_FIELD);
+        PARSER.declareString((b, v) -> b.sortMode(SortMode.fromString(v)), SORTMODE_FIELD);
+        PARSER.declareString(ScriptSortBuilder::setNestedPath , NESTED_PATH_FIELD);
+        PARSER.declareObject(ScriptSortBuilder::setNestedFilter, SortBuilder::parseNestedFilter, NESTED_FILTER_FIELD);
+    }
+
     /**
      * Creates a new {@link ScriptSortBuilder} from the query held by the {@link QueryParseContext} in
      * {@link org.elasticsearch.common.xcontent.XContent} format.
@@ -226,59 +236,7 @@
      * in '{ "foo": { "order" : "asc"} }'. When parsing the inner object, the field name can be passed in via this argument
     */
     public static ScriptSortBuilder fromXContent(QueryParseContext context, String elementName) throws IOException {
-        XContentParser parser = context.parser();
-        ParseFieldMatcher parseField = context.getParseFieldMatcher();
-        Script script = null;
-        ScriptSortType type = null;
-        SortMode sortMode = null;
-        SortOrder order = null;
-        Optional<QueryBuilder> nestedFilter = Optional.empty();
-        String nestedPath = null;
-
-        XContentParser.Token token;
-        String currentName = parser.currentName();
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            if (token == XContentParser.Token.FIELD_NAME) {
-                currentName = parser.currentName();
-            } else if (token == XContentParser.Token.START_OBJECT) {
-                if (parseField.match(currentName, ScriptField.SCRIPT)) {
-                    script = Script.parse(parser, parseField, context.getDefaultScriptLanguage());
-                } else if (parseField.match(currentName, NESTED_FILTER_FIELD)) {
-                    nestedFilter = context.parseInnerQueryBuilder();
-                } else {
-                    throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] failed to parse field [" + currentName + "]");
-                }
-            } else if (token.isValue()) {
-                if (parseField.match(currentName, ORDER_FIELD)) {
-                    order = SortOrder.fromString(parser.text());
-                } else if (parseField.match(currentName, TYPE_FIELD)) {
-                    type = ScriptSortType.fromString(parser.text());
-                } else if (parseField.match(currentName, SORTMODE_FIELD)) {
-                    sortMode = SortMode.fromString(parser.text());
-                } else if (parseField.match(currentName, NESTED_PATH_FIELD)) {
-                    nestedPath = parser.text();
-                } else if (parseField.match(currentName, ScriptField.SCRIPT)) {
-                    script = Script.parse(parser, parseField, context.getDefaultScriptLanguage());
-                } else {
-                    throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] failed to parse field [" + currentName + "]");
-                }
-            } else {
-                throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] unexpected token [" + token + "]");
-            }
-        }
-
-        ScriptSortBuilder result = new ScriptSortBuilder(script, type);
-        if (order != null) {
-            result.order(order);
-        }
-        if (sortMode != null) {
-            result.sortMode(sortMode);
-        }
-        nestedFilter.ifPresent(result::setNestedFilter);
-        if (nestedPath != null) {
-            result.setNestedPath(nestedPath);
-        }
-        return result;
+        return PARSER.apply(context.parser(), context);
     }
 
 
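ScriptSortBuilder uses ConstructingObjectParser instead of ObjectParser because script and type feed the constructor while the remaining fields are optional setters on the constructed object. A plain-Java sketch of that two-phase idea, with made-up field names and no XContent machinery:

    import java.util.Map;

    // Illustrative only: required values feed a constructor, optional ones are applied afterwards.
    public final class ConstructorArgsSketch {

        static final class ScriptSort {
            final String script;   // required, constructor argument
            final String type;     // required, constructor argument
            String order = "asc";  // optional, settable afterwards

            ScriptSort(String script, String type) {
                this.script = script;
                this.type = type;
            }
        }

        static ScriptSort parse(Map<String, String> fields) {
            // Phase 1: collect the constructor arguments and fail if they are missing.
            String script = fields.get("script");
            String type = fields.get("type");
            if (script == null || type == null) {
                throw new IllegalArgumentException("both [script] and [type] are required");
            }
            ScriptSort sort = new ScriptSort(script, type);
            // Phase 2: apply optional fields to the already-constructed object.
            if (fields.containsKey("order")) {
                sort.order = fields.get("order");
            }
            return sort;
        }

        public static void main(String[] args) {
            ScriptSort sort = parse(Map.of("script", "doc['field'].value", "type", "number", "order", "desc"));
            System.out.println(sort.script + " " + sort.type + " " + sort.order);
        }
    }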
@@ -25,6 +25,7 @@ import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.join.BitSetProducer;
 import org.elasticsearch.action.support.ToXContentToBytes;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.NamedWriteable;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -49,7 +50,11 @@ import static java.util.Collections.unmodifiableMap;
 public abstract class SortBuilder<T extends SortBuilder<T>> extends ToXContentToBytes implements NamedWriteable {
 
     protected SortOrder order = SortOrder.ASC;
+
+    // parse fields common to more than one SortBuilder
     public static final ParseField ORDER_FIELD = new ParseField("order");
+    public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter");
+    public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path");
 
     private static final Map<String, Parser<?>> PARSERS;
     static {
@@ -196,6 +201,16 @@ public abstract class SortBuilder<T extends SortBuilder<T>> extends ToXContentTo
         return nested;
     }
 
+    protected static QueryBuilder parseNestedFilter(XContentParser parser, QueryParseContext context) {
+        try {
+            QueryBuilder builder = context.parseInnerQueryBuilder().orElseThrow(() -> new ParsingException(parser.getTokenLocation(),
+                "Expected " + NESTED_FILTER_FIELD.getPreferredName() + " element."));
+            return builder;
+        } catch (Exception e) {
+            throw new ParsingException(parser.getTokenLocation(), "Expected " + NESTED_FILTER_FIELD.getPreferredName() + " element.", e);
+        }
+    }
+
     @FunctionalInterface
     private interface Parser<T extends SortBuilder<?>> {
         T fromXContent(QueryParseContext context, String elementName) throws IOException;
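The new parseNestedFilter above turns an absent inner query into a ParsingException via Optional.orElseThrow rather than a null check. A minimal sketch of that idiom, with a generic exception type standing in for ParsingException:

    import java.util.Optional;

    // Illustrative only: Optional.orElseThrow as used by parseNestedFilter above.
    public final class OrElseThrowSketch {

        static String requireInnerQuery(Optional<String> innerQuery) {
            // An absent inner query becomes a descriptive exception instead of a null check.
            return innerQuery.orElseThrow(() -> new IllegalArgumentException("Expected nested_filter element."));
        }

        public static void main(String[] args) {
            System.out.println(requireInnerQuery(Optional.of("match_all")));   // prints: match_all
            try {
                requireInnerQuery(Optional.empty());
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage());                            // prints: Expected nested_filter element.
            }
        }
    }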
@@ -16,11 +16,11 @@
  * specific language governing permissions and limitations
  * under the License.
  */
 
 package org.elasticsearch.index;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -29,18 +29,20 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.translog.Translog;
-import org.elasticsearch.indices.mapper.MapperRegistry;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.VersionUtils;
 
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Function;
 
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.core.StringContains.containsString;
+import static org.hamcrest.object.HasToString.hasToString;
+
 public class IndexSettingsTests extends ESTestCase {
 
     public void testRunListener() {
@@ -348,26 +350,48 @@ public class IndexSettingsTests extends ESTestCase {
         assertEquals(actualNewTranslogFlushThresholdSize, settings.getFlushThresholdSize());
     }
 
     public void testArchiveBrokenIndexSettings() {
-        Settings settings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrBrokenSettings(Settings.EMPTY);
+        Settings settings =
+            IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrInvalidSettings(
+                Settings.EMPTY,
+                e -> { assert false : "should not have been invoked, no unknown settings"; },
+                (e, ex) -> { assert false : "should not have been invoked, no invalid settings"; });
         assertSame(settings, Settings.EMPTY);
-        settings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrBrokenSettings(Settings.builder()
-            .put("index.refresh_interval", "-200").build());
+        settings =
+            IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrInvalidSettings(
+                Settings.builder().put("index.refresh_interval", "-200").build(),
+                e -> { assert false : "should not have been invoked, no invalid settings"; },
+                (e, ex) -> {
+                    assertThat(e.getKey(), equalTo("index.refresh_interval"));
+                    assertThat(e.getValue(), equalTo("-200"));
+                    assertThat(ex, hasToString(containsString("failed to parse setting [index.refresh_interval] with value [-200]")));
+                });
         assertEquals("-200", settings.get("archived.index.refresh_interval"));
         assertNull(settings.get("index.refresh_interval"));
 
         Settings prevSettings = settings; // no double archive
-        settings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrBrokenSettings(prevSettings);
+        settings =
+            IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrInvalidSettings(
+                prevSettings,
+                e -> { assert false : "should not have been invoked, no unknown settings"; },
+                (e, ex) -> { assert false : "should not have been invoked, no invalid settings"; });
        assertSame(prevSettings, settings);
 
-        settings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrBrokenSettings(Settings.builder()
-            .put("index.version.created", Version.CURRENT.id) // private setting
-            .put("index.unknown", "foo")
-            .put("index.refresh_interval", "2s").build());
+        settings =
+            IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrInvalidSettings(
+                Settings.builder()
+                    .put("index.version.created", Version.CURRENT.id) // private setting
+                    .put("index.unknown", "foo")
+                    .put("index.refresh_interval", "2s").build(),
+                e -> {
+                    assertThat(e.getKey(), equalTo("index.unknown"));
+                    assertThat(e.getValue(), equalTo("foo"));
+                },
+                (e, ex) -> { assert false : "should not have been invoked, no invalid settings"; });
 
         assertEquals("foo", settings.get("archived.index.unknown"));
         assertEquals(Integer.toString(Version.CURRENT.id), settings.get("index.version.created"));
         assertEquals("2s", settings.get("index.refresh_interval"));
     }
 
 }
@@ -41,10 +41,10 @@ import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.ContentPath;
-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper.DoubleFieldType;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
+import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.index.mapper.ObjectMapper.Nested;
 import org.elasticsearch.index.query.IdsQueryBuilder;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;
@@ -245,7 +245,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST
             @Override
             public ObjectMapper getObjectMapper(String name) {
                 BuilderContext context = new BuilderContext(this.getIndexSettings().getSettings(), new ContentPath());
-                return (ObjectMapper) new ObjectMapper.Builder<>(name).nested(Nested.newNested(false, false)).build(context);
+                return new ObjectMapper.Builder<>(name).nested(Nested.newNested(false, false)).build(context);
             }
         };
     }
@@ -21,7 +21,6 @@ package org.elasticsearch.search.sort;
 
 import org.apache.lucene.search.SortField;
 import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParseContext;
@@ -140,7 +139,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase<FieldSortBuilder
         try {
             FieldSortBuilder.fromXContent(context, "");
             fail("adding reverse sorting option should fail with an exception");
-        } catch (ParsingException e) {
+        } catch (IllegalArgumentException e) {
             // all good
         }
     }
@@ -22,7 +22,6 @@ package org.elasticsearch.search.sort;
 
 import org.apache.lucene.search.SortField;
 import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParseContext;
@@ -93,7 +92,7 @@ public class ScoreSortBuilderTests extends AbstractSortTestCase<ScoreSortBuilder
         try {
             ScoreSortBuilder.fromXContent(context, "_score");
             fail("adding reverse sorting option should fail with an exception");
-        } catch (ParsingException e) {
+        } catch (IllegalArgumentException e) {
             // all good
         }
     }
@@ -22,7 +22,6 @@ package org.elasticsearch.search.sort;
 
 import org.apache.lucene.search.SortField;
 import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParseContext;
@@ -30,8 +29,6 @@ import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptService.ScriptType;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
-import org.junit.Rule;
-import org.junit.rules.ExpectedException;
 
 import java.io.IOException;
 import java.util.HashSet;
@@ -146,19 +143,14 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
         assertEquals(ScriptSortType.NUMBER, ScriptSortType.fromString("NUMBER"));
     }
 
-    @Rule
-    public ExpectedException exceptionRule = ExpectedException.none();
-
     public void testScriptSortTypeNull() {
-        exceptionRule.expect(NullPointerException.class);
-        exceptionRule.expectMessage("input string is null");
-        ScriptSortType.fromString(null);
+        Exception e = expectThrows(NullPointerException.class, () -> ScriptSortType.fromString(null));
+        assertEquals("input string is null", e.getMessage());
     }
 
     public void testScriptSortTypeIllegalArgument() {
-        exceptionRule.expect(IllegalArgumentException.class);
-        exceptionRule.expectMessage("Unknown ScriptSortType [xyz]");
-        ScriptSortType.fromString("xyz");
+        Exception e = expectThrows(IllegalArgumentException.class, () -> ScriptSortType.fromString("xyz"));
+        assertEquals("Unknown ScriptSortType [xyz]", e.getMessage());
     }
 
     public void testParseJson() throws IOException {
@@ -226,9 +218,8 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
         parser.nextToken();
 
         QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
-        exceptionRule.expect(ParsingException.class);
-        exceptionRule.expectMessage("failed to parse field [bad_field]");
-        ScriptSortBuilder.fromXContent(context, null);
+        Exception e = expectThrows(IllegalArgumentException.class, () -> ScriptSortBuilder.fromXContent(context, null));
+        assertEquals("[_script] unknown field [bad_field], parser not found", e.getMessage());
     }
 
     public void testParseBadFieldNameExceptionsOnStartObject() throws IOException {
@@ -240,9 +231,8 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
         parser.nextToken();
 
         QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
-        exceptionRule.expect(ParsingException.class);
-        exceptionRule.expectMessage("failed to parse field [bad_field]");
-        ScriptSortBuilder.fromXContent(context, null);
+        Exception e = expectThrows(IllegalArgumentException.class, () -> ScriptSortBuilder.fromXContent(context, null));
+        assertEquals("[_script] unknown field [bad_field], parser not found", e.getMessage());
     }
 
     public void testParseUnexpectedToken() throws IOException {
@@ -253,9 +243,8 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
         parser.nextToken();
 
         QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
-        exceptionRule.expect(ParsingException.class);
-        exceptionRule.expectMessage("unexpected token [START_ARRAY]");
-        ScriptSortBuilder.fromXContent(context, null);
+        Exception e = expectThrows(IllegalArgumentException.class, () -> ScriptSortBuilder.fromXContent(context, null));
+        assertEquals("[_script] script doesn't support values of type: START_ARRAY", e.getMessage());
     }
 
     /**
@@ -263,9 +252,9 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
     */
     public void testBadSortMode() throws IOException {
         ScriptSortBuilder builder = new ScriptSortBuilder(new Script("something"), ScriptSortType.STRING);
-        exceptionRule.expect(IllegalArgumentException.class);
-        exceptionRule.expectMessage("script sort of type [string] doesn't support mode");
-        builder.sortMode(SortMode.fromString(randomFrom(new String[]{"avg", "median", "sum"})));
+        String sortMode = randomFrom(new String[] { "avg", "median", "sum" });
+        Exception e = expectThrows(IllegalArgumentException.class, () -> builder.sortMode(SortMode.fromString(sortMode)));
+        assertEquals("script sort of type [string] doesn't support mode [" + sortMode + "]", e.getMessage());
     }
 
     @Override
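The test hunks above drop the JUnit ExpectedException rule in favor of expectThrows, which returns the thrown exception so the test can assert on its message inline, right where the call happens. A sketch of what such a helper looks like in plain Java; it is not the ESTestCase implementation, whose exact signature may differ:

    // Illustrative only: the shape of an expectThrows(...) helper.
    public final class ExpectThrowsSketch {

        interface ThrowingRunnable {
            void run() throws Throwable;
        }

        static <T extends Throwable> T expectThrows(Class<T> expectedType, ThrowingRunnable runnable) {
            try {
                runnable.run();
            } catch (Throwable t) {
                if (expectedType.isInstance(t)) {
                    return expectedType.cast(t);       // hand the exception back so the test can assert on it
                }
                throw new AssertionError("expected " + expectedType.getSimpleName()
                    + " but got " + t.getClass().getSimpleName(), t);
            }
            throw new AssertionError("expected " + expectedType.getSimpleName() + " but nothing was thrown");
        }

        public static void main(String[] args) {
            NumberFormatException e = expectThrows(NumberFormatException.class, () -> Integer.parseInt("xyz"));
            System.out.println(e.getMessage());        // the caller can now assert on the message directly
        }
    }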
@@ -93,7 +93,6 @@ buildRestTests.expectedUnconvertedCandidates = [
   'reference/analysis/tokenfilters/stop-tokenfilter.asciidoc',
   'reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc',
   'reference/analysis/tokenfilters/word-delimiter-tokenfilter.asciidoc',
-  'reference/cat/pending_tasks.asciidoc',
   'reference/cat/plugins.asciidoc',
   'reference/cat/recovery.asciidoc',
   'reference/cat/repositories.asciidoc',
@ -3,11 +3,18 @@
 
 `pending_tasks` provides the same information as the
 <<cluster-pending,`/_cluster/pending_tasks`>> API in a
-convenient tabular format.
+convenient tabular format. For example:
 
-[source,sh]
+[source,js]
+--------------------------------------------------
+GET /_cat/pending_tasks?v
+--------------------------------------------------
+// CONSOLE
+
+Might look like:
+
+[source,js]
 --------------------------------------------------
-% curl 'localhost:9200/_cat/pending_tasks?v'
 insertOrder timeInQueue priority source
 1685 855ms HIGH update-mapping [foo][t]
 1686 843ms HIGH update-mapping [foo][t]
@ -17,3 +24,6 @@ insertOrder timeInQueue priority source
 1690 787ms HIGH update-mapping [foo][t]
 1691 773ms HIGH update-mapping [foo][t]
 --------------------------------------------------
+// TESTRESPONSE[s/(\n.+)+/(\\n.+)*/ _cat]
+// We can't assert anything about the tasks in progress here because we don't
+// know what might be in progress....
@ -185,8 +185,8 @@ final class Netty4HttpChannel extends AbstractRestChannel {
     // Determine if the request connection should be closed on completion.
     private boolean isCloseConnection() {
         final boolean http10 = isHttp10();
-        return HttpHeaderValues.CLOSE.equals(nettyRequest.headers().get(HttpHeaderNames.CONNECTION)) ||
-            (http10 && HttpHeaderValues.KEEP_ALIVE.equals(nettyRequest.headers().get(HttpHeaderNames.CONNECTION)) == false);
+        return HttpHeaderValues.CLOSE.contentEqualsIgnoreCase(nettyRequest.headers().get(HttpHeaderNames.CONNECTION)) ||
+            (http10 && !HttpHeaderValues.KEEP_ALIVE.contentEqualsIgnoreCase(nettyRequest.headers().get(HttpHeaderNames.CONNECTION)));
     }
 
     // Create a new {@link HttpResponse} to transmit the response for the netty request.
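The isCloseConnection fix above matters because Connection header values are case-insensitive, so `Connection: Close` must be treated like `Connection: close`, and a missing header must not match. The sketch below restates that decision with plain `String` comparisons instead of Netty's `AsciiString#contentEqualsIgnoreCase`; it illustrates the logic only and is not the committed code.

[source,java]
--------------------------------------------------
// Sketch of the close-connection decision using plain strings; the real code compares
// Netty AsciiString constants with contentEqualsIgnoreCase against the Connection header.
public class CloseConnectionSketch {

    static boolean isCloseConnection(String httpVersion, String connectionHeader) {
        final boolean http10 = "HTTP/1.0".equals(httpVersion);
        // equalsIgnoreCase is called on the constant, so a missing (null) header is simply "no match".
        final boolean close = "close".equalsIgnoreCase(connectionHeader);
        final boolean keepAlive = "keep-alive".equalsIgnoreCase(connectionHeader);
        // HTTP/1.1 defaults to keep-alive unless the client asks to close;
        // HTTP/1.0 defaults to close unless the client asks to keep the connection alive.
        return close || (http10 && !keepAlive);
    }

    public static void main(String[] args) {
        System.out.println(isCloseConnection("HTTP/1.1", "Close"));      // true: case-insensitive match
        System.out.println(isCloseConnection("HTTP/1.1", null));         // false: keep-alive by default
        System.out.println(isCloseConnection("HTTP/1.0", null));         // true: close by default
        System.out.println(isCloseConnection("HTTP/1.0", "Keep-Alive")); // false: client opted in
    }
}
--------------------------------------------------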
@ -30,10 +30,12 @@ import io.netty.channel.ChannelPipeline;
 import io.netty.channel.ChannelProgressivePromise;
 import io.netty.channel.ChannelPromise;
 import io.netty.channel.EventLoop;
+import io.netty.channel.embedded.EmbeddedChannel;
 import io.netty.handler.codec.http.DefaultFullHttpRequest;
 import io.netty.handler.codec.http.FullHttpRequest;
 import io.netty.handler.codec.http.FullHttpResponse;
 import io.netty.handler.codec.http.HttpHeaderNames;
+import io.netty.handler.codec.http.HttpHeaderValues;
 import io.netty.handler.codec.http.HttpMethod;
 import io.netty.handler.codec.http.HttpResponse;
 import io.netty.handler.codec.http.HttpVersion;
@ -212,6 +214,37 @@ public class Netty4HttpChannelTests extends ESTestCase {
         }
     }
 
+    public void testConnectionClose() throws Exception {
+        final Settings settings = Settings.builder().build();
+        try (Netty4HttpServerTransport httpServerTransport =
+                 new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool)) {
+            httpServerTransport.start();
+            final FullHttpRequest httpRequest;
+            final boolean close = randomBoolean();
+            if (randomBoolean()) {
+                httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/");
+                if (close) {
+                    httpRequest.headers().add(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
+                }
+            } else {
+                httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_0, HttpMethod.GET, "/");
+                if (!close) {
+                    httpRequest.headers().add(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE);
+                }
+            }
+            final EmbeddedChannel embeddedChannel = new EmbeddedChannel();
+            final Netty4HttpRequest request = new Netty4HttpRequest(httpRequest, embeddedChannel);
+
+            // send a response, the channel close status should match
+            assertTrue(embeddedChannel.isOpen());
+            final Netty4HttpChannel channel =
+                new Netty4HttpChannel(httpServerTransport, request, null, randomBoolean(), threadPool.getThreadContext());
+            final TestResponse resp = new TestResponse();
+            channel.sendResponse(resp);
+            assertThat(embeddedChannel.isOpen(), equalTo(!close));
+        }
+    }
+
     private FullHttpResponse executeRequest(final Settings settings, final String host) {
         return executeRequest(settings, null, host);
     }
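The new testConnectionClose test drives the response through Netty's `EmbeddedChannel`, an in-memory channel that never opens a socket, so the test can simply inspect `isOpen()` after the response is sent. Below is a reduced sketch of that setup, assuming only `netty-transport` and `netty-codec-http` on the classpath; the Elasticsearch-specific `Netty4HttpChannel` assertion is omitted.

[source,java]
--------------------------------------------------
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpVersion;

// Reduced sketch of the test setup: build an HTTP/1.1 request that asks for the connection
// to be closed, back it with an in-memory EmbeddedChannel, and observe the channel state.
public class ConnectionCloseSketch {
    public static void main(String[] args) {
        final FullHttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/");
        httpRequest.headers().add(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);

        final EmbeddedChannel embeddedChannel = new EmbeddedChannel();
        System.out.println("open before response: " + embeddedChannel.isOpen()); // true
        // In the real test, Netty4HttpChannel#sendResponse closes the channel because of the
        // Connection: close header; here the close is issued directly to show what is asserted.
        embeddedChannel.close();
        System.out.println("open after close: " + embeddedChannel.isOpen());     // false
    }
}
--------------------------------------------------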
@ -4,5 +4,5 @@ logger.com_amazonaws.level = warn
 logger.com_amazonaws_jmx_SdkMBeanRegistrySupport.name = com.amazonaws.jmx.SdkMBeanRegistrySupport
 logger.com_amazonaws_jmx_SdkMBeanRegistrySupport.level = error
 
-logger_com_amazonaws_metrics_AwsSdkMetrics.name = com.amazonaws.metrics.AwsSdkMetrics
-logger_com_amazonaws_metrics_AwsSdkMetrics.level = error
+logger.com_amazonaws_metrics_AwsSdkMetrics.name = com.amazonaws.metrics.AwsSdkMetrics
+logger.com_amazonaws_metrics_AwsSdkMetrics.level = error
@ -10,7 +10,7 @@ Elasticsearch as follows:
 
 [source,sh]
 ---------------------
-bin/elasticsearch -E script.inline true -E node.attr.testattr test -E path.repo /tmp -E repositories.url.allowed_urls 'http://snapshot.*'
+bin/elasticsearch -Enode.attr.testattr=test -Epath.repo=/tmp -Erepositories.url.allowed_urls='http://snapshot.*'
 ---------------------
 
 =======================================
@ -159,7 +159,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
     }
 
     protected static String[] getCurrentTypes() {
-        return currentTypes;
+        return currentTypes == null ? Strings.EMPTY_ARRAY : currentTypes;
     }
 
     protected Collection<Class<? extends Plugin>> getPlugins() {
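The getCurrentTypes change above returns a shared empty array instead of `null` when the static field was never initialized, so callers can iterate or take the length without a null check. A tiny sketch of the guard follows, with a local constant standing in for `Strings.EMPTY_ARRAY`.

[source,java]
--------------------------------------------------
// Sketch of the null-to-empty-array guard; EMPTY_ARRAY stands in for Strings.EMPTY_ARRAY.
public class EmptyArrayGuardSketch {

    private static final String[] EMPTY_ARRAY = new String[0];
    private static String[] currentTypes; // may legitimately never be initialized

    static String[] getCurrentTypes() {
        return currentTypes == null ? EMPTY_ARRAY : currentTypes;
    }

    public static void main(String[] args) {
        // Callers can iterate unconditionally instead of null-checking first.
        for (String type : getCurrentTypes()) {
            System.out.println(type);
        }
        System.out.println("types: " + getCurrentTypes().length); // 0 when nothing was set
    }
}
--------------------------------------------------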
@ -42,7 +42,11 @@ public class ClientYamlSuiteRestApiParser {
             if ("methods".equals(parser.currentName())) {
                 parser.nextToken();
                 while (parser.nextToken() == XContentParser.Token.VALUE_STRING) {
-                    restApi.addMethod(parser.text());
+                    String method = parser.text();
+                    if (restApi.getMethods().contains(method)) {
+                        throw new IllegalArgumentException("Found duplicate method [" + method + "]");
+                    }
+                    restApi.addMethod(method);
                 }
             }
 
@ -56,16 +60,24 @@ public class ClientYamlSuiteRestApiParser {
 
             if (parser.currentToken() == XContentParser.Token.START_ARRAY && "paths".equals(currentFieldName)) {
                 while (parser.nextToken() == XContentParser.Token.VALUE_STRING) {
-                    restApi.addPath(parser.text());
+                    String path = parser.text();
+                    if (restApi.getPaths().contains(path)) {
+                        throw new IllegalArgumentException("Found duplicate path [" + path + "]");
+                    }
+                    restApi.addPath(path);
                 }
             }
 
             if (parser.currentToken() == XContentParser.Token.START_OBJECT && "parts".equals(currentFieldName)) {
                 while (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
-                    restApi.addPathPart(parser.currentName());
+                    String part = parser.currentName();
+                    if (restApi.getPathParts().contains(part)) {
+                        throw new IllegalArgumentException("Found duplicate part [" + part + "]");
+                    }
+                    restApi.addPathPart(part);
                     parser.nextToken();
                     if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
-                        throw new IOException("Expected parts field in rest api definition to contain an object");
+                        throw new IllegalArgumentException("Expected parts field in rest api definition to contain an object");
                     }
                     parser.skipChildren();
                 }
@ -73,10 +85,14 @@ public class ClientYamlSuiteRestApiParser {
 
             if (parser.currentToken() == XContentParser.Token.START_OBJECT && "params".equals(currentFieldName)) {
                 while (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
+                    String param = parser.currentName();
+                    if (restApi.getParams().contains(param)) {
+                        throw new IllegalArgumentException("Found duplicate param [" + param + "]");
+                    }
                     restApi.addParam(parser.currentName());
                     parser.nextToken();
                     if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
-                        throw new IOException("Expected params field in rest api definition to contain an object");
+                        throw new IllegalArgumentException("Expected params field in rest api definition to contain an object");
                    }
                     parser.skipChildren();
                 }
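Each of the parser changes above follows the same shape: look the new value up in what has already been collected and fail fast with a "Found duplicate ... [...]" message before adding it. A generic sketch of that check is below; `addUnique` and the `label` argument are illustrative names, not part of `ClientYamlSuiteRestApiParser`.

[source,java]
--------------------------------------------------
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;

// Generic sketch of the duplicate check applied to methods, paths, parts and params:
// reject a value that has already been seen, otherwise record it.
public class DuplicateCheckSketch {

    static void addUnique(Set<String> seen, String label, String value) {
        if (seen.contains(value)) {
            throw new IllegalArgumentException("Found duplicate " + label + " [" + value + "]");
        }
        seen.add(value);
    }

    public static void main(String[] args) {
        final Set<String> methods = new LinkedHashSet<>();
        try {
            for (String method : Arrays.asList("PUT", "GET", "PUT")) {
                addUnique(methods, "method", method);
            }
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // Found duplicate method [PUT]
        }
    }
}
--------------------------------------------------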
@ -32,6 +32,109 @@ import static org.hamcrest.Matchers.containsString;
  * stream
  */
 public class ClientYamlSuiteRestApiParserFailingTests extends ESTestCase {
 
+    public void testDuplicateMethods() throws Exception {
+        parseAndExpectFailure("{\n" +
+            " \"ping\": {" +
+            " \"documentation\": \"http://www.elasticsearch.org/guide/\"," +
+            " \"methods\": [\"PUT\", \"PUT\"]," +
+            " \"url\": {" +
+            " \"path\": \"/\"," +
+            " \"paths\": [\"/\"]," +
+            " \"parts\": {" +
+            " }," +
+            " \"params\": {" +
+            " \"type\" : \"boolean\",\n" +
+            " \"description\" : \"Whether specified concrete indices should be ignored when unavailable (missing or closed)\"" +
+            " }" +
+            " }," +
+            " \"body\": null" +
+            " }" +
+            "}", "Found duplicate method [PUT]");
+    }
+
+    public void testDuplicatePaths() throws Exception {
+        parseAndExpectFailure("{\n" +
+            " \"ping\": {" +
+            " \"documentation\": \"http://www.elasticsearch.org/guide/\"," +
+            " \"methods\": [\"PUT\"]," +
+            " \"url\": {" +
+            " \"path\": \"/pingone\"," +
+            " \"paths\": [\"/pingone\", \"/pingtwo\", \"/pingtwo\"]," +
+            " \"parts\": {" +
+            " }," +
+            " \"params\": {" +
+            " \"type\" : \"boolean\",\n" +
+            " \"description\" : \"Whether specified concrete indices should be ignored when unavailable (missing or closed)\"" +
+            " }" +
+            " }," +
+            " \"body\": null" +
+            " }" +
+            "}", "Found duplicate path [/pingtwo]");
+    }
+
+    public void testDuplicateParts() throws Exception {
+        parseAndExpectFailure("{\n" +
+            " \"ping\": {" +
+            " \"documentation\": \"http://www.elasticsearch.org/guide/\"," +
+            " \"methods\": [\"PUT\"]," +
+            " \"url\": {" +
+            " \"path\": \"/\"," +
+            " \"paths\": [\"/\"]," +
+            " \"parts\": {" +
+            " \"index\": {" +
+            " \"type\" : \"string\",\n" +
+            " \"description\" : \"index part\"\n" +
+            " }," +
+            " \"type\": {" +
+            " \"type\" : \"string\",\n" +
+            " \"description\" : \"type part\"\n" +
+            " }," +
+            " \"index\": {" +
+            " \"type\" : \"string\",\n" +
+            " \"description\" : \"index parameter part\"\n" +
+            " }" +
+            " }," +
+            " \"params\": {" +
+            " \"type\" : \"boolean\",\n" +
+            " \"description\" : \"Whether specified concrete indices should be ignored when unavailable (missing or closed)\"" +
+            " }" +
+            " }," +
+            " \"body\": null" +
+            " }" +
+            "}", "Found duplicate part [index]");
+    }
+
+    public void testDuplicateParams() throws Exception {
+        parseAndExpectFailure("{\n" +
+            " \"ping\": {" +
+            " \"documentation\": \"http://www.elasticsearch.org/guide/\"," +
+            " \"methods\": [\"PUT\"]," +
+            " \"url\": {" +
+            " \"path\": \"/\"," +
+            " \"paths\": [\"/\"]," +
+            " \"parts\": {" +
+            " }," +
+            " \"params\": {" +
+            " \"timeout\": {" +
+            " \"type\" : \"string\",\n" +
+            " \"description\" : \"timeout parameter\"\n" +
+            " }," +
+            " \"refresh\": {" +
+            " \"type\" : \"string\",\n" +
+            " \"description\" : \"refresh parameter\"\n" +
+            " }," +
+            " \"timeout\": {" +
+            " \"type\" : \"string\",\n" +
+            " \"description\" : \"timeout parameter again\"\n" +
+            " }" +
+            " }" +
+            " }," +
+            " \"body\": null" +
+            " }" +
+            "}", "Found duplicate param [timeout]");
+    }
+
     public void testBrokenSpecShouldThrowUsefulExceptionWhenParsingFailsOnParams() throws Exception {
         parseAndExpectFailure(BROKEN_SPEC_PARAMS, "Expected params field in rest api definition to contain an object");
     }
@ -42,12 +145,10 @@ public class ClientYamlSuiteRestApiParserFailingTests extends ESTestCase {
 
     private void parseAndExpectFailure(String brokenJson, String expectedErrorMessage) throws Exception {
         XContentParser parser = JsonXContent.jsonXContent.createParser(brokenJson);
-        try {
-            new ClientYamlSuiteRestApiParser().parse("location", parser);
-            fail("Expected to fail parsing but did not happen");
-        } catch (IOException e) {
-            assertThat(e.getMessage(), containsString(expectedErrorMessage));
-        }
+        ClientYamlSuiteRestApiParser restApiParser = new ClientYamlSuiteRestApiParser();
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> restApiParser.parse("location", parser));
+        assertThat(e.getMessage(), containsString(expectedErrorMessage));
     }
 
     // see params section is broken, an inside param is missing