Merge remote-tracking branch 'es/master' into ccr

* es/master:
  Added more parameter to PersistentTaskPlugin#getPersistentTasksExecutor(...)
  [Tests] Relax assertion in SuggestStatsIT (#28544)
  Make internal Rounding fields final (#28532)
  Fix the ability to remove old plugin
  [TEST] Expand failure message for wildfly integration tests
  Add 6.2.1 version constant
  Remove feature parsing for GetIndicesAction (#28535)
  No refresh on shard activation needed (#28013)
  Improve failure message when restoring an index that already exists in the cluster (#28498)
  Use right skip versions.
  [Docs] Fix incomplete URLs (#28528)
  Use non deprecated xcontenthelper (#28503)
  Painless: Fixes a null pointer exception in certain cases of for loop usage (#28506)
commit ed014e570f
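Reviewer note: nearly every Java hunk below is the same mechanical migration — the XContent parser factories now take an explicit DeprecationHandler (LoggingDeprecationHandler.INSTANCE in production code, DeprecationHandler.THROW_UNSUPPORTED_OPERATION in strict tests). A minimal, self-contained sketch of the new call shape; the JSON payload and class name here are illustrative, not from the diff:

    import java.io.IOException;
    import java.util.Map;

    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentHelper;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.XContentType;

    public class CreateParserMigrationSketch {
        static Map<String, Object> parse(BytesReference source) throws IOException {
            // old (now deprecated): XContentHelper.createParser(registry, source, XContentType.JSON)
            // new: the caller explicitly chooses how deprecated syntax is reported
            try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
                    LoggingDeprecationHandler.INSTANCE, source, XContentType.JSON)) {
                return parser.map();
            }
        }

        public static void main(String[] args) throws IOException {
            System.out.println(parse(new BytesArray("{\"field\":\"value\"}")));
        }
    }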
@@ -58,6 +58,7 @@ import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContent;
@@ -316,7 +317,8 @@ public final class Request {
         BytesReference indexSource = indexRequest.source();
         XContentType indexXContentType = indexRequest.getContentType();

-        try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, indexSource, indexXContentType)) {
+        try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
+                LoggingDeprecationHandler.INSTANCE, indexSource, indexXContentType)) {
             try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) {
                 builder.copyCurrentStructure(parser);
                 source = builder.bytes().toBytesRef();
@@ -51,6 +51,7 @@ import org.elasticsearch.common.CheckedConsumer;
 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.ContextParser;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -637,7 +638,8 @@ public class RestHighLevelClient implements Closeable {
         if (xContentType == null) {
             throw new IllegalStateException("Unsupported Content-Type: " + entity.getContentType().getValue());
         }
-        try (XContentParser parser = xContentType.xContent().createParser(registry, entity.getContent())) {
+        try (XContentParser parser = xContentType.xContent().createParser(registry,
+                LoggingDeprecationHandler.INSTANCE, entity.getContent())) {
             return entityParser.apply(parser);
         }
     }
@@ -86,7 +86,7 @@ class RemovePluginCommand extends EnvironmentAwareCommand {

        // first make sure nothing extends this plugin
        List<String> usedBy = new ArrayList<>();
-       Set<PluginsService.Bundle> bundles = PluginsService.getPluginBundles(env.pluginsFile());
+       Set<PluginsService.Bundle> bundles = PluginsService.getPluginBundles(env.pluginsFile(), false);
        for (PluginsService.Bundle bundle : bundles) {
            for (String extendedPlugin : bundle.plugin.getExtendedPlugins()) {
                if (extendedPlugin.equals(pluginName)) {
@@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.TestEnvironment;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.VersionUtils;
 import org.junit.Before;

 import java.io.BufferedReader;
@@ -41,6 +42,7 @@ import java.util.Map;

 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasToString;

 @LuceneTestCase.SuppressFileSystems("*")
@@ -78,19 +80,27 @@ public class RemovePluginCommandTests extends ESTestCase {
         env = TestEnvironment.newEnvironment(settings);
     }

-    void createPlugin(String name) throws Exception {
+    void createPlugin(String name) throws IOException {
         createPlugin(env.pluginsFile(), name);
     }

-    void createPlugin(Path path, String name) throws Exception {
+    void createPlugin(String name, Version version) throws IOException {
+        createPlugin(env.pluginsFile(), name, version);
+    }
+
+    void createPlugin(Path path, String name) throws IOException {
+        createPlugin(path, name, Version.CURRENT);
+    }
+
+    void createPlugin(Path path, String name, Version version) throws IOException {
         PluginTestUtil.writePluginProperties(
-            path.resolve(name),
-            "description", "dummy",
-            "name", name,
-            "version", "1.0",
-            "elasticsearch.version", Version.CURRENT.toString(),
-            "java.version", System.getProperty("java.specification.version"),
-            "classname", "SomeClass");
+            path.resolve(name),
+            "description", "dummy",
+            "name", name,
+            "version", "1.0",
+            "elasticsearch.version", version.toString(),
+            "java.version", System.getProperty("java.specification.version"),
+            "classname", "SomeClass");
     }

     void createMetaPlugin(String name, String... plugins) throws Exception {
@@ -137,6 +147,18 @@ public class RemovePluginCommandTests extends ESTestCase {
         assertRemoveCleaned(env);
     }

+    public void testRemoveOldVersion() throws Exception {
+        createPlugin(
+            "fake",
+            VersionUtils.randomVersionBetween(
+                random(),
+                Version.CURRENT.minimumIndexCompatibilityVersion(),
+                VersionUtils.getPreviousVersion()));
+        removePlugin("fake", home, randomBoolean());
+        assertThat(Files.exists(env.pluginsFile().resolve("fake")), equalTo(false));
+        assertRemoveCleaned(env);
+    }
+
     public void testBasicMeta() throws Exception {
         createMetaPlugin("meta", "fake1");
         createPlugin("other");
@@ -5,9 +5,9 @@

 The Elasticsearch repository contains examples of:

-* a https://github.com/elastic/elasticsearch/tree/master/plugins/custom-settings[Java plugin]
+* a https://github.com/elastic/elasticsearch/tree/master/plugins/examples/custom-settings[Java plugin]
   which contains a plugin with custom settings.
-* a https://github.com/elastic/elasticsearch/tree/master/plugins/rest-handler[Java plugin]
+* a https://github.com/elastic/elasticsearch/tree/master/plugins/examples/rest-handler[Java plugin]
   which contains a plugin that registers a Rest handler.
 * a https://github.com/elastic/elasticsearch/tree/master/plugins/examples/rescore[Java plugin]
   which contains a rescore plugin.
@@ -21,6 +21,7 @@ package org.elasticsearch.ingest.common;

 import com.fasterxml.jackson.core.JsonFactory;

+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -97,7 +98,8 @@ public final class ScriptProcessor extends AbstractProcessor {
     public ScriptProcessor create(Map<String, Processor.Factory> registry, String processorTag,
                                   Map<String, Object> config) throws Exception {
         XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).map(config);
-        XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY, builder.bytes().streamInput());
+        XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY,
+            LoggingDeprecationHandler.INSTANCE, builder.bytes().streamInput());
         Script script = Script.parse(parser);

         Arrays.asList("id", "source", "inline", "lang", "params", "options").forEach(config::remove);
@@ -76,7 +76,7 @@ public final class SFor extends AStatement {
         locals = Locals.newLocalScope(locals);

         if (initializer != null) {
-            if (initializer instanceof AStatement) {
+            if (initializer instanceof SDeclBlock) {
                 initializer.analyze(locals);
             } else if (initializer instanceof AExpression) {
                 AExpression initializer = (AExpression)this.initializer;
@@ -87,6 +87,9 @@ public final class SFor extends AStatement {
                 if (!initializer.statement) {
                     throw createError(new IllegalArgumentException("Not a statement."));
                 }
+
+                initializer.expected = initializer.actual;
+                this.initializer = initializer.cast(locals);
             } else {
                 throw createError(new IllegalStateException("Illegal tree structure."));
             }
@@ -119,6 +122,9 @@ public final class SFor extends AStatement {
             if (!afterthought.statement) {
                 throw createError(new IllegalArgumentException("Not a statement."));
             }
+
+            afterthought.expected = afterthought.actual;
+            afterthought = afterthought.cast(locals);
         }

         if (block != null) {
@@ -197,6 +203,7 @@ public final class SFor extends AStatement {
         if (afterthought != null) {
             writer.mark(begin);
             afterthought.write(writer, globals);
+            writer.writePop(MethodWriter.getType(afterthought.expected).getSize());
         }

         if (afterthought != null || !allEscape) {
@@ -108,8 +108,12 @@ public class BasicStatementTests extends ScriptTestCase {
     }

     public void testForStatement() {
+        assertEquals(6, exec("int x, y; for (x = 0; x < 4; ++x) {y += x;} return y;"));
         assertEquals("aaaaaa", exec("String c = \"a\"; for (int x = 0; x < 5; ++x) c += \"a\"; return c;"));

+        assertEquals(6, exec("double test() { return 0.0; }" +
+                "int x, y; for (test(); x < 4; test()) {y += x; ++x;} return y;"));
+
         Object value = exec(
             " int[][] b = new int[5][5];  \n" +
             " for (int x = 0; x < 5; ++x) {  \n" +
@@ -31,6 +31,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -105,7 +106,8 @@ public class TransportRankEvalAction extends HandledTransportAction<RankEvalRequ
             String templateId = ratedRequest.getTemplateId();
             TemplateScript.Factory templateScript = scriptsWithoutParams.get(templateId);
             String resolvedRequest = templateScript.newInstance(params).execute();
-            try (XContentParser subParser = createParser(namedXContentRegistry, new BytesArray(resolvedRequest), XContentType.JSON)) {
+            try (XContentParser subParser = createParser(namedXContentRegistry,
+                LoggingDeprecationHandler.INSTANCE, new BytesArray(resolvedRequest), XContentType.JSON)) {
                 ratedSearchSource = SearchSourceBuilder.fromXContent(subParser);
             } catch (IOException e) {
                 // if we fail parsing, put the exception into the errors map and continue
@@ -30,6 +30,7 @@ import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.bulk.BackoffPolicy;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.index.reindex.ScrollableHitSource;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.client.ResponseException;
@@ -196,7 +197,7 @@ public class RemoteScrollableHitSource extends ScrollableHitSource {
             }
             // EMPTY is safe here because we don't call namedObject
             try (XContentParser xContentParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY,
-                    content)) {
+                    LoggingDeprecationHandler.INSTANCE, content)) {
                 parsedResponse = parser.apply(xContentParser, xContentType);
             } catch (ParsingException e) {
                 /* Because we're streaming the response we can't get a copy of it here. The best we can do is hint that it
@@ -20,6 +20,7 @@
 package org.elasticsearch.ingest.useragent;

 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -54,7 +55,8 @@ final class UserAgentParser {

     private void init(InputStream regexStream) throws IOException {
         // EMPTY is safe here because we don't use namedObject
-        XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML).createParser(NamedXContentRegistry.EMPTY, regexStream);
+        XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML).createParser(NamedXContentRegistry.EMPTY,
+            LoggingDeprecationHandler.INSTANCE, regexStream);

         XContentParser.Token token = yamlParser.nextToken();

@@ -26,6 +26,7 @@ import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.util.EntityUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.Build;
 import org.elasticsearch.Version;
@@ -78,7 +79,9 @@ public class WildflyIT extends LuceneTestCase {
         }
         put.setEntity(new StringEntity(body, ContentType.APPLICATION_JSON));
         try (CloseableHttpResponse response = client.execute(put)) {
-            assertThat(response.getStatusLine().getStatusCode(), equalTo(201));
+            int status = response.getStatusLine().getStatusCode();
+            assertThat("expected a 201 response but got: " + status + " - body: " + EntityUtils.toString(response.getEntity()),
+                    status, equalTo(201));
         }

         final HttpGet get = new HttpGet(new URI(str));
@@ -1,8 +1,8 @@
 ---
 "top_hits aggregation with nested documents":
   - skip:
-      version: "5.99.99 - "
-      reason: "5.x nodes don't include index or id in nested top hits"
+      version: " - 6.1.99"
+      reason: "<= 6.1 nodes don't always include index or id in nested top hits"
   - do:
       indices.create:
           index: my-index
@@ -13,9 +13,8 @@ setup:
 ---
 "Nested inner hits":
   - skip:
-      # this will only run in a mixed cluster environment with at least 1 5.x node
-      version: "5.99.99 - "
-      reason: multiple types are not supported on 6.x indices onwards
+      version: " - 6.1.99"
+      reason: "<= 6.1 nodes don't always include index or id in nested inner hits"
   - do:
       index:
         index: test
@@ -35,7 +34,7 @@ setup:
   - match: { hits.hits.0._type: "type_1" }
   - match: { hits.hits.0._id: "1" }
   - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._index: "test" }
-  - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._type: "type1" }
+  - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._type: "type_1" }
   - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._id: "1" }
   - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.field: "nested_field" }
   - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.offset: 0 }
@@ -152,6 +152,8 @@ public class Version implements Comparable<Version> {
     public static final Version V_6_1_4 = new Version(V_6_1_4_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
     public static final int V_6_2_0_ID = 6020099;
     public static final Version V_6_2_0 = new Version(V_6_2_0_ID, org.apache.lucene.util.Version.LUCENE_7_2_1);
+    public static final int V_6_2_1_ID = 6020199;
+    public static final Version V_6_2_1 = new Version(V_6_2_1_ID, org.apache.lucene.util.Version.LUCENE_7_2_1);
     public static final int V_6_3_0_ID = 6030099;
     public static final Version V_6_3_0 = new Version(V_6_3_0_ID, org.apache.lucene.util.Version.LUCENE_7_2_1);
     public static final int V_7_0_0_alpha1_ID = 7000001;
@@ -174,6 +176,8 @@ public class Version implements Comparable<Version> {
                 return V_7_0_0_alpha1;
             case V_6_3_0_ID:
                 return V_6_3_0;
+            case V_6_2_1_ID:
+                return V_6_2_1;
             case V_6_2_0_ID:
                 return V_6_2_0;
             case V_6_1_4_ID:
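Reviewer note: version constants follow a rigid pattern — the numeric id packs major/minor/revision plus a build suffix (99 for releases), and the switch in fromId must hand back the canonical constant. A small sketch of the round-trip the new V_6_2_1 entries enable; the identity comparisons assume fromId's canonical-constant behavior:

    import org.elasticsearch.Version;

    public class VersionIdSketch {
        public static void main(String[] args) {
            // 6020199 packs 6.2.1: 6 * 1_000_000 + 2 * 10_000 + 1 * 100 + 99 (release suffix)
            assert Version.fromId(6020199) == Version.V_6_2_1;
            // ordering falls out of the numeric ids
            assert Version.V_6_2_1.after(Version.V_6_2_0);
            assert Version.V_6_3_0.after(Version.V_6_2_1);
        }
    }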
@@ -205,16 +205,16 @@ public class Alias implements Streamable, ToXContentObject {
                 if (token == XContentParser.Token.FIELD_NAME) {
                     currentFieldName = parser.currentName();
                 } else if (token == XContentParser.Token.START_OBJECT) {
-                    if (FILTER.match(currentFieldName)) {
+                    if (FILTER.match(currentFieldName, parser.getDeprecationHandler())) {
                         Map<String, Object> filter = parser.mapOrdered();
                         alias.filter(filter);
                     }
                 } else if (token == XContentParser.Token.VALUE_STRING) {
-                    if (ROUTING.match(currentFieldName)) {
+                    if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
                         alias.routing(parser.text());
-                    } else if (INDEX_ROUTING.match(currentFieldName)) {
+                    } else if (INDEX_ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
                         alias.indexRouting(parser.text());
-                    } else if (SEARCH_ROUTING.match(currentFieldName)) {
+                    } else if (SEARCH_ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
                         alias.searchRouting(parser.text());
                     }
                 }
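Reviewer note: the Alias hunk shows the companion change to the parser migration — ParseField#match now also takes a DeprecationHandler, usually the one carried by the parser itself via getDeprecationHandler(). A minimal sketch of the new signature, using a hypothetical field with a deprecated alternative name:

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;

    public class ParseFieldMatchSketch {
        // hypothetical field; "alias_routing" stands in for a deprecated name
        private static final ParseField ROUTING = new ParseField("routing", "alias_routing");

        public static void main(String[] args) {
            // old: ROUTING.match("alias_routing")
            // new: the supplied handler decides what happens when a deprecated name matches
            boolean matched = ROUTING.match("alias_routing", LoggingDeprecationHandler.INSTANCE);
            System.out.println(matched); // true, with the deprecation reported by the handler
        }
    }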
@@ -34,9 +34,9 @@ import java.util.List;
  */
 public class GetIndexRequest extends ClusterInfoRequest<GetIndexRequest> {
     public enum Feature {
-        ALIASES((byte) 0, "_aliases", "_alias"),
-        MAPPINGS((byte) 1, "_mappings", "_mapping"),
-        SETTINGS((byte) 2, "_settings");
+        ALIASES((byte) 0),
+        MAPPINGS((byte) 1),
+        SETTINGS((byte) 2);

         private static final Feature[] FEATURES = new Feature[Feature.values().length];

@@ -47,52 +47,22 @@ public class GetIndexRequest extends ClusterInfoRequest<GetIndexRequest> {
             }
         }

-        private final List<String> validNames;
-        private final String preferredName;
         private final byte id;

-        Feature(byte id, String... validNames) {
-            assert validNames != null && validNames.length > 0;
+        Feature(byte id) {
             this.id = id;
-            this.validNames = Arrays.asList(validNames);
-            this.preferredName = validNames[0];
         }

         public byte id() {
             return id;
         }

-        public String preferredName() {
-            return preferredName;
-        }
-
-        public boolean validName(String name) {
-            return this.validNames.contains(name);
-        }
-
-        public static Feature fromName(String name) {
-            for (Feature feature : Feature.values()) {
-                if (feature.validName(name)) {
-                    return feature;
-                }
-            }
-            throw new IllegalArgumentException("No endpoint or operation is available at [" + name + "]");
-        }
-
         public static Feature fromId(byte id) {
             if (id < 0 || id >= FEATURES.length) {
                 throw new IllegalArgumentException("No mapping for id [" + id + "]");
             }
             return FEATURES[id];
         }

-        public static Feature[] convertToFeatures(String... featureNames) {
-            Feature[] features = new Feature[featureNames.length];
-            for (int i = 0; i < featureNames.length; i++) {
-                features[i] = Feature.fromName(featureNames[i]);
-            }
-            return features;
-        }
     }

     private static final Feature[] DEFAULT_FEATURES = new Feature[] { Feature.ALIASES, Feature.MAPPINGS, Feature.SETTINGS };
@@ -31,6 +31,7 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.uid.Versions;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -864,7 +865,8 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         }
         if (doc != null) {
             XContentType xContentType = doc.getContentType();
-            try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, doc.source(), xContentType)) {
+            try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
+                LoggingDeprecationHandler.INSTANCE, doc.source(), xContentType)) {
                 builder.field("doc");
                 builder.copyCurrentStructure(parser);
             }
@@ -874,7 +876,8 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         }
         if (upsertRequest != null) {
             XContentType xContentType = upsertRequest.getContentType();
-            try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, upsertRequest.source(), xContentType)) {
+            try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
+                LoggingDeprecationHandler.INSTANCE, upsertRequest.source(), xContentType)) {
                 builder.field("upsert");
                 builder.copyCurrentStructure(parser);
             }
@@ -21,7 +21,7 @@ package org.elasticsearch.common.rounding;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.TimeValue;
 import org.joda.time.DateTimeField;
 import org.joda.time.DateTimeZone;
@@ -33,7 +33,7 @@ import java.util.Objects;
 /**
  * A strategy for rounding long values.
  */
-public abstract class Rounding implements Streamable {
+public abstract class Rounding implements Writeable {

     public abstract byte id();

@@ -107,13 +107,10 @@ public abstract class Rounding implements Streamable {

         static final byte ID = 1;

-        private DateTimeUnit unit;
-        private DateTimeField field;
-        private DateTimeZone timeZone;
-        private boolean unitRoundsToMidnight;
-
-        TimeUnitRounding() { // for serialization
-        }
+        private final DateTimeUnit unit;
+        private final DateTimeField field;
+        private final DateTimeZone timeZone;
+        private final boolean unitRoundsToMidnight;

         TimeUnitRounding(DateTimeUnit unit, DateTimeZone timeZone) {
             this.unit = unit;
@@ -122,6 +119,13 @@ public abstract class Rounding implements Streamable {
             this.timeZone = timeZone;
         }

+        TimeUnitRounding(StreamInput in) throws IOException {
+            unit = DateTimeUnit.resolve(in.readByte());
+            timeZone = DateTimeZone.forID(in.readString());
+            field = unit.field(timeZone);
+            unitRoundsToMidnight = field.getDurationField().getUnitMillis() > 60L * 60L * 1000L;
+        }
+
         @Override
         public byte id() {
             return ID;
@@ -237,14 +241,6 @@ public abstract class Rounding implements Streamable {
             return next;
         }

-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            unit = DateTimeUnit.resolve(in.readByte());
-            timeZone = DateTimeZone.forID(in.readString());
-            field = unit.field(timeZone);
-            unitRoundsToMidnight = field.getDurationField().getUnitMillis() > 60L * 60L * 1000L;
-        }
-
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             out.writeByte(unit.id());
@@ -278,11 +274,8 @@ public abstract class Rounding implements Streamable {

         static final byte ID = 2;

-        private long interval;
-        private DateTimeZone timeZone;
-
-        TimeIntervalRounding() { // for serialization
-        }
+        private final long interval;
+        private final DateTimeZone timeZone;

         TimeIntervalRounding(long interval, DateTimeZone timeZone) {
             if (interval < 1)
@@ -291,6 +284,11 @@ public abstract class Rounding implements Streamable {
             this.timeZone = timeZone;
         }

+        TimeIntervalRounding(StreamInput in) throws IOException {
+            interval = in.readVLong();
+            timeZone = DateTimeZone.forID(in.readString());
+        }
+
         @Override
         public byte id() {
             return ID;
@@ -374,12 +372,6 @@ public abstract class Rounding implements Streamable {
             return timeZone.convertLocalToUTC(next, false);
         }

-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            interval = in.readVLong();
-            timeZone = DateTimeZone.forID(in.readString());
-        }
-
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             out.writeVLong(interval);
@@ -415,11 +407,10 @@ public abstract class Rounding implements Streamable {
             Rounding rounding = null;
             byte id = in.readByte();
             switch (id) {
-                case TimeUnitRounding.ID: rounding = new TimeUnitRounding(); break;
-                case TimeIntervalRounding.ID: rounding = new TimeIntervalRounding(); break;
+                case TimeUnitRounding.ID: rounding = new TimeUnitRounding(in); break;
+                case TimeIntervalRounding.ID: rounding = new TimeIntervalRounding(in); break;
                 default: throw new ElasticsearchException("unknown rounding id [" + id + "]");
             }
-            rounding.readFrom(in);
             return rounding;
         }

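Reviewer note: the Rounding rewrite is the standard Streamable-to-Writeable migration — drop the no-arg constructor and mutable readFrom, deserialize through a StreamInput constructor instead, and the fields can become final. A condensed sketch of the pattern with a hypothetical single-field class:

    import java.io.IOException;

    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.io.stream.StreamOutput;
    import org.elasticsearch.common.io.stream.Writeable;

    // hypothetical class illustrating the migration pattern used in Rounding
    public class IntervalHolder implements Writeable {
        private final long interval; // final now: assigned exactly once in either constructor

        IntervalHolder(long interval) {
            this.interval = interval;
        }

        // deserialization moves from a mutable readFrom(StreamInput) into a constructor
        IntervalHolder(StreamInput in) throws IOException {
            this.interval = in.readVLong();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVLong(interval);
        }
    }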
@@ -40,7 +40,7 @@ public class XContentHelper {

     /**
      * Creates a parser based on the bytes provided
-     * @deprecated use {@link #createParser(NamedXContentRegistry, BytesReference, XContentType)} to avoid content type auto-detection
+     * @deprecated use {@link #createParser(NamedXContentRegistry, DeprecationHandler, BytesReference, XContentType)} to avoid content type auto-detection
      */
     @Deprecated
     public static XContentParser createParser(NamedXContentRegistry xContentRegistry, BytesReference bytes) throws IOException {
@@ -60,8 +60,8 @@ public class XContentHelper {
     /**
      * Creates a parser for the bytes using the supplied content-type
      */
-    public static XContentParser createParser(NamedXContentRegistry xContentRegistry, BytesReference bytes,
-                                              XContentType xContentType) throws IOException {
+    public static XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler,
+                                              BytesReference bytes, XContentType xContentType) throws IOException {
         Objects.requireNonNull(xContentType);
         Compressor compressor = CompressorFactory.compressor(bytes);
         if (compressor != null) {
@@ -69,7 +69,7 @@ public class XContentHelper {
             if (compressedInput.markSupported() == false) {
                 compressedInput = new BufferedInputStream(compressedInput);
             }
-            return XContentFactory.xContent(xContentType).createParser(xContentRegistry, compressedInput);
+            return XContentFactory.xContent(xContentType).createParser(xContentRegistry, deprecationHandler, compressedInput);
         } else {
             return xContentType.xContent().createParser(xContentRegistry, bytes.streamInput());
         }
@@ -131,7 +131,7 @@ public class XContentHelper {
     public static Map<String, Object> convertToMap(XContent xContent, InputStream input, boolean ordered)
             throws ElasticsearchParseException {
         // It is safe to use EMPTY here because this never uses namedObject
-        try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, input)) {
+        try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, input)) {
             return ordered ? parser.mapOrdered() : parser.map();
         } catch (IOException e) {
             throw new ElasticsearchParseException("Failed to parse content to map", e);
@@ -161,7 +161,7 @@ public class XContentHelper {

         // It is safe to use EMPTY here because this never uses namedObject
         try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(NamedXContentRegistry.EMPTY,
-            bytes.streamInput())) {
+            LoggingDeprecationHandler.INSTANCE, bytes.streamInput())) {
             parser.nextToken();
             XContentBuilder builder = XContentFactory.jsonBuilder();
             if (prettyPrint) {
@@ -25,6 +25,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -61,7 +62,8 @@ final class DocumentParser {
         final ParseContext.InternalParseContext context;
         final XContentType xContentType = source.getXContentType();

-        try (XContentParser parser = XContentHelper.createParser(docMapperParser.getXContentRegistry(), source.source(), xContentType)) {
+        try (XContentParser parser = XContentHelper.createParser(docMapperParser.getXContentRegistry(),
+            LoggingDeprecationHandler.INSTANCE, source.source(), xContentType)) {
             context = new ParseContext.InternalParseContext(indexSettings.getSettings(), docMapperParser, docMapper, source, parser);
             validateStart(parser);
             internalParseDocument(mapping, context, parser);
@@ -406,12 +406,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl

         if (state == IndexShardState.POST_RECOVERY && newRouting.active()) {
             assert currentRouting.active() == false : "we are in POST_RECOVERY, but our shard routing is active " + currentRouting;
-            // we want to refresh *before* we move to internal STARTED state
-            try {
-                getEngine().refresh("cluster_state_started");
-            } catch (Exception e) {
-                logger.debug("failed to refresh due to move to cluster wide started", e);
-            }

             if (newRouting.primary() && currentRouting.isRelocationTarget() == false) {
                 replicationTracker.activatePrimaryMode(getEngine().getLocalCheckpointTracker().getCheckpoint());
@@ -473,7 +473,7 @@ public class Node implements Closeable {

             final List<PersistentTasksExecutor<?>> tasksExecutors = pluginsService
                 .filterPlugins(PersistentTaskPlugin.class).stream()
-                .map(p -> p.getPersistentTasksExecutor(clusterService))
+                .map(p -> p.getPersistentTasksExecutor(clusterService, threadPool, client))
                 .flatMap(List::stream)
                 .collect(toList());

@@ -18,8 +18,10 @@
  */
 package org.elasticsearch.plugins;

+import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.persistent.PersistentTasksExecutor;
+import org.elasticsearch.threadpool.ThreadPool;

 import java.util.Collections;
 import java.util.List;
@@ -32,7 +34,8 @@ public interface PersistentTaskPlugin {
     /**
      * Returns additional persistent tasks executors added by this plugin.
      */
-    default List<PersistentTasksExecutor<?>> getPersistentTasksExecutor(ClusterService clusterService) {
+    default List<PersistentTasksExecutor<?>> getPersistentTasksExecutor(ClusterService clusterService,
+                                                                        ThreadPool threadPool, Client client) {
         return Collections.emptyList();
     }

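Reviewer note: any plugin overriding getPersistentTasksExecutor has to pick up the two new parameters, whether or not it uses them. A hypothetical plugin showing the new override shape; a real implementation would construct and return its executor instead of the empty list:

    import java.util.Collections;
    import java.util.List;

    import org.elasticsearch.client.Client;
    import org.elasticsearch.cluster.service.ClusterService;
    import org.elasticsearch.persistent.PersistentTasksExecutor;
    import org.elasticsearch.plugins.PersistentTaskPlugin;
    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.threadpool.ThreadPool;

    // hypothetical plugin, not part of this diff
    public class MyPlugin extends Plugin implements PersistentTaskPlugin {
        @Override
        public List<PersistentTasksExecutor<?>> getPersistentTasksExecutor(ClusterService clusterService,
                                                                           ThreadPool threadPool, Client client) {
            // threadPool and client are now available to executors that need them
            return Collections.emptyList();
        }
    }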
@@ -181,6 +181,19 @@ public class PluginInfo implements Writeable, ToXContentObject {
     * @throws IOException if an I/O exception occurred reading the plugin descriptor
     */
    public static PluginInfo readFromProperties(final Path path) throws IOException {
+        return readFromProperties(path, true);
+    }
+
+    /**
+     * Reads and validates the plugin descriptor file. If {@code enforceVersion} is false then version enforcement for the plugin descriptor
+     * is skipped.
+     *
+     * @param path           the path to the root directory for the plugin
+     * @param enforceVersion whether or not to enforce the version when reading plugin descriptors
+     * @return the plugin info
+     * @throws IOException if an I/O exception occurred reading the plugin descriptor
+     */
+    static PluginInfo readFromProperties(final Path path, final boolean enforceVersion) throws IOException {
        final Path descriptor = path.resolve(ES_PLUGIN_PROPERTIES);

        final Map<String, String> propsMap;
@@ -214,7 +227,7 @@ public class PluginInfo implements Writeable, ToXContentObject {
                    "property [elasticsearch.version] is missing for plugin [" + name + "]");
        }
        final Version esVersion = Version.fromString(esVersionString);
-       if (esVersion.equals(Version.CURRENT) == false) {
+       if (enforceVersion && esVersion.equals(Version.CURRENT) == false) {
            final String message = String.format(
                    Locale.ROOT,
                    "plugin [%s] is incompatible with version [%s]; was designed for version [%s]",
@@ -258,12 +271,12 @@ public class PluginInfo implements Writeable, ToXContentObject {
                break;
            default:
                final String message = String.format(
-                   Locale.ROOT,
-                   "property [%s] must be [%s], [%s], or unspecified but was [%s]",
-                   "has_native_controller",
-                   "true",
-                   "false",
-                   hasNativeControllerValue);
+                       Locale.ROOT,
+                       "property [%s] must be [%s], [%s], or unspecified but was [%s]",
+                       "has_native_controller",
+                       "true",
+                       "false",
+                       hasNativeControllerValue);
                throw new IllegalArgumentException(message);
        }
    }
@@ -277,7 +290,7 @@ public class PluginInfo implements Writeable, ToXContentObject {
            requiresKeystore = Booleans.parseBoolean(requiresKeystoreValue);
        } catch (IllegalArgumentException e) {
            throw new IllegalArgumentException("property [requires.keystore] must be [true] or [false]," +
-               " but was [" + requiresKeystoreValue + "]", e);
+                   " but was [" + requiresKeystoreValue + "]", e);
        }

        if (propsMap.isEmpty() == false) {
@@ -317,7 +317,27 @@ public class PluginsService extends AbstractComponent {
         }
     }

-    static Set<Bundle> getPluginBundles(Path pluginsDirectory) throws IOException {
+    /**
+     * Get the plugin bundles from the specified directory.
+     *
+     * @param pluginsDirectory the directory
+     * @return the set of plugin bundles in the specified directory
+     * @throws IOException if an I/O exception occurs reading the plugin bundles
+     */
+    static Set<Bundle> getPluginBundles(final Path pluginsDirectory) throws IOException {
+        return getPluginBundles(pluginsDirectory, true);
+    }
+
+    /**
+     * Get the plugin bundles from the specified directory. If {@code enforceVersion} is true, then the version in each plugin descriptor
+     * must match the current version.
+     *
+     * @param pluginsDirectory the directory
+     * @param enforceVersion   whether or not to enforce the version when reading plugin descriptors
+     * @return the set of plugin bundles in the specified directory
+     * @throws IOException if an I/O exception occurs reading the plugin bundles
+     */
+    static Set<Bundle> getPluginBundles(final Path pluginsDirectory, final boolean enforceVersion) throws IOException {
         Logger logger = Loggers.getLogger(PluginsService.class);
         Set<Bundle> bundles = new LinkedHashSet<>();

@@ -326,10 +346,10 @@ public class PluginsService extends AbstractComponent {
             logger.trace("--- adding plugin [{}]", plugin.toAbsolutePath());
             final PluginInfo info;
             try {
-                info = PluginInfo.readFromProperties(plugin);
+                info = PluginInfo.readFromProperties(plugin, enforceVersion);
             } catch (IOException e) {
                 throw new IllegalStateException("Could not load plugin descriptor for existing plugin ["
-                    + plugin.getFileName() + "]. Was the plugin built before 2.0?", e);
+                        + plugin.getFileName() + "]. Was the plugin built before 2.0?", e);
             }
             if (bundles.add(new Bundle(info, plugin)) == false) {
                 throw new IllegalStateException("duplicate plugin: " + info);
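Reviewer note: the new enforceVersion flag threads from the remove-plugin CLI down to PluginInfo#readFromProperties, so a plugin built against an older Elasticsearch can still be located and removed. A sketch of the two call shapes; both methods are package-private, so this assumes code living in org.elasticsearch.plugins:

    package org.elasticsearch.plugins;

    import java.io.IOException;
    import java.nio.file.Path;
    import java.util.Set;

    public class EnforceVersionSketch {
        static void demo(Path pluginsDirectory) throws IOException {
            // node startup path: descriptor versions must equal Version.CURRENT
            Set<PluginsService.Bundle> strict = PluginsService.getPluginBundles(pluginsDirectory);
            // elasticsearch-plugin remove path: tolerate descriptors from older versions
            Set<PluginsService.Bundle> lenient = PluginsService.getPluginBundles(pluginsDirectory, false);
        }
    }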
@@ -68,6 +68,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.common.util.set.Sets;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -605,7 +606,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                 BytesStreamOutput out = new BytesStreamOutput();
                 Streams.copy(blob, out);
                 // EMPTY is safe here because RepositoryData#fromXContent calls namedObject
-                try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, out.bytes(), XContentType.JSON)) {
+                try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
+                    LoggingDeprecationHandler.INSTANCE, out.bytes(), XContentType.JSON)) {
                     repositoryData = RepositoryData.snapshotsFromXContent(parser, indexGen);
                 } catch (NotXContentException e) {
                     logger.warn("[{}] index blob is not valid x-content [{} bytes]", snapshotsIndexBlobName, out.bytes().length());
@@ -617,7 +619,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
             try (InputStream blob = snapshotsBlobContainer.readBlob(INCOMPATIBLE_SNAPSHOTS_BLOB)) {
                 BytesStreamOutput out = new BytesStreamOutput();
                 Streams.copy(blob, out);
-                try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, out.bytes(), XContentType.JSON)) {
+                try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
+                    LoggingDeprecationHandler.INSTANCE, out.bytes(), XContentType.JSON)) {
                     repositoryData = repositoryData.incompatibleSnapshotsFromXContent(parser);
                 }
             } catch (NoSuchFileException e) {
@@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.Fuzziness;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -311,7 +312,8 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug

     static Map<String, List<ContextMapping.InternalQueryContext>> parseContextBytes(BytesReference contextBytes,
             NamedXContentRegistry xContentRegistry, ContextMappings contextMappings) throws IOException {
-        try (XContentParser contextParser = XContentHelper.createParser(xContentRegistry, contextBytes, CONTEXT_BYTES_XCONTENT_TYPE)) {
+        try (XContentParser contextParser = XContentHelper.createParser(xContentRegistry,
+            LoggingDeprecationHandler.INSTANCE, contextBytes, CONTEXT_BYTES_XCONTENT_TYPE)) {
             contextParser.nextToken();
             Map<String, List<ContextMapping.InternalQueryContext>> queryContexts = new HashMap<>(contextMappings.size());
             assert contextParser.currentToken() == XContentParser.Token.START_OBJECT;
@@ -362,7 +362,8 @@ public class RestoreService extends AbstractComponent implements ClusterStateApp
                     // Index exist - checking that it's closed
                     if (currentIndexMetaData.getState() != IndexMetaData.State.CLOSE) {
                         // TODO: Enable restore for open indices
-                        throw new SnapshotRestoreException(snapshot, "cannot restore index [" + renamedIndex + "] because it's open");
+                        throw new SnapshotRestoreException(snapshot, "cannot restore index [" + renamedIndex + "] because an open index with same name already exists in the cluster. " +
+                            "Either close or delete the existing index or restore the index under a different name by providing a rename pattern and replacement name");
                     }
                     // Index exist - checking if it's partial restore
                     if (partial) {
@@ -25,6 +25,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.DeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -102,7 +103,8 @@ public class IndexTemplateMetaDataTests extends ESTestCase {

         BytesReference templateBytes = new BytesArray(template);
         final IndexTemplateMetaData indexTemplateMetaData;
-        try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, templateBytes, XContentType.JSON)) {
+        try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
+            DeprecationHandler.THROW_UNSUPPORTED_OPERATION, templateBytes, XContentType.JSON)) {
             indexTemplateMetaData = IndexTemplateMetaData.Builder.fromXContent(parser, "test");
         }

@@ -115,7 +117,8 @@ public class IndexTemplateMetaDataTests extends ESTestCase {
         }

         final IndexTemplateMetaData indexTemplateMetaDataRoundTrip;
-        try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, templateBytesRoundTrip, XContentType.JSON)) {
+        try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
+            DeprecationHandler.THROW_UNSUPPORTED_OPERATION, templateBytesRoundTrip, XContentType.JSON)) {
             indexTemplateMetaDataRoundTrip = IndexTemplateMetaData.Builder.fromXContent(parser, "test");
         }
         assertThat(indexTemplateMetaData, equalTo(indexTemplateMetaDataRoundTrip));
@@ -142,7 +145,8 @@ public class IndexTemplateMetaDataTests extends ESTestCase {
                 randomAlphaOfLength(10) + "\":{\"type\":\"keyword\"}}" +
                 "}}}";
         try (XContentParser parser =
-                 XContentHelper.createParser(NamedXContentRegistry.EMPTY, new BytesArray(templateWithEmptyPattern), XContentType.JSON)) {
+                 XContentHelper.createParser(NamedXContentRegistry.EMPTY,
+                     DeprecationHandler.THROW_UNSUPPORTED_OPERATION, new BytesArray(templateWithEmptyPattern), XContentType.JSON)) {
             final IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
                 () -> IndexTemplateMetaData.Builder.fromXContent(parser, randomAlphaOfLengthBetween(1, 100)));
             assertThat(ex.getMessage(), equalTo("Index patterns must not be null or empty; got []"));
@@ -156,7 +160,8 @@ public class IndexTemplateMetaDataTests extends ESTestCase {
                 randomAlphaOfLength(10) + "\":{\"type\":\"keyword\"}}" +
                 "}}}";
         try (XContentParser parser =
-                 XContentHelper.createParser(NamedXContentRegistry.EMPTY, new BytesArray(templateWithoutPattern), XContentType.JSON)) {
+                 XContentHelper.createParser(NamedXContentRegistry.EMPTY,
+                     DeprecationHandler.THROW_UNSUPPORTED_OPERATION, new BytesArray(templateWithoutPattern), XContentType.JSON)) {
             final IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
                 () -> IndexTemplateMetaData.Builder.fromXContent(parser, randomAlphaOfLengthBetween(1, 100)));
             assertThat(ex.getMessage(), equalTo("Index patterns must not be null or empty; got null"));
@@ -1172,14 +1172,14 @@ public class IndexShardTests extends IndexShardTestCase {

     public void testRefreshMetric() throws IOException {
         IndexShard shard = newStartedShard();
-        assertThat(shard.refreshStats().getTotal(), equalTo(3L)); // refresh on: finalize, end of recovery and on starting shard
+        assertThat(shard.refreshStats().getTotal(), equalTo(2L)); // refresh on: finalize and end of recovery
         long initialTotalTime = shard.refreshStats().getTotalTimeInMillis();
         // check time advances
         for (int i = 1; shard.refreshStats().getTotalTimeInMillis() == initialTotalTime; i++) {
             indexDoc(shard, "test", "test");
-            assertThat(shard.refreshStats().getTotal(), equalTo(3L + i - 1));
+            assertThat(shard.refreshStats().getTotal(), equalTo(2L + i - 1));
             shard.refresh("test");
-            assertThat(shard.refreshStats().getTotal(), equalTo(3L + i));
+            assertThat(shard.refreshStats().getTotal(), equalTo(2L + i));
             assertThat(shard.refreshStats().getTotalTimeInMillis(), greaterThanOrEqualTo(initialTotalTime));
         }
         long refreshCount = shard.refreshStats().getTotal();
@@ -112,7 +112,7 @@ public class SuggestStatsIT extends ESIntegTestCase {

         logger.info("iter {}, iter1 {}, iter2 {}, {}", suggestAllIdx, suggestIdx1, suggestIdx2, endTime - startTime);
         // check suggest time
-        assertThat(suggest.getSuggestTimeInMillis(), greaterThan(0L));
+        assertThat(suggest.getSuggestTimeInMillis(), greaterThanOrEqualTo(0L));
         // the upperbound is num shards * total time since we do searches in parallel
         assertThat(suggest.getSuggestTimeInMillis(), lessThanOrEqualTo(totalShards * (endTime - startTime)));

@@ -124,7 +124,7 @@ public class SuggestStatsIT extends ESIntegTestCase {
             logger.info("evaluating {}", stat.getNode());
             if (nodeIdsWithIndex.contains(stat.getNode().getId())) {
                 assertThat(suggestStats.getSuggestCount(), greaterThan(0L));
-                assertThat(suggestStats.getSuggestTimeInMillis(), greaterThan(0L));
+                assertThat(suggestStats.getSuggestTimeInMillis(), greaterThanOrEqualTo(0L));
                 num++;
             } else {
                 assertThat(suggestStats.getSuggestCount(), equalTo(0L));
@@ -51,25 +51,20 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.PersistentTaskPlugin;
 import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskCancelledException;
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.watcher.ResourceWatcherService;
 import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment;
 import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -96,7 +91,8 @@ public class TestPersistentTasksPlugin extends Plugin implements ActionPlugin, P
     }

     @Override
-    public List<PersistentTasksExecutor<?>> getPersistentTasksExecutor(ClusterService clusterService) {
+    public List<PersistentTasksExecutor<?>> getPersistentTasksExecutor(ClusterService clusterService,
+                                                                       ThreadPool threadPool, Client client) {
         return Collections.singletonList(new TestPersistentTasksExecutor(Settings.EMPTY, clusterService));
     }

@@ -20,6 +20,7 @@
 package org.elasticsearch.test;

 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.DeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContent;
@@ -184,7 +185,8 @@ public final class XContentTestUtils {
         List<String> insertPaths;

         // we can use NamedXContentRegistry.EMPTY here because we only traverse the xContent once and don't use it
-        try (XContentParser parser = createParser(NamedXContentRegistry.EMPTY, xContent, contentType)) {
+        try (XContentParser parser = createParser(NamedXContentRegistry.EMPTY,
+            DeprecationHandler.THROW_UNSUPPORTED_OPERATION, xContent, contentType)) {
             parser.nextToken();
             List<String> possiblePaths = XContentTestUtils.getInsertPaths(parser, new Stack<>());
             if (excludeFilter == null) {
@@ -31,6 +31,7 @@ import java.util.Set;
 import java.util.stream.Stream;

 import org.elasticsearch.common.io.PathUtils;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -94,7 +95,8 @@ public class ClientYamlSuiteRestSpec {

     private static void parseSpecFile(ClientYamlSuiteRestApiParser restApiParser, Path jsonFile, ClientYamlSuiteRestSpec restSpec) {
         try (InputStream stream = Files.newInputStream(jsonFile)) {
-            try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, stream)) {
+            try (XContentParser parser =
+                     JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
                 String filename = jsonFile.getFileName().toString();
                 if (filename.equals("_common.json")) {
                     String currentFieldName = null;
@@ -19,6 +19,8 @@
 package org.elasticsearch.test.rest.yaml.section;

 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.xcontent.DeprecationHandler;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.yaml.YamlXContent;

@@ -63,7 +65,7 @@ public class ClientYamlTestSuite {
         }

         try (XContentParser parser = YamlXContent.yamlXContent.createParser(ExecutableSection.XCONTENT_REGISTRY,
-                Files.newInputStream(file))) {
+                LoggingDeprecationHandler.INSTANCE, Files.newInputStream(file))) {
             return parse(api, filename, parser);
         } catch(Exception e) {
             throw new IOException("Error parsing " + api + "/" + filename, e);
@@ -19,6 +19,7 @@

 package org.elasticsearch.test;

+import org.elasticsearch.common.xcontent.DeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -71,7 +72,8 @@ public class XContentTestUtilsTests extends ESTestCase {
         }
         builder.endObject();

-        try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, builder.bytes(), builder.contentType())) {
+        try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
+            DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes(), builder.contentType())) {
             parser.nextToken();
             List<String> insertPaths = XContentTestUtils.getInsertPaths(parser, new Stack<>());
             assertEquals(5, insertPaths.size());
@@ -96,7 +98,8 @@ public class XContentTestUtilsTests extends ESTestCase {
                 Collections.singletonList("inn\\.er1"), () -> "inner2", () -> new HashMap<>());
         builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(),
                 Collections.singletonList("inn\\.er1"), () -> "field2", () -> "value2");
-        try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, builder.bytes(), builder.contentType())) {
+        try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
+            DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes(), builder.contentType())) {
             Map<String, Object> map = parser.map();
             assertEquals(2, map.size());
             assertEquals("value1", map.get("field1"));