diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index eba6dbfc819..dba0529923f 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -349,7 +349,6 @@ - diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 97711eed427..bd3ea6797db 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -44,7 +44,7 @@ import java.util.stream.Collectors; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_UUID_NA_VALUE; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; /** @@ -357,7 +357,8 @@ public class ElasticsearchException extends RuntimeException implements ToXConte * instances. 
*/ public static ElasticsearchException fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = ensureFieldName(parser.nextToken(), parser::getTokenLocation); + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); String type = null, reason = null, stack = null; ElasticsearchException cause = null; @@ -786,7 +787,9 @@ public class ElasticsearchException extends RuntimeException implements ToXConte TASK_CANCELLED_EXCEPTION(org.elasticsearch.tasks.TaskCancelledException.class, org.elasticsearch.tasks.TaskCancelledException::new, 146, Version.V_5_1_1_UNRELEASED), SHARD_LOCK_OBTAIN_FAILED_EXCEPTION(org.elasticsearch.env.ShardLockObtainFailedException.class, - org.elasticsearch.env.ShardLockObtainFailedException::new, 147, Version.V_5_0_2); + org.elasticsearch.env.ShardLockObtainFailedException::new, 147, Version.V_5_0_2), + UNKNOWN_NAMED_OBJECT_EXCEPTION(org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException.class, + org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException::new, 148, Version.V_5_2_0_UNRELEASED); final Class exceptionClass; final FunctionThatThrowsIOException constructor; diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchParseException.java b/core/src/main/java/org/elasticsearch/ElasticsearchParseException.java index cdccdb8da95..1711e9a3aaf 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchParseException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchParseException.java @@ -24,6 +24,9 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; +/** + * Unchecked exception that is translated into a {@code 400 BAD REQUEST} error when it bubbles out over HTTP. + */ public class ElasticsearchParseException extends ElasticsearchException { public ElasticsearchParseException(String msg, Object... 
args) { diff --git a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java index aef99494d92..f96dfcf0f7c 100644 --- a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -35,6 +35,8 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; import java.util.Locale; /** @@ -186,8 +188,9 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr /** * Gets the location of the written document as a string suitable for a {@code Location} header. * @param routing any routing used in the request. If null the location doesn't include routing information. + * */ - public String getLocation(@Nullable String routing) { + public String getLocation(@Nullable String routing) throws URISyntaxException { // Absolute path for the location of the document. 
This should be allowed as of HTTP/1.1: // https://tools.ietf.org/html/rfc7231#section-7.1.2 String index = getIndex(); @@ -205,7 +208,9 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr if (routing != null) { location.append(routingStart).append(routing); } - return location.toString(); + + URI uri = new URI(location.toString()); + return uri.toASCIIString(); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index 7dd2c0df84c..6f2a72c5ba4 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -141,8 +141,8 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< } assert waitFor >= 0; - final ClusterStateObserver observer = new ClusterStateObserver(clusterService, logger, threadPool.getThreadContext()); - final ClusterState state = observer.observedState(); + final ClusterState state = clusterService.state(); + final ClusterStateObserver observer = new ClusterStateObserver(state, clusterService, null, logger, threadPool.getThreadContext()); if (request.timeout().millis() == 0) { listener.onResponse(getResponse(request, state, waitFor, request.timeout().millis() == 0)); return; @@ -163,8 +163,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< @Override public void onTimeout(TimeValue timeout) { - final ClusterState clusterState = clusterService.state(); - final ClusterHealthResponse response = getResponse(request, clusterState, concreteWaitFor, true); + final ClusterHealthResponse response = getResponse(request, observer.setAndGetObservedState(), concreteWaitFor, true); listener.onResponse(response); } }; diff --git 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java index b73adbf8112..a6c8941358d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexNotFoundException; @@ -67,14 +68,17 @@ public class TransportGetTaskAction extends HandledTransportAction ParseFieldMatcher.STRICT); listener.onResponse(new GetTaskResponse(result)); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 17df06dbf4b..203483d89b3 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; @@ -307,7 +308,8 
@@ public class CreateIndexRequest extends AcknowledgedRequest * Sets the aliases that will be associated with the index when it gets created */ public CreateIndexRequest aliases(BytesReference source) { - try (XContentParser parser = XContentHelper.createParser(source)) { + // EMPTY is safe here because we never call namedObject + try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, source)) { //move to the first alias parser.nextToken(); while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -361,11 +363,7 @@ public class CreateIndexRequest extends AcknowledgedRequest public CreateIndexRequest source(BytesReference source) { XContentType xContentType = XContentFactory.xContentType(source); if (xContentType != null) { - try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(source)) { - source(parser.map()); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to parse source for create index", e); - } + source(XContentHelper.convertToMap(source, false).v2()); } else { settings(source.utf8ToString()); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 77c3a1d4c29..a7d6241d31e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; 
import org.elasticsearch.common.xcontent.XContentHelper; @@ -351,11 +352,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest shardOperationOnReplica(BulkShardRequest request, IndexShard replica) throws Exception { - logger.info("TTRACE: in bulk shardOperationReplica for [{}]", request); Translog.Location location = null; for (int i = 0; i < request.items().length; i++) { BulkItemRequest item = request.items()[i]; diff --git a/core/src/main/java/org/elasticsearch/action/get/GetResponse.java b/core/src/main/java/org/elasticsearch/action/get/GetResponse.java index 1b347a2d05d..3ba21c447e7 100644 --- a/core/src/main/java/org/elasticsearch/action/get/GetResponse.java +++ b/core/src/main/java/org/elasticsearch/action/get/GetResponse.java @@ -27,12 +27,14 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.get.GetField; import org.elasticsearch.index.get.GetResult; import java.io.IOException; import java.util.Iterator; import java.util.Map; +import java.util.Objects; /** * The response of a get action. 
@@ -42,7 +44,7 @@ import java.util.Map; */ public class GetResponse extends ActionResponse implements Iterable, ToXContent { - private GetResult getResult; + GetResult getResult; GetResponse() { } @@ -156,6 +158,11 @@ public class GetResponse extends ActionResponse implements Iterable, T return getResult.toXContent(builder, params); } + public static GetResponse fromXContent(XContentParser parser) throws IOException { + GetResult getResult = GetResult.fromXContent(parser); + return new GetResponse(getResult); + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -168,6 +175,23 @@ public class GetResponse extends ActionResponse implements Iterable, T getResult.writeTo(out); } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GetResponse getResponse = (GetResponse) o; + return Objects.equals(getResult, getResponse.getResult); + } + + @Override + public int hashCode() { + return Objects.hash(getResult); + } + @Override public String toString() { return Strings.toString(this, true); diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java index e1fe435fd10..4fc766e2b30 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java @@ -140,9 +140,7 @@ public class MultiGetResponse extends ActionResponse implements Iterable shardsAllocatedPredicate = newState -> activeShardCount.enoughShardsActive(newState, indexName); diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index ecb03b5c222..fbae9f7a12b 100644 --- 
a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -121,12 +121,12 @@ public abstract class TransportMasterNodeAction masterChangePredicate = MasterNodeChangePredicate.build(clusterState); final DiscoveryNodes nodes = clusterState.nodes(); if (nodes.isLocalNodeElectedMaster() || localExecute(request)) { @@ -197,7 +197,7 @@ public abstract class TransportMasterNodeAction failuresList = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (TOTAL.equals(currentFieldName)) { + total = parser.intValue(); + } else if (SUCCESSFUL.equals(currentFieldName)) { + successful = parser.intValue(); + } else if (FAILED.equals(currentFieldName) == false) { + throwUnknownField(currentFieldName, parser.getTokenLocation()); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (FAILURES.equals(currentFieldName)) { + failuresList = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + failuresList.add(Failure.fromXContent(parser)); + } + } else { + throwUnknownField(currentFieldName, parser.getTokenLocation()); + } + } + } + Failure[] failures = EMPTY; + if (failuresList != null) { + failures = failuresList.toArray(new Failure[failuresList.size()]); + } + return new ShardInfo(total, successful, failures); + } + @Override public String toString() { return "ShardInfo{" + @@ -338,6 +387,45 @@ public class ReplicationResponse extends ActionResponse { builder.endObject(); return builder; } + + public static Failure fromXContent(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); + + String shardIndex = null, 
nodeId = null; + int shardId = -1; + boolean primary = false; + RestStatus status = null; + ElasticsearchException reason = null; + + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (_INDEX.equals(currentFieldName)) { + shardIndex = parser.text(); + } else if (_SHARD.equals(currentFieldName)) { + shardId = parser.intValue(); + } else if (_NODE.equals(currentFieldName)) { + nodeId = parser.text(); + } else if (STATUS.equals(currentFieldName)) { + status = RestStatus.valueOf(parser.text()); + } else if (PRIMARY.equals(currentFieldName)) { + primary = parser.booleanValue(); + } else { + throwUnknownField(currentFieldName, parser.getTokenLocation()); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (REASON.equals(currentFieldName)) { + reason = ElasticsearchException.fromXContent(parser); + } else { + throwUnknownField(currentFieldName, parser.getTokenLocation()); + } + } + } + return new Failure(new ShardId(shardIndex, IndexMetaData.INDEX_UUID_NA_VALUE, shardId), nodeId, reason, status, primary); + } } } } diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 139f37007c9..f03385e3829 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -638,7 +638,7 @@ public abstract class TransportReplicationAction< @Override protected void doRun() { setPhase(task, "routing"); - final ClusterState state = observer.observedState(); + final ClusterState state = observer.setAndGetObservedState(); if (handleBlockExceptions(state)) { return; } diff --git 
a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index 7670eab102a..421e2458b0d 100644 --- a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -111,7 +111,6 @@ public abstract class TransportInstanceSingleOperationAction listener) { this.request = request; @@ -119,13 +118,12 @@ public abstract class TransportInstanceSingleOperationAction() { @Override @@ -221,18 +219,18 @@ public abstract class TransportInstanceSingleOperationAction esSettings) { + private static Environment initialEnvironment(boolean foreground, Path pidFile, Settings initialSettings) { Terminal terminal = foreground ? Terminal.DEFAULT : null; Settings.Builder builder = Settings.builder(); if (pidFile != null) { builder.put(Environment.PIDFILE_SETTING.getKey(), pidFile); } - return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal, esSettings); + builder.put(initialSettings); + return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal, Collections.emptyMap()); } private void start() throws NodeValidationException { @@ -262,7 +261,7 @@ final class Bootstrap { final boolean foreground, final Path pidFile, final boolean quiet, - final Map esSettings) throws BootstrapException, NodeValidationException, UserException { + final Settings initialSettings) throws BootstrapException, NodeValidationException, UserException { // Set the system property before anything has a chance to trigger its use initLoggerPrefix(); @@ -272,7 +271,7 @@ final class Bootstrap { INSTANCE = new Bootstrap(); - Environment environment = initialEnvironment(foreground, pidFile, esSettings); + Environment environment = 
initialEnvironment(foreground, pidFile, initialSettings); try { LogConfigurator.configure(environment); } catch (IOException e) { diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapException.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapException.java index 540a732dfae..07ae0f9033f 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapException.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapException.java @@ -20,14 +20,14 @@ package org.elasticsearch.bootstrap; import java.nio.file.Path; -import java.util.Map; /** * Wrapper exception for checked exceptions thrown during the bootstrap process. Methods invoked * during bootstrap should explicitly declare the checked exceptions that they can throw, rather * than declaring the top-level checked exception {@link Exception}. This exception exists to wrap - * these checked exceptions so that {@link Bootstrap#init(boolean, Path, boolean, Map)} does not have to - * declare all of these checked exceptions. + * these checked exceptions so that + * {@link Bootstrap#init(boolean, Path, boolean, org.elasticsearch.common.settings.Settings)} + * does not have to declare all of these checked exceptions. 
*/ class BootstrapException extends Exception { diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 23a24d88f64..1e530184734 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -25,9 +25,11 @@ import joptsimple.OptionSpecBuilder; import joptsimple.util.PathConverter; import org.elasticsearch.Build; import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.cli.SettingCommand; +import org.elasticsearch.cli.EnvironmentAwareCommand; import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.UserException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.NodeValidationException; @@ -40,7 +42,7 @@ import java.util.Map; /** * This class starts elasticsearch. 
*/ -class Elasticsearch extends SettingCommand { +class Elasticsearch extends EnvironmentAwareCommand { private final OptionSpecBuilder versionOption; private final OptionSpecBuilder daemonizeOption; @@ -90,7 +92,7 @@ class Elasticsearch extends SettingCommand { } @Override - protected void execute(Terminal terminal, OptionSet options, Map settings) throws UserException { + protected void execute(Terminal terminal, OptionSet options, Environment env) throws UserException { if (options.nonOptionArguments().isEmpty() == false) { throw new UserException(ExitCodes.USAGE, "Positional arguments not allowed, found " + options.nonOptionArguments()); } @@ -109,16 +111,16 @@ class Elasticsearch extends SettingCommand { final boolean quiet = options.has(quietOption); try { - init(daemonize, pidFile, quiet, settings); + init(daemonize, pidFile, quiet, env.settings()); } catch (NodeValidationException e) { throw new UserException(ExitCodes.CONFIG, e.getMessage()); } } - void init(final boolean daemonize, final Path pidFile, final boolean quiet, final Map esSettings) + void init(final boolean daemonize, final Path pidFile, final boolean quiet, Settings initialSettings) throws NodeValidationException, UserException { try { - Bootstrap.init(!daemonize, pidFile, quiet, esSettings); + Bootstrap.init(!daemonize, pidFile, quiet, initialSettings); } catch (BootstrapException | RuntimeException e) { // format exceptions to the console in a special way // to avoid 2MB stacktraces from guice, etc. 
diff --git a/core/src/main/java/org/elasticsearch/cli/SettingCommand.java b/core/src/main/java/org/elasticsearch/cli/EnvironmentAwareCommand.java similarity index 74% rename from core/src/main/java/org/elasticsearch/cli/SettingCommand.java rename to core/src/main/java/org/elasticsearch/cli/EnvironmentAwareCommand.java index 17f7c9e5204..b19fc4ca957 100644 --- a/core/src/main/java/org/elasticsearch/cli/SettingCommand.java +++ b/core/src/main/java/org/elasticsearch/cli/EnvironmentAwareCommand.java @@ -22,16 +22,20 @@ package org.elasticsearch.cli; import joptsimple.OptionSet; import joptsimple.OptionSpec; import joptsimple.util.KeyValuePair; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.node.internal.InternalSettingsPreparer; import java.util.HashMap; import java.util.Locale; import java.util.Map; -public abstract class SettingCommand extends Command { +/** A cli command which requires an {@link org.elasticsearch.env.Environment} to use current paths and settings. */ +public abstract class EnvironmentAwareCommand extends Command { private final OptionSpec settingOption; - public SettingCommand(String description) { + public EnvironmentAwareCommand(String description) { super(description); this.settingOption = parser.accepts("E", "Configure a setting").withRequiredArg().ofType(KeyValuePair.class); } @@ -51,9 +55,15 @@ public abstract class SettingCommand extends Command { putSystemPropertyIfSettingIsMissing(settings, "path.home", "es.path.home"); putSystemPropertyIfSettingIsMissing(settings, "path.logs", "es.path.logs"); - execute(terminal, options, settings); + execute(terminal, options, createEnv(terminal, settings)); } + /** Create an {@link Environment} for the command to use. Overrideable for tests. 
*/ + protected Environment createEnv(Terminal terminal, Map settings) { + return InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); + } + + /** Ensure the given setting exists, reading it from system properties if not already set. */ protected static void putSystemPropertyIfSettingIsMissing(final Map settings, final String setting, final String key) { final String value = System.getProperty(key); if (value != null) { @@ -72,6 +82,7 @@ public abstract class SettingCommand extends Command { } } - protected abstract void execute(Terminal terminal, OptionSet options, Map settings) throws Exception; + /** Execute the command with the initialized {@link Environment}. */ + protected abstract void execute(Terminal terminal, OptionSet options, Environment env) throws Exception; } diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 4b9e4b1eac6..79f39d143f5 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; @@ -63,8 +64,11 @@ import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; +import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; +import static java.util.stream.Collectors.toList; import static 
org.elasticsearch.common.unit.TimeValue.timeValueSeconds; /** @@ -140,6 +144,11 @@ public abstract class TransportClient extends AbstractClient { .flatMap(p -> p.getNamedWriteables().stream()) .collect(Collectors.toList())); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(entries); + NamedXContentRegistry xContentRegistry = new NamedXContentRegistry(Stream.of( + searchModule.getNamedXContents().stream(), + pluginsService.filterPlugins(Plugin.class).stream() + .flatMap(p -> p.getNamedXContent().stream()) + ).flatMap(Function.identity()).collect(toList())); ModulesBuilder modules = new ModulesBuilder(); // plugin modules must be added here, before others or we can get crazy injection errors... @@ -158,7 +167,7 @@ public abstract class TransportClient extends AbstractClient { resourcesToClose.add(bigArrays); modules.add(settingsModule); NetworkModule networkModule = new NetworkModule(settings, true, pluginsService.filterPlugins(NetworkPlugin.class), threadPool, - bigArrays, circuitBreakerService, namedWriteableRegistry, networkService); + bigArrays, circuitBreakerService, namedWriteableRegistry, xContentRegistry, networkService); final Transport transport = networkModule.getTransportSupplier().get(); final TransportService transportService = new TransportService(settings, transport, threadPool, networkModule.getTransportInterceptor(), null); diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java index 7b6f2b55aa9..34c9b64256a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -22,6 +22,7 @@ package org.elasticsearch.cluster; import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.elasticsearch.cluster.block.ClusterBlock; import 
org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -40,6 +41,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.compress.CompressedXContent; @@ -49,8 +51,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.zen.PublishClusterStateAction; @@ -403,11 +404,7 @@ public class ClusterState implements ToXContent, Diffable { builder.startObject("mappings"); for (ObjectObjectCursor cursor1 : templateMetaData.mappings()) { - byte[] mappingSource = cursor1.value.uncompressed(); - Map mapping; - try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource)) { - mapping = parser.map(); - } + Map mapping = XContentHelper.convertToMap(new BytesArray(cursor1.value.uncompressed()), false).v2(); if (mapping.size() == 1 && mapping.containsKey(cursor1.key)) { // the type name is the root value, reduce it mapping = (Map) mapping.get(cursor1.key); @@ -435,11 +432,8 @@ public class ClusterState implements ToXContent, Diffable { builder.startObject("mappings"); for (ObjectObjectCursor cursor : indexMetaData.getMappings()) { - byte[] mappingSource = cursor.value.source().uncompressed(); - Map mapping; - try (XContentParser parser = 
XContentFactory.xContent(mappingSource).createParser(mappingSource)) { - mapping = parser.map(); - } + Map mapping = XContentHelper + .convertToMap(new BytesArray(cursor.value.source().uncompressed()), false).v2(); if (mapping.size() == 1 && mapping.containsKey(cursor.key)) { // the type name is the root value, reduce it mapping = (Map) mapping.get(cursor.key); diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java index 6ca815b2fab..cad98198a80 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; +import java.util.Objects; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; @@ -45,7 +46,7 @@ public class ClusterStateObserver { volatile TimeValue timeOutValue; - final AtomicReference lastObservedState; + final AtomicReference lastObservedState; final TimeoutClusterStateListener clusterStateListener = new ObserverClusterStateListener(); // observingContext is not null when waiting on cluster state changes final AtomicReference observingContext = new AtomicReference<>(null); @@ -73,7 +74,7 @@ public class ClusterStateObserver { public ClusterStateObserver(ClusterState initialState, ClusterService clusterService, @Nullable TimeValue timeout, Logger logger, ThreadContext contextHolder) { this.clusterService = clusterService; - this.lastObservedState = new AtomicReference<>(initialState); + this.lastObservedState = new AtomicReference<>(new StoredState(initialState)); this.timeOutValue = timeout; if (timeOutValue != null) { this.startTimeNS = System.nanoTime(); @@ -82,11 +83,14 @@ public class ClusterStateObserver { this.contextHolder = contextHolder; } - /** 
last cluster state and status observed by this observer. Note that this may not be the current one */ - public ClusterState observedState() { - ClusterState state = lastObservedState.get(); - assert state != null; - return state; + /** sets the last observed state to the currently applied cluster state and returns it */ + public ClusterState setAndGetObservedState() { + if (observingContext.get() != null) { + throw new ElasticsearchException("cannot set current cluster state while waiting for a cluster state change"); + } + ClusterState clusterState = clusterService.state(); + lastObservedState.set(new StoredState(clusterState)); + return clusterState; } /** indicates whether this observer has timedout */ @@ -130,7 +134,7 @@ public class ClusterStateObserver { logger.trace("observer timed out. notifying listener. timeout setting [{}], time since start [{}]", timeOutValue, new TimeValue(timeSinceStartMS)); // update to latest, in case people want to retry timedOut = true; - lastObservedState.set(clusterService.state()); + lastObservedState.set(new StoredState(clusterService.state())); listener.onTimeout(timeOutValue); return; } @@ -146,11 +150,10 @@ public class ClusterStateObserver { // sample a new state ClusterState newState = clusterService.state(); - ClusterState lastState = lastObservedState.get(); - if (newState != lastState && statePredicate.test(newState)) { + if (lastObservedState.get().sameState(newState) == false && statePredicate.test(newState)) { // good enough, let's go. logger.trace("observer: sampled state accepted by predicate ({})", newState); - lastObservedState.set(newState); + lastObservedState.set(new StoredState(newState)); listener.onNewClusterState(newState); } else { logger.trace("observer: sampled state rejected by predicate ({}). 
adding listener to ClusterService", newState); @@ -177,7 +180,7 @@ public class ClusterStateObserver { if (observingContext.compareAndSet(context, null)) { clusterService.removeTimeoutListener(this); logger.trace("observer: accepting cluster state change ({})", state); - lastObservedState.set(state); + lastObservedState.set(new StoredState(state)); context.listener.onNewClusterState(state); } else { logger.trace("observer: predicate approved change but observing context has changed - ignoring (new cluster state version [{}])", state.version()); @@ -195,13 +198,12 @@ public class ClusterStateObserver { return; } ClusterState newState = clusterService.state(); - ClusterState lastState = lastObservedState.get(); - if (newState != lastState && context.statePredicate.test(newState)) { + if (lastObservedState.get().sameState(newState) == false && context.statePredicate.test(newState)) { // double check we're still listening if (observingContext.compareAndSet(context, null)) { logger.trace("observer: post adding listener: accepting current cluster state ({})", newState); clusterService.removeTimeoutListener(this); - lastObservedState.set(newState); + lastObservedState.set(new StoredState(newState)); context.listener.onNewClusterState(newState); } else { logger.trace("observer: postAdded - predicate approved state but observing context has changed - ignoring ({})", newState); @@ -230,13 +232,30 @@ public class ClusterStateObserver { long timeSinceStartMS = TimeValue.nsecToMSec(System.nanoTime() - startTimeNS); logger.trace("observer: timeout notification from cluster service. 
timeout setting [{}], time since start [{}]", timeOutValue, new TimeValue(timeSinceStartMS)); // update to latest, in case people want to retry - lastObservedState.set(clusterService.state()); + lastObservedState.set(new StoredState(clusterService.state())); timedOut = true; context.listener.onTimeout(timeOutValue); } } } + /** + * The observer considers two cluster states to be the same if they have the same version and master node id (i.e. null or set) + */ + private static class StoredState { + private final String masterNodeId; + private final long version; + + public StoredState(ClusterState clusterState) { + this.masterNodeId = clusterState.nodes().getMasterNodeId(); + this.version = clusterState.version(); + } + + public boolean sameState(ClusterState clusterState) { + return version == clusterState.version() && Objects.equals(masterNodeId, clusterState.nodes().getMasterNodeId()); + } + } + public interface Listener { /** called when a new state is observed */ diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 0cf124612d0..14b028042a0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -69,6 +69,7 @@ import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Set; +import java.util.function.Predicate; public class ShardStateAction extends AbstractComponent { @@ -91,11 +92,13 @@ public class ShardStateAction extends AbstractComponent { transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler(clusterService, new ShardFailedClusterStateTaskExecutor(allocationService, routingService, logger), logger)); } - private void sendShardAction(final String actionName, final ClusterStateObserver observer, 
final ShardEntry shardEntry, final Listener listener) { - DiscoveryNode masterNode = observer.observedState().nodes().getMasterNode(); + private void sendShardAction(final String actionName, final ClusterState currentState, final ShardEntry shardEntry, final Listener listener) { + ClusterStateObserver observer = new ClusterStateObserver(currentState, clusterService, null, logger, threadPool.getThreadContext()); + DiscoveryNode masterNode = currentState.nodes().getMasterNode(); + Predicate changePredicate = MasterNodeChangePredicate.build(currentState); if (masterNode == null) { logger.warn("{} no master known for action [{}] for shard entry [{}]", shardEntry.shardId, actionName, shardEntry); - waitForNewMasterAndRetry(actionName, observer, shardEntry, listener); + waitForNewMasterAndRetry(actionName, observer, shardEntry, listener, changePredicate); } else { logger.debug("{} sending [{}] to [{}] for shard entry [{}]", shardEntry.shardId, actionName, masterNode.getId(), shardEntry); transportService.sendRequest(masterNode, @@ -108,7 +111,7 @@ public class ShardStateAction extends AbstractComponent { @Override public void handleException(TransportException exp) { if (isMasterChannelException(exp)) { - waitForNewMasterAndRetry(actionName, observer, shardEntry, listener); + waitForNewMasterAndRetry(actionName, observer, shardEntry, listener, changePredicate); } else { logger.warn((Supplier) () -> new ParameterizedMessage("{} unexpected failure while sending request [{}] to [{}] for shard entry [{}]", shardEntry.shardId, actionName, masterNode, shardEntry), exp); listener.onFailure(exp instanceof RemoteTransportException ? (Exception) (exp.getCause() instanceof Exception ? 
exp.getCause() : new ElasticsearchException(exp.getCause())) : exp); @@ -162,20 +165,19 @@ public class ShardStateAction extends AbstractComponent { private void shardFailed(final ShardId shardId, String allocationId, long primaryTerm, final String message, @Nullable final Exception failure, Listener listener, ClusterState currentState) { - ClusterStateObserver observer = new ClusterStateObserver(currentState, clusterService, null, logger, threadPool.getThreadContext()); ShardEntry shardEntry = new ShardEntry(shardId, allocationId, primaryTerm, message, failure); - sendShardAction(SHARD_FAILED_ACTION_NAME, observer, shardEntry, listener); + sendShardAction(SHARD_FAILED_ACTION_NAME, currentState, shardEntry, listener); } // visible for testing - protected void waitForNewMasterAndRetry(String actionName, ClusterStateObserver observer, ShardEntry shardEntry, Listener listener) { + protected void waitForNewMasterAndRetry(String actionName, ClusterStateObserver observer, ShardEntry shardEntry, Listener listener, Predicate changePredicate) { observer.waitForNextChange(new ClusterStateObserver.Listener() { @Override public void onNewClusterState(ClusterState state) { if (logger.isTraceEnabled()) { logger.trace("new cluster state [{}] after waiting for master election to fail shard entry [{}]", state, shardEntry); } - sendShardAction(actionName, observer, shardEntry, listener); + sendShardAction(actionName, state, shardEntry, listener); } @Override @@ -189,7 +191,7 @@ public class ShardStateAction extends AbstractComponent { // we wait indefinitely for a new master assert false; } - }, MasterNodeChangePredicate.build(observer.observedState())); + }, changePredicate); } private static class ShardFailedTransportHandler implements TransportRequestHandler { @@ -354,9 +356,8 @@ public class ShardStateAction extends AbstractComponent { shardStarted(shardRouting, message, listener, clusterService.state()); } public void shardStarted(final ShardRouting shardRouting, final String 
message, Listener listener, ClusterState currentState) { - ClusterStateObserver observer = new ClusterStateObserver(currentState, clusterService, null, logger, threadPool.getThreadContext()); ShardEntry shardEntry = new ShardEntry(shardRouting.shardId(), shardRouting.allocationId().getId(), 0L, message, null); - sendShardAction(SHARD_STARTED_ACTION_NAME, observer, shardEntry, listener); + sendShardAction(SHARD_STARTED_ACTION_NAME, currentState, shardEntry, listener); } private static class ShardStartedTransportHandler implements TransportRequestHandler { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java index e62a3935ad5..647b355cc0e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java @@ -22,12 +22,14 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -225,14 +227,7 @@ public class AliasMetaData extends AbstractDiffable { this.filter = null; return this; } - try { - try (XContentParser parser = XContentFactory.xContent(filter).createParser(filter)) { - filter(parser.mapOrdered()); - } - return this; - } catch (IOException e) { - throw new 
ElasticsearchGenerationException("Failed to generate [" + filter + "]", e); - } + return filter(XContentHelper.convertToMap(XContentFactory.xContent(filter), filter, true)); } public Builder filter(Map filter) { @@ -286,11 +281,7 @@ public class AliasMetaData extends AbstractDiffable { if (binary) { builder.field("filter", aliasMetaData.filter.compressed()); } else { - byte[] data = aliasMetaData.filter().uncompressed(); - try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) { - Map filter = parser.mapOrdered(); - builder.field("filter", filter); - } + builder.field("filter", XContentHelper.convertToMap(new BytesArray(aliasMetaData.filter().uncompressed()), true).v2()); } } if (aliasMetaData.indexRouting() != null) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java index 29faa0f7956..bce6e45c793 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java @@ -25,7 +25,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; @@ -74,8 +76,8 @@ public class AliasValidator extends AbstractComponent { public void validateAliasStandalone(Alias alias) { validateAliasStandalone(alias.name(), alias.indexRouting()); if (Strings.hasLength(alias.filter())) { - try (XContentParser parser = 
XContentFactory.xContent(alias.filter()).createParser(alias.filter())) { - parser.map(); + try { + XContentHelper.convertToMap(XContentFactory.xContent(alias.filter()), alias.filter(), false); } catch (Exception e) { throw new IllegalArgumentException("failed to parse filter for alias [" + alias.name() + "]", e); } @@ -113,9 +115,10 @@ public class AliasValidator extends AbstractComponent { * provided {@link org.elasticsearch.index.query.QueryShardContext} * @throws IllegalArgumentException if the filter is not valid */ - public void validateAliasFilter(String alias, String filter, QueryShardContext queryShardContext) { + public void validateAliasFilter(String alias, String filter, QueryShardContext queryShardContext, + NamedXContentRegistry xContentRegistry) { assert queryShardContext != null; - try (XContentParser parser = XContentFactory.xContent(filter).createParser(filter)) { + try (XContentParser parser = XContentFactory.xContent(filter).createParser(xContentRegistry, filter)) { validateAliasFilter(parser, queryShardContext); } catch (Exception e) { throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e); @@ -127,9 +130,10 @@ public class AliasValidator extends AbstractComponent { * provided {@link org.elasticsearch.index.query.QueryShardContext} * @throws IllegalArgumentException if the filter is not valid */ - public void validateAliasFilter(String alias, byte[] filter, QueryShardContext queryShardContext) { + public void validateAliasFilter(String alias, byte[] filter, QueryShardContext queryShardContext, + NamedXContentRegistry xContentRegistry) { assert queryShardContext != null; - try (XContentParser parser = XContentFactory.xContent(filter).createParser(filter)) { + try (XContentParser parser = XContentFactory.xContent(filter).createParser(xContentRegistry, filter)) { validateAliasFilter(parser, queryShardContext); } catch (Exception e) { throw new IllegalArgumentException("failed to parse filter for alias [" + alias + 
"]", e); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index a1f217e1377..b28ec4a0c86 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -23,6 +23,7 @@ import com.carrotsearch.hppc.LongArrayList; import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.elasticsearch.Version; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.Diff; @@ -34,6 +35,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeFilters; import org.elasticsearch.cluster.routing.allocation.IndexMetaDataUpdater; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; @@ -48,6 +50,7 @@ import org.elasticsearch.common.xcontent.FromXContentBuilder; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.gateway.MetaDataStateFormat; @@ -831,9 +834,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild } public Builder putMapping(String type, String source) throws IOException { - try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) { - putMapping(new MappingMetaData(type, parser.mapOrdered())); - } + putMapping(new 
MappingMetaData(type, XContentHelper.convertToMap(XContentFactory.xContent(source), source, true))); return this; } @@ -1047,11 +1048,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild if (binary) { builder.value(cursor.value.source().compressed()); } else { - byte[] data = cursor.value.source().uncompressed(); - try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) { - Map mapping = parser.mapOrdered(); - builder.map(mapping); - } + builder.map(XContentHelper.convertToMap(new BytesArray(cursor.value.source().uncompressed()), true).v2()); } } builder.endArray(); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index feabc380c4e..7ffa4878fe7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -24,6 +24,7 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.compress.CompressedXContent; @@ -37,6 +38,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -395,10 +397,7 @@ public class IndexTemplateMetaData extends AbstractDiffable cursor : indexTemplateMetaData.mappings()) { byte[] mappingSource = cursor.value.uncompressed(); - Map mapping; - 
try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource)) {; - mapping = parser.map(); - } + Map mapping = XContentHelper.convertToMap(new BytesArray(mappingSource), false).v2(); if (mapping.size() == 1 && mapping.containsKey(cursor.key)) { // the type name is the root value, reduce it mapping = (Map) mapping.get(cursor.key); @@ -411,10 +410,7 @@ public class IndexTemplateMetaData extends AbstractDiffable cursor : indexTemplateMetaData.mappings()) { byte[] data = cursor.value.uncompressed(); - try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) { - Map mapping = parser.mapOrdered(); - builder.map(mapping); - } + builder.map(XContentHelper.convertToMap(new BytesArray(data), true).v2()); } builder.endArray(); } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index 0f9db99326d..39fb3e381f7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.DocumentMapper; @@ -89,10 +88,7 @@ public class MappingMetaData extends AbstractDiffable { public MappingMetaData(CompressedXContent mapping) throws IOException { this.source = mapping; - Map mappingMap; - try (XContentParser parser = XContentHelper.createParser(mapping.compressedReference())) { - mappingMap = parser.mapOrdered(); - } + Map mappingMap = XContentHelper.convertToMap(mapping.compressedReference(), true).v2(); if (mappingMap.size() 
!= 1) { throw new IllegalStateException("Can't derive type from mapping, no root type: " + mapping.string()); } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 9d81939995a..1cbc81cb26c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -59,6 +59,7 @@ import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.env.Environment; import org.elasticsearch.index.Index; @@ -113,12 +114,14 @@ public class MetaDataCreateIndexService extends AbstractComponent { private final Environment env; private final IndexScopedSettings indexScopedSettings; private final ActiveShardsObserver activeShardsObserver; + private final NamedXContentRegistry xContentRegistry; @Inject public MetaDataCreateIndexService(Settings settings, ClusterService clusterService, IndicesService indicesService, AllocationService allocationService, AliasValidator aliasValidator, Environment env, - IndexScopedSettings indexScopedSettings, ThreadPool threadPool) { + IndexScopedSettings indexScopedSettings, ThreadPool threadPool, + NamedXContentRegistry xContentRegistry) { super(settings); this.clusterService = clusterService; this.indicesService = indicesService; @@ -127,6 +130,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { this.env = env; this.indexScopedSettings = indexScopedSettings; this.activeShardsObserver = new ActiveShardsObserver(settings, clusterService, threadPool); + this.xContentRegistry = 
xContentRegistry; } /** @@ -248,7 +252,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { List templateNames = new ArrayList<>(); for (Map.Entry entry : request.mappings().entrySet()) { - mappings.put(entry.getKey(), MapperService.parseMapping(entry.getValue())); + mappings.put(entry.getKey(), MapperService.parseMapping(xContentRegistry, entry.getValue())); } for (Map.Entry entry : request.customs().entrySet()) { @@ -260,9 +264,10 @@ public class MetaDataCreateIndexService extends AbstractComponent { templateNames.add(template.getName()); for (ObjectObjectCursor cursor : template.mappings()) { if (mappings.containsKey(cursor.key)) { - XContentHelper.mergeDefaults(mappings.get(cursor.key), MapperService.parseMapping(cursor.value.string())); + XContentHelper.mergeDefaults(mappings.get(cursor.key), + MapperService.parseMapping(xContentRegistry, cursor.value.string())); } else { - mappings.put(cursor.key, MapperService.parseMapping(cursor.value.string())); + mappings.put(cursor.key, MapperService.parseMapping(xContentRegistry, cursor.value.string())); } } // handle custom @@ -368,12 +373,13 @@ public class MetaDataCreateIndexService extends AbstractComponent { final QueryShardContext queryShardContext = indexService.newQueryShardContext(0, null, () -> 0L); for (Alias alias : request.aliases()) { if (Strings.hasLength(alias.filter())) { - aliasValidator.validateAliasFilter(alias.name(), alias.filter(), queryShardContext); + aliasValidator.validateAliasFilter(alias.name(), alias.filter(), queryShardContext, xContentRegistry); } } for (AliasMetaData aliasMetaData : templatesAliases.values()) { if (aliasMetaData.filter() != null) { - aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), queryShardContext); + aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), + queryShardContext, xContentRegistry); } } diff --git 
a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index f1584ee325c..81fbddce46a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; @@ -64,18 +65,17 @@ public class MetaDataIndexAliasesService extends AbstractComponent { private final MetaDataDeleteIndexService deleteIndexService; + private final NamedXContentRegistry xContentRegistry; + @Inject - public MetaDataIndexAliasesService( - Settings settings, - ClusterService clusterService, - IndicesService indicesService, - AliasValidator aliasValidator, - MetaDataDeleteIndexService deleteIndexService) { + public MetaDataIndexAliasesService(Settings settings, ClusterService clusterService, IndicesService indicesService, + AliasValidator aliasValidator, MetaDataDeleteIndexService deleteIndexService, NamedXContentRegistry xContentRegistry) { super(settings); this.clusterService = clusterService; this.indicesService = indicesService; this.aliasValidator = aliasValidator; this.deleteIndexService = deleteIndexService; + this.xContentRegistry = xContentRegistry; } public void indicesAliases(final IndicesAliasesClusterStateUpdateRequest request, @@ -151,7 +151,8 @@ public class MetaDataIndexAliasesService extends AbstractComponent { } // the context is only used for validation so it's fine to pass fake values for the shard id and the current // 
timestamp - aliasValidator.validateAliasFilter(alias, filter, indexService.newQueryShardContext(0, null, () -> 0L)); + aliasValidator.validateAliasFilter(alias, filter, indexService.newQueryShardContext(0, null, () -> 0L), + xContentRegistry); } }; changed |= action.apply(newAliasValidator, metadata, index); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index 2e11f1e7f45..b8c19fbeb46 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperParsingException; @@ -65,18 +66,20 @@ public class MetaDataIndexTemplateService extends AbstractComponent { private final IndicesService indicesService; private final MetaDataCreateIndexService metaDataCreateIndexService; private final IndexScopedSettings indexScopedSettings; + private final NamedXContentRegistry xContentRegistry; @Inject public MetaDataIndexTemplateService(Settings settings, ClusterService clusterService, MetaDataCreateIndexService metaDataCreateIndexService, AliasValidator aliasValidator, IndicesService indicesService, - IndexScopedSettings indexScopedSettings) { + IndexScopedSettings indexScopedSettings, NamedXContentRegistry xContentRegistry) { super(settings); this.clusterService = clusterService; this.aliasValidator = aliasValidator; this.indicesService = indicesService; this.metaDataCreateIndexService = 
metaDataCreateIndexService; this.indexScopedSettings = indexScopedSettings; + this.xContentRegistry = xContentRegistry; } public void removeTemplates(final RemoveRequest request, final RemoveListener listener) { @@ -165,7 +168,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { throw new IllegalArgumentException("index_template [" + request.name + "] already exists"); } - validateAndAddTemplate(request, templateBuilder, indicesService); + validateAndAddTemplate(request, templateBuilder, indicesService, xContentRegistry); for (Alias alias : request.aliases) { AliasMetaData aliasMetaData = AliasMetaData.builder(alias.name()).filter(alias.filter()) @@ -190,7 +193,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { } private static void validateAndAddTemplate(final PutRequest request, IndexTemplateMetaData.Builder templateBuilder, - IndicesService indicesService) throws Exception { + IndicesService indicesService, NamedXContentRegistry xContentRegistry) throws Exception { Index createdIndex = null; final String temporaryIndexName = UUIDs.randomBase64UUID(); try { @@ -220,7 +223,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage()); } - mappingsForValidation.put(entry.getKey(), MapperService.parseMapping(entry.getValue())); + mappingsForValidation.put(entry.getKey(), MapperService.parseMapping(xContentRegistry, entry.getValue())); } dummyIndexService.mapperService().merge(mappingsForValidation, MergeReason.MAPPING_UPDATE, false); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index e299874990b..1779699d448 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ 
b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.IndexAnalyzers; @@ -50,12 +51,15 @@ import java.util.Set; */ public class MetaDataIndexUpgradeService extends AbstractComponent { + private final NamedXContentRegistry xContentRegistry; private final MapperRegistry mapperRegistry; private final IndexScopedSettings indexScopedSettings; @Inject - public MetaDataIndexUpgradeService(Settings settings, MapperRegistry mapperRegistry, IndexScopedSettings indexScopedSettings) { + public MetaDataIndexUpgradeService(Settings settings, NamedXContentRegistry xContentRegistry, MapperRegistry mapperRegistry, + IndexScopedSettings indexScopedSettings) { super(settings); + this.xContentRegistry = xContentRegistry; this.mapperRegistry = mapperRegistry; this.indexScopedSettings = indexScopedSettings; } @@ -146,7 +150,8 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { } }; try (IndexAnalyzers fakeIndexAnalzyers = new IndexAnalyzers(indexSettings, fakeDefault, fakeDefault, fakeDefault, analyzerMap)) { - MapperService mapperService = new MapperService(indexSettings, fakeIndexAnalzyers, similarityService, mapperRegistry, () -> null); + MapperService mapperService = new MapperService(indexSettings, fakeIndexAnalzyers, xContentRegistry, similarityService, + mapperRegistry, () -> null); mapperService.merge(indexMetaData, MapperService.MergeReason.MAPPING_RECOVERY, false); } } catch (Exception ex) { diff --git 
a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 93c45e7832f..4fa3225f468 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -125,7 +125,7 @@ public class AwarenessAllocationDecider extends AllocationDecider { private Decision underCapacity(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation, boolean moveToNode) { if (awarenessAttributes.length == 0) { return allocation.decision(Decision.YES, NAME, - "allocation awareness is not enabled, set [%s] to enable it", + "allocation awareness is not enabled, set cluster setting [%s] to enable it", CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey()); } @@ -135,7 +135,7 @@ public class AwarenessAllocationDecider extends AllocationDecider { // the node the shard exists on must be associated with an awareness attribute if (!node.node().getAttributes().containsKey(awarenessAttribute)) { return allocation.decision(Decision.NO, NAME, - "node does not contain the awareness attribute [%s]; required attributes [%s=%s]", + "node does not contain the awareness attribute [%s]; required attributes cluster setting [%s=%s]", awarenessAttribute, CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey(), allocation.debugDecision() ? 
Strings.arrayToCommaDelimitedString(awarenessAttributes) : null); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index 4e4fb58799b..281f6a603c3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -120,13 +120,13 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { // check if there are unassigned primaries. if ( allocation.routingNodes().hasUnassignedPrimaries() ) { return allocation.decision(Decision.NO, NAME, - "the cluster has unassigned primary shards and [%s] is set to [%s]", + "the cluster has unassigned primary shards and cluster setting [%s] is set to [%s]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, type); } // check if there are initializing primaries that don't have a relocatingNodeId entry. if ( allocation.routingNodes().hasInactivePrimaries() ) { return allocation.decision(Decision.NO, NAME, - "the cluster has inactive primary shards and [%s] is set to [%s]", + "the cluster has inactive primary shards and cluster setting [%s] is set to [%s]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, type); } @@ -136,14 +136,14 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { // check if there are unassigned shards. if (allocation.routingNodes().hasUnassignedShards() ) { return allocation.decision(Decision.NO, NAME, - "the cluster has unassigned shards and [%s] is set to [%s]", + "the cluster has unassigned shards and cluster setting [%s] is set to [%s]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, type); } // in case all indices are assigned, are there initializing shards which // are not relocating? 
if ( allocation.routingNodes().hasInactiveShards() ) { return allocation.decision(Decision.NO, NAME, - "the cluster has inactive shards and [%s] is set to [%s]", + "the cluster has inactive shards and cluster setting [%s] is set to [%s]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, type); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index 6ec123ddab3..63fbad59b92 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -67,7 +67,7 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { int relocatingShards = allocation.routingNodes().getRelocatingShardCount(); if (relocatingShards >= clusterConcurrentRebalance) { return allocation.decision(Decision.THROTTLE, NAME, - "reached the limit of concurrently rebalancing shards [%d], [%s=%d]", + "reached the limit of concurrently rebalancing shards [%d], cluster setting [%s=%d]", relocatingShards, CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), clusterConcurrentRebalance); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 5eb1ae1751e..56663be1ef4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -138,7 +138,8 @@ public class DiskThresholdDecider extends AllocationDecider { diskThresholdSettings.getFreeBytesThresholdLow(), freeBytes, node.nodeId()); } return 
allocation.decision(Decision.NO, NAME, - "the node is above the low watermark [%s=%s], having less than the minimum required [%s] free space, actual free: [%s]", + "the node is above the low watermark cluster setting [%s=%s], having less than the minimum required [%s] free " + + "space, actual free: [%s]", CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getLowWatermarkRaw(), diskThresholdSettings.getFreeBytesThresholdLow(), new ByteSizeValue(freeBytes)); @@ -162,8 +163,8 @@ public class DiskThresholdDecider extends AllocationDecider { diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytes, node.nodeId()); } return allocation.decision(Decision.NO, NAME, - "the node is above the high watermark [%s=%s], having less than the minimum required [%s] free space, " + - "actual free: [%s]", + "the node is above the high watermark cluster setting [%s=%s], having less than the minimum required [%s] free " + + "space, actual free: [%s]", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getHighWatermarkRaw(), diskThresholdSettings.getFreeBytesThresholdHigh(), new ByteSizeValue(freeBytes)); @@ -180,8 +181,8 @@ public class DiskThresholdDecider extends AllocationDecider { Strings.format1Decimals(usedDiskPercentage, "%"), node.nodeId()); } return allocation.decision(Decision.NO, NAME, - "the node is above the low watermark [%s=%s], using more disk space than the maximum allowed [%s%%], " + - "actual free: [%s%%]", + "the node is above the low watermark cluster setting [%s=%s], using more disk space than the maximum allowed " + + "[%s%%], actual free: [%s%%]", CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getLowWatermarkRaw(), usedDiskThresholdLow, freeDiskPercentage); } else if (freeDiskPercentage > diskThresholdSettings.getFreeDiskThresholdHigh()) { @@ -206,8 +207,8 @@ public class DiskThresholdDecider extends AllocationDecider { 
Strings.format1Decimals(freeDiskPercentage, "%"), node.nodeId()); } return allocation.decision(Decision.NO, NAME, - "the node is above the high watermark [%s=%s], using more disk space than the maximum allowed [%s%%], " + - "actual free: [%s%%]", + "the node is above the high watermark cluster setting [%s=%s], using more disk space than the maximum allowed " + + "[%s%%], actual free: [%s%%]", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getHighWatermarkRaw(), usedDiskThresholdHigh, freeDiskPercentage); } @@ -222,7 +223,7 @@ public class DiskThresholdDecider extends AllocationDecider { "{} free bytes threshold ({} bytes free), preventing allocation", node.nodeId(), diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytesAfterShard); return allocation.decision(Decision.NO, NAME, - "allocating the shard to this node will bring the node above the high watermark [%s=%s] " + + "allocating the shard to this node will bring the node above the high watermark cluster setting [%s=%s] " + "and cause it to have less than the minimum required [%s] of free space (free bytes after shard added: [%s])", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getHighWatermarkRaw(), @@ -234,7 +235,7 @@ public class DiskThresholdDecider extends AllocationDecider { node.nodeId(), Strings.format1Decimals(diskThresholdSettings.getFreeDiskThresholdHigh(), "%"), Strings.format1Decimals(freeSpaceAfterShard, "%")); return allocation.decision(Decision.NO, NAME, - "allocating the shard to this node will bring the node above the high watermark [%s=%s] " + + "allocating the shard to this node will bring the node above the high watermark cluster setting [%s=%s] " + "and cause it to use more disk space than the maximum allowed [%s%%] (free space after shard added: [%s%%])", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getHighWatermarkRaw(), usedDiskThresholdHigh, 
freeSpaceAfterShard); @@ -279,7 +280,7 @@ public class DiskThresholdDecider extends AllocationDecider { diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytes, node.nodeId()); } return allocation.decision(Decision.NO, NAME, - "the shard cannot remain on this node because it is above the high watermark [%s=%s] " + + "the shard cannot remain on this node because it is above the high watermark cluster setting [%s=%s] " + "and there is less than the required [%s] free space on node, actual free: [%s]", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getHighWatermarkRaw(), @@ -291,7 +292,7 @@ public class DiskThresholdDecider extends AllocationDecider { diskThresholdSettings.getFreeDiskThresholdHigh(), freeDiskPercentage, node.nodeId()); } return allocation.decision(Decision.NO, NAME, - "the shard cannot remain on this node because it is above the high watermark [%s=%s] " + + "the shard cannot remain on this node because it is above the high watermark cluster setting [%s=%s] " + "and there is less than the required [%s%%] free disk on node, actual free: [%s%%]", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getHighWatermarkRaw(), diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index b6995c6b972..5a140c51936 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -189,10 +189,12 @@ public class EnableAllocationDecider extends AllocationDecider { } private static String setting(Rebalance rebalance, boolean usedIndexSetting) { - StringBuilder buf = new StringBuilder("["); + StringBuilder buf = new StringBuilder(); if (usedIndexSetting) { + buf.append("index 
setting ["); buf.append(INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey()); } else { + buf.append("cluster setting ["); buf.append(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey()); } buf.append("=").append(rebalance.toString().toLowerCase(Locale.ROOT)).append("]"); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index 21b6b3d1354..855c570a252 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -141,19 +141,19 @@ public class FilterAllocationDecider extends AllocationDecider { private Decision shouldIndexFilter(IndexMetaData indexMd, RoutingNode node, RoutingAllocation allocation) { if (indexMd.requireFilters() != null) { if (!indexMd.requireFilters().match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not match [%s] filters [%s]", + return allocation.decision(Decision.NO, NAME, "node does not match index setting [%s] filters [%s]", IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_PREFIX, indexMd.requireFilters()); } } if (indexMd.includeFilters() != null) { if (!indexMd.includeFilters().match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not match [%s] filters [%s]", + return allocation.decision(Decision.NO, NAME, "node does not match index setting [%s] filters [%s]", IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_PREFIX, indexMd.includeFilters()); } } if (indexMd.excludeFilters() != null) { if (indexMd.excludeFilters().match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node matches [%s] filters [%s]", + return allocation.decision(Decision.NO, NAME, "node matches index setting [%s] filters [%s]", IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey(), 
indexMd.excludeFilters()); } } @@ -163,19 +163,19 @@ public class FilterAllocationDecider extends AllocationDecider { private Decision shouldClusterFilter(RoutingNode node, RoutingAllocation allocation) { if (clusterRequireFilters != null) { if (!clusterRequireFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not match [%s] filters [%s]", + return allocation.decision(Decision.NO, NAME, "node does not match cluster setting [%s] filters [%s]", CLUSTER_ROUTING_REQUIRE_GROUP_PREFIX, clusterRequireFilters); } } if (clusterIncludeFilters != null) { if (!clusterIncludeFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not [%s] filters [%s]", + return allocation.decision(Decision.NO, NAME, "node does not cluster setting [%s] filters [%s]", CLUSTER_ROUTING_INCLUDE_GROUP_PREFIX, clusterIncludeFilters); } } if (clusterExcludeFilters != null) { if (clusterExcludeFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node matches [%s] filters [%s]", + return allocation.decision(Decision.NO, NAME, "node matches cluster setting [%s] filters [%s]", CLUSTER_ROUTING_EXCLUDE_GROUP_PREFIX, clusterExcludeFilters); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java index 30fed539b79..387360f1a04 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java @@ -88,7 +88,8 @@ public class SameShardAllocationDecider extends AllocationDecider { String host = checkNodeOnSameHostAddress ? 
node.node().getHostAddress() : node.node().getHostName(); return allocation.decision(Decision.NO, NAME, "the shard cannot be allocated on host %s [%s], where it already exists on node [%s]; " + - "set [%s] to false to allow multiple nodes on the same host to hold the same shard copies", + "set cluster setting [%s] to false to allow multiple nodes on the same host to hold the same " + + "shard copies", hostType, host, node.nodeId(), CLUSTER_ROUTING_ALLOCATION_SAME_HOST_SETTING.getKey()); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index e6f0fbcd645..2118d37fe47 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -122,12 +122,12 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { if (clusterShardLimit > 0 && decider.test(nodeShardCount, clusterShardLimit)) { return allocation.decision(Decision.NO, NAME, - "too many shards [%d] allocated to this node, [%s=%d]", + "too many shards [%d] allocated to this node, cluster setting [%s=%d]", nodeShardCount, CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), clusterShardLimit); } if (indexShardLimit > 0 && decider.test(indexShardCount, indexShardLimit)) { return allocation.decision(Decision.NO, NAME, - "too many shards [%d] allocated to this node for index [%s], [%s=%d]", + "too many shards [%d] allocated to this node for index [%s], index setting [%s=%d]", indexShardCount, shardRouting.getIndexName(), INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), indexShardLimit); } return allocation.decision(Decision.YES, NAME, @@ -157,7 +157,7 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { } if (clusterShardLimit >= 0 && nodeShardCount 
>= clusterShardLimit) { return allocation.decision(Decision.NO, NAME, - "too many shards [%d] allocated to this node, [%s=%d]", + "too many shards [%d] allocated to this node, cluster setting [%s=%d]", nodeShardCount, CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), clusterShardLimit); } return allocation.decision(Decision.YES, NAME, diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index a59f543ac3f..721de71435d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -126,7 +126,8 @@ public class ThrottlingAllocationDecider extends AllocationDecider { } if (primariesInRecovery >= primariesInitialRecoveries) { // TODO: Should index creation not be throttled for primary shards? 
- return allocation.decision(THROTTLE, NAME, "reached the limit of ongoing initial primary recoveries [%d], [%s=%d]", + return allocation.decision(THROTTLE, NAME, + "reached the limit of ongoing initial primary recoveries [%d], cluster setting [%s=%d]", primariesInRecovery, CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.getKey(), primariesInitialRecoveries); } else { @@ -140,7 +141,7 @@ public class ThrottlingAllocationDecider extends AllocationDecider { int currentInRecoveries = allocation.routingNodes().getIncomingRecoveries(node.nodeId()); if (currentInRecoveries >= concurrentIncomingRecoveries) { return allocation.decision(THROTTLE, NAME, - "reached the limit of incoming shard recoveries [%d], [%s=%d] (can also be set via [%s])", + "reached the limit of incoming shard recoveries [%d], cluster setting [%s=%d] (can also be set via [%s])", currentInRecoveries, CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), concurrentIncomingRecoveries, CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey()); @@ -154,7 +155,7 @@ public class ThrottlingAllocationDecider extends AllocationDecider { if (primaryNodeOutRecoveries >= concurrentOutgoingRecoveries) { return allocation.decision(THROTTLE, NAME, "reached the limit of outgoing shard recoveries [%d] on the node [%s] which holds the primary, " + - "[%s=%d] (can also be set via [%s])", + "cluster setting [%s=%d] (can also be set via [%s])", primaryNodeOutRecoveries, node.nodeId(), CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), concurrentOutgoingRecoveries, diff --git a/core/src/main/java/org/elasticsearch/common/ParseField.java b/core/src/main/java/org/elasticsearch/common/ParseField.java index 7121be7d1d8..fc9377eeb2f 100644 --- a/core/src/main/java/org/elasticsearch/common/ParseField.java +++ b/core/src/main/java/org/elasticsearch/common/ParseField.java @@ -101,14 +101,10 @@ public class ParseField { /** * @param fieldName 
* the field name to match against this {@link ParseField} - * @param strict - * if true an exception will be thrown if a deprecated field name - * is given. If false the deprecated name will be matched but a - * message will also be logged to the {@link DeprecationLogger} * @return true if fieldName matches any of the acceptable * names for this {@link ParseField}. */ - boolean match(String fieldName, boolean strict) { + public boolean match(String fieldName) { Objects.requireNonNull(fieldName, "fieldName cannot be null"); // if this parse field has not been completely deprecated then try to // match the preferred name @@ -128,11 +124,7 @@ public class ParseField { // message to indicate what should be used instead msg = "Deprecated field [" + fieldName + "] used, replaced by [" + allReplacedWith + "]"; } - if (strict) { - throw new IllegalArgumentException(msg); - } else { - DEPRECATION_LOGGER.deprecated(msg); - } + DEPRECATION_LOGGER.deprecated(msg); return true; } } diff --git a/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java b/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java index 9866694a230..a7d412398e5 100644 --- a/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java +++ b/core/src/main/java/org/elasticsearch/common/ParseFieldMatcher.java @@ -22,38 +22,29 @@ package org.elasticsearch.common; import org.elasticsearch.common.settings.Settings; /** - * Matcher to use in combination with {@link ParseField} while parsing requests. Matches a {@link ParseField} - * against a field name and throw deprecation exception depending on the current value of the {@link #PARSE_STRICT} setting. + * Matcher to use in combination with {@link ParseField} while parsing requests. + * + * @deprecated This class used to be useful to parse in strict mode and emit errors rather than deprecation warnings. Now that we return + * warnings as response headers all the time, it is no longer useful and will soon be removed. 
The removal is in progress and there is + * already no strict mode in fact. Use {@link ParseField} directly. */ +@Deprecated public class ParseFieldMatcher { - public static final String PARSE_STRICT = "index.query.parse.strict"; - public static final ParseFieldMatcher EMPTY = new ParseFieldMatcher(false); - public static final ParseFieldMatcher STRICT = new ParseFieldMatcher(true); - - private final boolean strict; + public static final ParseFieldMatcher EMPTY = new ParseFieldMatcher(Settings.EMPTY); + public static final ParseFieldMatcher STRICT = new ParseFieldMatcher(Settings.EMPTY); public ParseFieldMatcher(Settings settings) { - this(settings.getAsBoolean(PARSE_STRICT, false)); - } - - public ParseFieldMatcher(boolean strict) { - this.strict = strict; - } - - /** Should deprecated settings be rejected? */ - public boolean isStrict() { - return strict; + //we don't do anything with the settings argument, this whole class will be soon removed } /** - * Matches a {@link ParseField} against a field name, and throws deprecation exception depending on the current - * value of the {@link #PARSE_STRICT} setting. 
+ * Matches a {@link ParseField} against a field name, * @param fieldName the field name found in the request while parsing * @param parseField the parse field that we are looking for * @throws IllegalArgumentException whenever we are in strict mode and the request contained a deprecated field * @return true whenever the parse field that we are looking for was found, false otherwise */ public boolean match(String fieldName, ParseField parseField) { - return parseField.match(fieldName, strict); + return parseField.match(fieldName); } } diff --git a/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java b/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java index ace0569a14a..55f89ce84ad 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java +++ b/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.logging; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.SuppressLoggerChecks; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -42,7 +41,7 @@ public class DeprecationLogger { * * https://tools.ietf.org/html/rfc7234#section-5.5 */ - public static final String DEPRECATION_HEADER = "Warning"; + public static final String WARNING_HEADER = "Warning"; /** * This is set once by the {@code Node} constructor, but it uses {@link CopyOnWriteArraySet} to ensure that tests can run in parallel. 
@@ -128,7 +127,7 @@ public class DeprecationLogger { while (iterator.hasNext()) { try { - iterator.next().addResponseHeader(DEPRECATION_HEADER, formattedMessage); + iterator.next().addResponseHeader(WARNING_HEADER, formattedMessage); } catch (IllegalStateException e) { // ignored; it should be removed shortly } diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index 530ecefd4cf..679a271deab 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.plugins.NetworkPlugin; @@ -107,13 +108,14 @@ public final class NetworkModule { BigArrays bigArrays, CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, + NamedXContentRegistry xContentRegistry, NetworkService networkService) { this.settings = settings; this.transportClient = transportClient; for (NetworkPlugin plugin : plugins) { if (transportClient == false && HTTP_ENABLED.get(settings)) { Map> httpTransportFactory = plugin.getHttpTransports(settings, threadPool, bigArrays, - circuitBreakerService, namedWriteableRegistry, networkService); + circuitBreakerService, namedWriteableRegistry, xContentRegistry, networkService); for (Map.Entry> entry : httpTransportFactory.entrySet()) { registerHttpTransport(entry.getKey(), entry.getValue()); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java 
b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 5d9adbc34c1..6f52ea16097 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.MemorySizeValue; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -702,7 +703,8 @@ public class Setting extends ToXContentToBytes { } private static List parseableStringToList(String parsableString) { - try (XContentParser xContentParser = XContentType.JSON.xContent().createParser(parsableString)) { + // EMPTY is safe here because we never call namedObject + try (XContentParser xContentParser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY, parsableString)) { XContentParser.Token token = xContentParser.nextToken(); if (token != XContentParser.Token.START_ARRAY) { throw new IllegalArgumentException("expected START_ARRAY but got " + token); diff --git a/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java b/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java index 30c62b91c79..d7eaa627a28 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java +++ b/core/src/main/java/org/elasticsearch/common/settings/loader/XContentSettingsLoader.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.settings.loader; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -46,14 +47,16 @@ public abstract class XContentSettingsLoader implements SettingsLoader { @Override public Map load(String source) throws IOException { - try (XContentParser parser = XContentFactory.xContent(contentType()).createParser(source)) { + // It is safe to use EMPTY here because this never uses namedObject + try (XContentParser parser = XContentFactory.xContent(contentType()).createParser(NamedXContentRegistry.EMPTY, source)) { return load(parser); } } @Override public Map load(byte[] source) throws IOException { - try (XContentParser parser = XContentFactory.xContent(contentType()).createParser(source)) { + // It is safe to use EMPTY here because this never uses namedObject + try (XContentParser parser = XContentFactory.xContent(contentType()).createParser(NamedXContentRegistry.EMPTY, source)) { return load(parser); } } diff --git a/core/src/main/java/org/elasticsearch/common/text/Text.java b/core/src/main/java/org/elasticsearch/common/text/Text.java index 39eb817fe3c..d895b7c11b0 100644 --- a/core/src/main/java/org/elasticsearch/common/text/Text.java +++ b/core/src/main/java/org/elasticsearch/common/text/Text.java @@ -100,7 +100,10 @@ public final class Text implements Comparable { @Override public boolean equals(Object obj) { - if (obj == null) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { return false; } return bytes().equals(((Text) obj).bytes()); diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java b/core/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java new file mode 100644 index 00000000000..046908d654e --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java @@ -0,0 +1,211 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.xcontent; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.unmodifiableMap; +import static java.util.Objects.requireNonNull; + +public class NamedXContentRegistry { + /** + * The empty {@link NamedXContentRegistry} for use when you are sure that you aren't going to call + * {@link XContentParser#namedObject(Class, String, Object)}. Be *very* careful with this singleton because a parser using it will fail + * every call to {@linkplain XContentParser#namedObject(Class, String, Object)}. Every non-test usage really should be checked thoroughly + * and marked with a comment about how it was checked. That way anyone that sees code that uses it knows that it is potentially + * dangerous.
+ */ + public static final NamedXContentRegistry EMPTY = new NamedXContentRegistry(emptyList()); + + /** + * Parses an object with the type T from parser. + */ + public interface FromXContent { + /** + * Parses an object with the type T from parser. + */ + T fromXContent(XContentParser parser) throws IOException; + } + + /** + * Parses an object with the type T from parser. + * @deprecated prefer {@link FromXContent} if possible + */ + @Deprecated + public interface FromXContentWithContext { + T fromXContent(XContentParser parser, Object context) throws IOException; + } + + /** + * An entry in the {@linkplain NamedXContentRegistry} containing the name of the object and the parser that can parse it. + */ + public static class Entry { + /** The class that this entry can read. */ + public final Class categoryClass; + + /** A name for the entry which is unique within the {@link #categoryClass}. */ + public final ParseField name; + + /** A parser capability of parser the entry's class. */ + private final FromXContentWithContext parser; + + /** Creates a new entry which can be stored by the registry. */ + public Entry(Class categoryClass, ParseField name, FromXContent parser) { + this.categoryClass = Objects.requireNonNull(categoryClass); + this.name = Objects.requireNonNull(name); + this.parser = Objects.requireNonNull((p, c) -> parser.fromXContent(p)); + } + /** + * Creates a new entry which can be stored by the registry. + * @deprecated prefer {@link Entry#Entry(Class, ParseField, FromXContent)}. 
Contexts will be removed when possible + */ + @Deprecated + public Entry(Class categoryClass, ParseField name, FromXContentWithContext parser) { + this.categoryClass = Objects.requireNonNull(categoryClass); + this.name = Objects.requireNonNull(name); + this.parser = Objects.requireNonNull(parser); + } + } + + private final Map, Map> registry; + + public NamedXContentRegistry(List entries) { + if (entries.isEmpty()) { + registry = emptyMap(); + return; + } + entries = new ArrayList<>(entries); + entries.sort((e1, e2) -> e1.categoryClass.getName().compareTo(e2.categoryClass.getName())); + + Map, Map> registry = new HashMap<>(); + Map parsers = null; + Class currentCategory = null; + for (Entry entry : entries) { + if (currentCategory != entry.categoryClass) { + if (currentCategory != null) { + // we've seen the last of this category, put it into the big map + registry.put(currentCategory, unmodifiableMap(parsers)); + } + parsers = new HashMap<>(); + currentCategory = entry.categoryClass; + } + + for (String name : entry.name.getAllNamesIncludedDeprecated()) { + Object old = parsers.put(name, entry); + if (old != null) { + throw new IllegalArgumentException("NamedXContent [" + currentCategory.getName() + "][" + entry.name + "]" + + " is already registered for [" + old.getClass().getName() + "]," + + " cannot register [" + entry.parser.getClass().getName() + "]"); + } + } + } + // handle the last category + registry.put(currentCategory, unmodifiableMap(parsers)); + + this.registry = unmodifiableMap(registry); + } + + /** + * Parse a named object, throwing an exception if the parser isn't found. Throws an {@link ElasticsearchException} if the + * {@code categoryClass} isn't registered because this is almost always a bug. Throws a {@link UnknownNamedObjectException} if the + * {@code categoryClass} is registered but the {@code name} isn't. 
+ */ + public T parseNamedObject(Class categoryClass, String name, XContentParser parser, C context) throws IOException { + Map parsers = registry.get(categoryClass); + if (parsers == null) { + if (registry.isEmpty()) { + // The "empty" registry will never work so we throw a better exception as a hint. + throw new ElasticsearchException("namedObject is not supported for this parser"); + } + throw new ElasticsearchException("Unknown namedObject category [" + categoryClass.getName() + "]"); + } + Entry entry = parsers.get(name); + if (entry == null) { + throw new UnknownNamedObjectException(parser.getTokenLocation(), categoryClass, name); + } + if (false == entry.name.match(name)) { + /* Note that this shouldn't happen because we already looked up the entry using the names but we need to call `match` anyway + * because it is responsible for logging deprecation warnings. */ + throw new ParsingException(parser.getTokenLocation(), + "Unknown " + categoryClass.getSimpleName() + " [" + name + "]: Parser didn't match"); + } + return categoryClass.cast(entry.parser.fromXContent(parser, context)); + } + + /** + * Thrown when {@link NamedXContentRegistry#parseNamedObject(Class, String, XContentParser, Object)} is called with an unregistered + * name. When this bubbles up to the rest layer it is converted into a response with {@code 400 BAD REQUEST} status. + */ + public static class UnknownNamedObjectException extends ParsingException { + private final String categoryClass; + private final String name; + + public UnknownNamedObjectException(XContentLocation contentLocation, Class categoryClass, + String name) { + super(contentLocation, "Unknown " + categoryClass.getSimpleName() + " [" + name + "]"); + this.categoryClass = requireNonNull(categoryClass, "categoryClass is required").getName(); + this.name = requireNonNull(name, "name is required"); + } + + /** + * Read from a stream. 
+ */ + public UnknownNamedObjectException(StreamInput in) throws IOException { + super(in); + categoryClass = in.readString(); + name = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(categoryClass); + out.writeString(name); + } + + /** + * Category class that was missing a parser. This is a String instead of a class because the class might not be on the classpath + * of all nodes or it might be exclusive to a plugin or something. + */ + public String getCategoryClass() { + return categoryClass; + } + + /** + * Name of the missing parser. + */ + public String getName() { + return name; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/XContent.java b/core/src/main/java/org/elasticsearch/common/xcontent/XContent.java index 72210f09d9b..e7dbeafe5d0 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/XContent.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/XContent.java @@ -83,31 +83,31 @@ public interface XContent { /** * Creates a parser over the provided string content. */ - XContentParser createParser(String content) throws IOException; + XContentParser createParser(NamedXContentRegistry xContentRegistry, String content) throws IOException; /** * Creates a parser over the provided input stream. */ - XContentParser createParser(InputStream is) throws IOException; + XContentParser createParser(NamedXContentRegistry xContentRegistry, InputStream is) throws IOException; /** * Creates a parser over the provided bytes. */ - XContentParser createParser(byte[] data) throws IOException; + XContentParser createParser(NamedXContentRegistry xContentRegistry, byte[] data) throws IOException; /** * Creates a parser over the provided bytes. 
*/ - XContentParser createParser(byte[] data, int offset, int length) throws IOException; + XContentParser createParser(NamedXContentRegistry xContentRegistry, byte[] data, int offset, int length) throws IOException; /** * Creates a parser over the provided bytes. */ - XContentParser createParser(BytesReference bytes) throws IOException; + XContentParser createParser(NamedXContentRegistry xContentRegistry, BytesReference bytes) throws IOException; /** * Creates a parser over the provided reader. */ - XContentParser createParser(Reader reader) throws IOException; + XContentParser createParser(NamedXContentRegistry xContentRegistry, Reader reader) throws IOException; } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/core/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index 1625289e528..e68a58a7506 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -41,7 +41,7 @@ import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; @SuppressWarnings("unchecked") public class XContentHelper { - public static XContentParser createParser(BytesReference bytes) throws IOException { + public static XContentParser createParser(NamedXContentRegistry xContentRegistry, BytesReference bytes) throws IOException { Compressor compressor = CompressorFactory.compressor(bytes); if (compressor != null) { InputStream compressedInput = compressor.streamInput(bytes.streamInput()); @@ -49,13 +49,14 @@ public class XContentHelper { compressedInput = new BufferedInputStream(compressedInput); } XContentType contentType = XContentFactory.xContentType(compressedInput); - return XContentFactory.xContent(contentType).createParser(compressedInput); + return XContentFactory.xContent(contentType).createParser(xContentRegistry, compressedInput); } else { - return 
XContentFactory.xContent(bytes).createParser(bytes.streamInput()); + return XContentFactory.xContent(bytes).createParser(xContentRegistry, bytes.streamInput()); } } - public static Tuple> convertToMap(BytesReference bytes, boolean ordered) throws ElasticsearchParseException { + public static Tuple> convertToMap(BytesReference bytes, boolean ordered) + throws ElasticsearchParseException { try { XContentType contentType; InputStream input; @@ -71,13 +72,34 @@ public class XContentHelper { contentType = XContentFactory.xContentType(bytes); input = bytes.streamInput(); } - try (XContentParser parser = XContentFactory.xContent(contentType).createParser(input)) { - if (ordered) { - return Tuple.tuple(contentType, parser.mapOrdered()); - } else { - return Tuple.tuple(contentType, parser.map()); - } - } + return new Tuple<>(contentType, convertToMap(XContentFactory.xContent(contentType), input, ordered)); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse content to map", e); + } + } + + /** + * Convert a string in some {@link XContent} format to a {@link Map}. Throws an {@link ElasticsearchParseException} if there is any + * error. + */ + public static Map convertToMap(XContent xContent, String string, boolean ordered) throws ElasticsearchParseException { + // It is safe to use EMPTY here because this never uses namedObject + try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, string)) { + return ordered ? parser.mapOrdered() : parser.map(); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse content to map", e); + } + } + + /** + * Convert a string in some {@link XContent} format to a {@link Map}. Throws an {@link ElasticsearchParseException} if there is any + * error. Note that unlike {@link #convertToMap(BytesReference, boolean)}, this doesn't automatically uncompress the input. 
+ */ + public static Map convertToMap(XContent xContent, InputStream input, boolean ordered) + throws ElasticsearchParseException { + // It is safe to use EMPTY here because this never uses namedObject + try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, input)) { + return ordered ? parser.mapOrdered() : parser.map(); } catch (IOException e) { throw new ElasticsearchParseException("Failed to parse content to map", e); } @@ -92,7 +114,9 @@ public class XContentHelper { if (xContentType == XContentType.JSON && !reformatJson) { return bytes.utf8ToString(); } - try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(bytes.streamInput())) { + // It is safe to use EMPTY here because this never uses namedObject + try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(NamedXContentRegistry.EMPTY, + bytes.streamInput())) { parser.nextToken(); XContentBuilder builder = XContentFactory.jsonBuilder(); if (prettyPrint) { @@ -191,7 +215,6 @@ public class XContentHelper { * Merges the defaults provided as the second parameter into the content of the first. Only does recursive merge * for inner maps. */ - @SuppressWarnings({"unchecked"}) public static void mergeDefaults(Map content, Map defaults) { for (Map.Entry defaultEntry : defaults.entrySet()) { if (!content.containsKey(defaultEntry.getKey())) { @@ -255,33 +278,36 @@ public class XContentHelper { return true; } - public static void copyCurrentStructure(XContentGenerator generator, XContentParser parser) throws IOException { + /** + * Low level implementation detail of {@link XContentGenerator#copyCurrentStructure(XContentParser)}. 
+ */ + public static void copyCurrentStructure(XContentGenerator destination, XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); // Let's handle field-name separately first if (token == XContentParser.Token.FIELD_NAME) { - generator.writeFieldName(parser.currentName()); + destination.writeFieldName(parser.currentName()); token = parser.nextToken(); // fall-through to copy the associated value } switch (token) { case START_ARRAY: - generator.writeStartArray(); + destination.writeStartArray(); while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - copyCurrentStructure(generator, parser); + copyCurrentStructure(destination, parser); } - generator.writeEndArray(); + destination.writeEndArray(); break; case START_OBJECT: - generator.writeStartObject(); + destination.writeStartObject(); while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - copyCurrentStructure(generator, parser); + copyCurrentStructure(destination, parser); } - generator.writeEndObject(); + destination.writeEndObject(); break; default: // others are simple: - copyCurrentEvent(generator, parser); + copyCurrentEvent(destination, parser); } } @@ -349,4 +375,22 @@ public class XContentHelper { builder.rawField(field, source); } } + + /** + * Returns the bytes that represent the XContent output of the provided {@link ToXContent} object, using the provided + * {@link XContentType}. Wraps the output into a new anonymous object depending on the value of the wrapInObject argument. 
+ */ + public static BytesReference toXContent(ToXContent toXContent, XContentType xContentType, boolean wrapInObject) throws IOException { + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + if (wrapInObject) { + builder.startObject(); + } + toXContent.toXContent(builder, ToXContent.EMPTY_PARAMS); + if (wrapInObject) { + builder.endObject(); + } + return builder.bytes(); + } + } + } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java index e5ab2a9f4c7..7ca77442268 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java @@ -249,5 +249,16 @@ public interface XContentParser extends Releasable { */ XContentLocation getTokenLocation(); + // TODO remove context entirely when it isn't needed + /** + * Parse an object by name. + */ + T namedObject(Class categoryClass, String name, Object context) throws IOException; + + /** + * The registry used to resolve {@link #namedObject(Class, String, Object)}. Use this when building a sub-parser from this parser. 
+ */ + NamedXContentRegistry getXContentRegistry(); + boolean isClosed(); } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java b/core/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java index a2180152444..846582fa5f4 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentParser.Token; -import java.io.IOException; import java.util.Locale; import java.util.function.Supplier; @@ -35,34 +34,6 @@ public final class XContentParserUtils { private XContentParserUtils() { } - /** - * Makes sure that current token is of type {@link XContentParser.Token#FIELD_NAME} - * - * @return the token - * @throws ParsingException if the token is not of type {@link XContentParser.Token#FIELD_NAME} - */ - public static Token ensureFieldName(Token token, Supplier location) throws IOException { - return ensureType(Token.FIELD_NAME, token, location); - } - - /** - * Makes sure that current token is of type {@link XContentParser.Token#FIELD_NAME} and the the field name is equal to the provided one - * - * @return the token - * @throws ParsingException if the token is not of type {@link XContentParser.Token#FIELD_NAME} or is not equal to the given - * field name - */ - public static Token ensureFieldName(XContentParser parser, Token token, String fieldName) throws IOException { - Token t = ensureType(Token.FIELD_NAME, token, parser::getTokenLocation); - - String current = parser.currentName() != null ? 
parser.currentName() : ""; - if (current.equals(fieldName) == false) { - String message = "Failed to parse object: expecting field with name [%s] but found [%s]"; - throw new ParsingException(parser.getTokenLocation(), String.format(Locale.ROOT, message, fieldName, current)); - } - return t; - } - /** * @throws ParsingException with a "unknown field found" reason */ @@ -72,16 +43,14 @@ public final class XContentParserUtils { } /** - * Makes sure that current token is of the expected type + * Makes sure that provided token is of the expected type * - * @return the token * @throws ParsingException if the token is not equal to the expected type */ - private static Token ensureType(Token expected, Token current, Supplier location) { - if (current != expected) { + public static void ensureExpectedToken(Token expected, Token actual, Supplier location) { + if (actual != expected) { String message = "Failed to parse object: expecting token of type [%s] but found [%s]"; - throw new ParsingException(location.get(), String.format(Locale.ROOT, message, expected, current)); + throw new ParsingException(location.get(), String.format(Locale.ROOT, message, expected, actual)); } - return current; } } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java b/core/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java index d79173cfc2b..56435fd364b 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java @@ -26,6 +26,7 @@ import com.fasterxml.jackson.dataformat.cbor.CBORFactory; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentGenerator; @@ -78,33 +79,33 @@ public class CborXContent implements XContent { } @Override - public XContentParser createParser(String content) throws IOException { - return new CborXContentParser(cborFactory.createParser(new FastStringReader(content))); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, String content) throws IOException { + return new CborXContentParser(xContentRegistry, cborFactory.createParser(new FastStringReader(content))); } @Override - public XContentParser createParser(InputStream is) throws IOException { - return new CborXContentParser(cborFactory.createParser(is)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, InputStream is) throws IOException { + return new CborXContentParser(xContentRegistry, cborFactory.createParser(is)); } @Override - public XContentParser createParser(byte[] data) throws IOException { - return new CborXContentParser(cborFactory.createParser(data)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, byte[] data) throws IOException { + return new CborXContentParser(xContentRegistry, cborFactory.createParser(data)); } @Override - public XContentParser createParser(byte[] data, int offset, int length) throws IOException { - return new CborXContentParser(cborFactory.createParser(data, offset, length)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, byte[] data, int offset, int length) throws IOException { + return new CborXContentParser(xContentRegistry, cborFactory.createParser(data, offset, length)); } @Override - public XContentParser createParser(BytesReference bytes) throws IOException { - return createParser(bytes.streamInput()); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, BytesReference bytes) throws IOException { + return createParser(xContentRegistry, bytes.streamInput()); } @Override - public XContentParser 
createParser(Reader reader) throws IOException { - return new CborXContentParser(cborFactory.createParser(reader)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, Reader reader) throws IOException { + return new CborXContentParser(xContentRegistry, cborFactory.createParser(reader)); } } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentParser.java index 772a5322cc7..61b4886420f 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentParser.java @@ -20,13 +20,15 @@ package org.elasticsearch.common.xcontent.cbor; import com.fasterxml.jackson.core.JsonParser; + +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContentParser; public class CborXContentParser extends JsonXContentParser { - public CborXContentParser(JsonParser parser) { - super(parser); + public CborXContentParser(NamedXContentRegistry xContentRegistry, JsonParser parser) { + super(xContentRegistry, parser); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java index 1b0b351e6ef..2e4393723e0 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java @@ -25,6 +25,7 @@ import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentGenerator; @@ -79,32 +80,32 @@ public class JsonXContent implements XContent { } @Override - public XContentParser createParser(String content) throws IOException { - return new JsonXContentParser(jsonFactory.createParser(new FastStringReader(content))); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, String content) throws IOException { + return new JsonXContentParser(xContentRegistry, jsonFactory.createParser(new FastStringReader(content))); } @Override - public XContentParser createParser(InputStream is) throws IOException { - return new JsonXContentParser(jsonFactory.createParser(is)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, InputStream is) throws IOException { + return new JsonXContentParser(xContentRegistry, jsonFactory.createParser(is)); } @Override - public XContentParser createParser(byte[] data) throws IOException { - return new JsonXContentParser(jsonFactory.createParser(data)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, byte[] data) throws IOException { + return new JsonXContentParser(xContentRegistry, jsonFactory.createParser(data)); } @Override - public XContentParser createParser(byte[] data, int offset, int length) throws IOException { - return new JsonXContentParser(jsonFactory.createParser(data, offset, length)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, byte[] data, int offset, int length) throws IOException { + return new JsonXContentParser(xContentRegistry, jsonFactory.createParser(data, offset, length)); } @Override - public XContentParser createParser(BytesReference bytes) throws IOException { - return createParser(bytes.streamInput()); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, BytesReference bytes) throws IOException { + return createParser(xContentRegistry, 
bytes.streamInput()); } @Override - public XContentParser createParser(Reader reader) throws IOException { - return new JsonXContentParser(jsonFactory.createParser(reader)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, Reader reader) throws IOException { + return new JsonXContentParser(xContentRegistry, jsonFactory.createParser(reader)); } } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java index 763fac4c6a3..0742e4a716a 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java @@ -31,6 +31,7 @@ import com.fasterxml.jackson.core.util.JsonGeneratorDelegate; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentGenerator; @@ -312,7 +313,8 @@ public class JsonXContentGenerator implements XContentGenerator { throw new IllegalArgumentException("Can't write raw bytes whose xcontent-type can't be guessed"); } if (mayWriteRawData(contentType) == false) { - try (XContentParser parser = XContentFactory.xContent(contentType).createParser(content)) { + // EMPTY is safe here because we never call namedObject when writing raw data + try (XContentParser parser = XContentFactory.xContent(contentType).createParser(NamedXContentRegistry.EMPTY, content)) { parser.nextToken(); writeFieldName(name); copyCurrentStructure(parser); @@ -378,8 +380,9 @@ public class JsonXContentGenerator implements XContentGenerator { } protected void copyRawValue(BytesReference content, XContent 
xContent) throws IOException { + // EMPTY is safe here because we never call namedObject try (StreamInput input = content.streamInput(); - XContentParser parser = xContent.createParser(input)) { + XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, input)) { copyCurrentStructure(parser); } } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java index f7ed46a6496..e5c30208ed6 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java @@ -22,8 +22,10 @@ package org.elasticsearch.common.xcontent.json; import com.fasterxml.jackson.core.JsonLocation; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; + import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.AbstractXContentParser; @@ -35,7 +37,8 @@ public class JsonXContentParser extends AbstractXContentParser { final JsonParser parser; - public JsonXContentParser(JsonParser parser) { + public JsonXContentParser(NamedXContentRegistry xContentRegistry, JsonParser parser) { + super(xContentRegistry); this.parser = parser; } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java b/core/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java index 643326cd82f..b43a13a9193 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java @@ -26,6 +26,7 @@ import com.fasterxml.jackson.dataformat.smile.SmileFactory; 
import com.fasterxml.jackson.dataformat.smile.SmileGenerator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentGenerator; @@ -79,32 +80,32 @@ public class SmileXContent implements XContent { } @Override - public XContentParser createParser(String content) throws IOException { - return new SmileXContentParser(smileFactory.createParser(new FastStringReader(content))); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, String content) throws IOException { + return new SmileXContentParser(xContentRegistry, smileFactory.createParser(new FastStringReader(content))); } @Override - public XContentParser createParser(InputStream is) throws IOException { - return new SmileXContentParser(smileFactory.createParser(is)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, InputStream is) throws IOException { + return new SmileXContentParser(xContentRegistry, smileFactory.createParser(is)); } @Override - public XContentParser createParser(byte[] data) throws IOException { - return new SmileXContentParser(smileFactory.createParser(data)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, byte[] data) throws IOException { + return new SmileXContentParser(xContentRegistry, smileFactory.createParser(data)); } @Override - public XContentParser createParser(byte[] data, int offset, int length) throws IOException { - return new SmileXContentParser(smileFactory.createParser(data, offset, length)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, byte[] data, int offset, int length) throws IOException { + return new SmileXContentParser(xContentRegistry, smileFactory.createParser(data, offset, length)); } 
@Override - public XContentParser createParser(BytesReference bytes) throws IOException { - return createParser(bytes.streamInput()); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, BytesReference bytes) throws IOException { + return createParser(xContentRegistry, bytes.streamInput()); } @Override - public XContentParser createParser(Reader reader) throws IOException { - return new SmileXContentParser(smileFactory.createParser(reader)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, Reader reader) throws IOException { + return new SmileXContentParser(xContentRegistry, smileFactory.createParser(reader)); } } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentParser.java index ad8e12e70bf..c7b4b8c000c 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentParser.java @@ -20,13 +20,15 @@ package org.elasticsearch.common.xcontent.smile; import com.fasterxml.jackson.core.JsonParser; + +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContentParser; public class SmileXContentParser extends JsonXContentParser { - public SmileXContentParser(JsonParser parser) { - super(parser); + public SmileXContentParser(NamedXContentRegistry xContentRegistry, JsonParser parser) { + super(xContentRegistry, parser); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java index d13dcbd9c93..162e5f7fb7d 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java +++ 
b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java @@ -22,6 +22,7 @@ package org.elasticsearch.common.xcontent.support; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -49,7 +50,11 @@ public abstract class AbstractXContentParser implements XContentParser { } } + private final NamedXContentRegistry xContentRegistry; + public AbstractXContentParser(NamedXContentRegistry xContentRegistry) { + this.xContentRegistry = xContentRegistry; + } // The 3rd party parsers we rely on are known to silently truncate fractions: see // http://fasterxml.github.io/jackson-core/javadoc/2.3.0/com/fasterxml/jackson/core/JsonParser.html#getShortValue() @@ -356,6 +361,16 @@ public abstract class AbstractXContentParser implements XContentParser { return null; } + @Override + public T namedObject(Class categoryClass, String name, Object context) throws IOException { + return xContentRegistry.parseNamedObject(categoryClass, name, this, context); + } + + @Override + public NamedXContentRegistry getXContentRegistry() { + return xContentRegistry; + } + @Override public abstract boolean isClosed(); } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java b/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java index 7413f05f583..56dda843c45 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java @@ -25,6 +25,7 @@ import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentGenerator; @@ -74,32 +75,32 @@ public class YamlXContent implements XContent { } @Override - public XContentParser createParser(String content) throws IOException { - return new YamlXContentParser(yamlFactory.createParser(new FastStringReader(content))); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, String content) throws IOException { + return new YamlXContentParser(xContentRegistry, yamlFactory.createParser(new FastStringReader(content))); } @Override - public XContentParser createParser(InputStream is) throws IOException { - return new YamlXContentParser(yamlFactory.createParser(is)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, InputStream is) throws IOException { + return new YamlXContentParser(xContentRegistry, yamlFactory.createParser(is)); } @Override - public XContentParser createParser(byte[] data) throws IOException { - return new YamlXContentParser(yamlFactory.createParser(data)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, byte[] data) throws IOException { + return new YamlXContentParser(xContentRegistry, yamlFactory.createParser(data)); } @Override - public XContentParser createParser(byte[] data, int offset, int length) throws IOException { - return new YamlXContentParser(yamlFactory.createParser(data, offset, length)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, byte[] data, int offset, int length) throws IOException { + return new YamlXContentParser(xContentRegistry, yamlFactory.createParser(data, offset, length)); } @Override - public XContentParser createParser(BytesReference bytes) throws IOException { - return createParser(bytes.streamInput()); + public XContentParser createParser(NamedXContentRegistry 
xContentRegistry, BytesReference bytes) throws IOException { + return createParser(xContentRegistry, bytes.streamInput()); } @Override - public XContentParser createParser(Reader reader) throws IOException { - return new YamlXContentParser(yamlFactory.createParser(reader)); + public XContentParser createParser(NamedXContentRegistry xContentRegistry, Reader reader) throws IOException { + return new YamlXContentParser(xContentRegistry, yamlFactory.createParser(reader)); } } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentParser.java index 5efceac7dcf..c2fdcfa740b 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentParser.java @@ -20,13 +20,15 @@ package org.elasticsearch.common.xcontent.yaml; import com.fasterxml.jackson.core.JsonParser; + +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContentParser; public class YamlXContentParser extends JsonXContentParser { - public YamlXContentParser(JsonParser parser) { - super(parser); + public YamlXContentParser(NamedXContentRegistry xContentRegistry, JsonParser parser) { + super(xContentRegistry, parser); } @Override diff --git a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java index 71c3190e2ee..a4509315f16 100644 --- a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java +++ b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java @@ -35,6 +35,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; import 
org.elasticsearch.common.lucene.store.InputStreamIndexInput; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; @@ -196,8 +197,9 @@ public abstract class MetaDataStateFormat { long filePointer = indexInput.getFilePointer(); long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer; try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) { - try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(new InputStreamIndexInput(slice, - contentSize))) { + // It is safe to use EMPTY here because this never uses namedObject + try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(NamedXContentRegistry.EMPTY, + new InputStreamIndexInput(slice, contentSize))) { return fromXContent(parser); } } @@ -311,7 +313,8 @@ public abstract class MetaDataStateFormat { logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath()); continue; } - try (final XContentParser parser = XContentHelper.createParser(new BytesArray(data))) { + // EMPTY is safe here because no parser uses namedObject + try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, new BytesArray(data))) { state = fromXContent(parser); } if (state == null) { diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index 8389335d889..3734a4eab58 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.cache.query.DisabledQueryCache; @@ -320,6 +321,7 @@ public final class IndexModule { public IndexService newIndexService( NodeEnvironment environment, + NamedXContentRegistry xContentRegistry, IndexService.ShardStoreDeleter shardStoreDeleter, CircuitBreakerService circuitBreakerService, BigArrays bigArrays, @@ -362,18 +364,18 @@ public final class IndexModule { } else { queryCache = new DisabledQueryCache(indexSettings); } - return new IndexService(indexSettings, environment, new SimilarityService(indexSettings, similarities), shardStoreDeleter, - analysisRegistry, engineFactory.get(), circuitBreakerService, bigArrays, threadPool, scriptService, indicesQueriesRegistry, - clusterService, client, queryCache, store, eventListener, searcherWrapperFactory, mapperRegistry, indicesFieldDataCache, - globalCheckpointSyncer, searchOperationListeners, indexOperationListeners); + return new IndexService(indexSettings, environment, xContentRegistry, new SimilarityService(indexSettings, similarities), + shardStoreDeleter, analysisRegistry, engineFactory.get(), circuitBreakerService, bigArrays, threadPool, scriptService, + indicesQueriesRegistry, clusterService, client, queryCache, store, eventListener, searcherWrapperFactory, mapperRegistry, + indicesFieldDataCache, globalCheckpointSyncer, searchOperationListeners, indexOperationListeners); } /** * creates a new mapper service to do administrative work like mapping updates. This *should not* be used for document parsing. * doing so will result in an exception. 
*/ - public MapperService newIndexMapperService(MapperRegistry mapperRegistry) throws IOException { - return new MapperService(indexSettings, analysisRegistry.build(indexSettings), + public MapperService newIndexMapperService(NamedXContentRegistry xContentRegistry, MapperRegistry mapperRegistry) throws IOException { + return new MapperService(indexSettings, analysisRegistry.build(indexSettings), xContentRegistry, new SimilarityService(indexSettings, similarities), mapperRegistry, () -> { throw new UnsupportedOperationException("no index query shard context available"); }); } diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 5ba2889f4bb..11018d75b3b 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; import org.elasticsearch.env.ShardLockObtainFailedException; @@ -102,6 +103,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust private final IndexSearcherWrapper searcherWrapper; private final IndexCache indexCache; private final MapperService mapperService; + private final NamedXContentRegistry xContentRegistry; private final SimilarityService similarityService; private final EngineFactory engineFactory; private final IndexWarmer warmer; @@ -123,6 +125,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust private final Client client; public IndexService(IndexSettings indexSettings, NodeEnvironment nodeEnv, + NamedXContentRegistry xContentRegistry, 
SimilarityService similarityService, ShardStoreDeleter shardStoreDeleter, AnalysisRegistry registry, @@ -146,8 +149,10 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust super(indexSettings); this.indexSettings = indexSettings; this.globalCheckpointSyncer = globalCheckpointSyncer; + this.xContentRegistry = xContentRegistry; this.similarityService = similarityService; - this.mapperService = new MapperService(indexSettings, registry.build(indexSettings), similarityService, mapperRegistry, + this.mapperService = new MapperService(indexSettings, registry.build(indexSettings), xContentRegistry, similarityService, + mapperRegistry, // we parse all percolator queries as they would be parsed on shard 0 () -> newQueryShardContext(0, null, () -> { throw new IllegalArgumentException("Percolator queries are not allowed to use the current timestamp"); @@ -236,6 +241,10 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust return mapperService; } + public NamedXContentRegistry xContentRegistry() { + return xContentRegistry; + } + public SimilarityService similarityService() { return similarityService; } @@ -469,7 +478,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust public QueryShardContext newQueryShardContext(int shardId, IndexReader indexReader, LongSupplier nowInMillis) { return new QueryShardContext( shardId, indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(), - similarityService(), scriptService, queryRegistry, + similarityService(), scriptService, xContentRegistry, queryRegistry, client, indexReader, nowInMillis); } diff --git a/core/src/main/java/org/elasticsearch/index/get/GetField.java b/core/src/main/java/org/elasticsearch/index/get/GetField.java index be3b8d6a257..3a0fa14acee 100644 --- a/core/src/main/java/org/elasticsearch/index/get/GetField.java +++ b/core/src/main/java/org/elasticsearch/index/get/GetField.java @@ -19,17 +19,25 @@ package 
org.elasticsearch.index.get; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MapperService; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.Objects; -public class GetField implements Streamable, Iterable { +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + +public class GetField implements Streamable, ToXContent, Iterable { private String name; private List values; @@ -38,8 +46,8 @@ public class GetField implements Streamable, Iterable { } public GetField(String name, List values) { - this.name = name; - this.values = values; + this.name = Objects.requireNonNull(name, "name must not be null"); + this.values = Objects.requireNonNull(values, "values must not be null"); } public String getName() { @@ -90,4 +98,69 @@ public class GetField implements Streamable, Iterable { out.writeGenericValue(obj); } } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray(name); + for (Object value : values) { + //this call doesn't really need to support writing any kind of object. + //Stored fields values are converted using MappedFieldType#valueForDisplay. + //As a result they can either be Strings, Numbers, Booleans, or BytesReference, that's all. 
+ builder.value(value); + } + builder.endArray(); + return builder; + } + + public static GetField fromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + String fieldName = parser.currentName(); + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation); + List values = new ArrayList<>(); + while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + Object value; + if (token == XContentParser.Token.VALUE_STRING) { + //binary values will be parsed back and returned as base64 strings when reading from json and yaml + value = parser.text(); + } else if (token == XContentParser.Token.VALUE_NUMBER) { + value = parser.numberValue(); + } else if (token == XContentParser.Token.VALUE_BOOLEAN) { + value = parser.booleanValue(); + } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { + //binary values will be parsed back and returned as BytesArray when reading from cbor and smile + value = new BytesArray(parser.binaryValue()); + } else { + throw new ParsingException(parser.getTokenLocation(), "Failed to parse object: unsupported token found [" + token + "]"); + } + values.add(value); + } + return new GetField(fieldName, values); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GetField objects = (GetField) o; + return Objects.equals(name, objects.name) && + Objects.equals(values, objects.values); + } + + @Override + public int hashCode() { + return Objects.hash(name, values); + } + + @Override + public String toString() { + return "GetField{" + + "name='" + name + '\'' + + ", values=" + values + + '}'; + } } diff --git a/core/src/main/java/org/elasticsearch/index/get/GetResult.java b/core/src/main/java/org/elasticsearch/index/get/GetResult.java index 
0f02885a251..a03541c557f 100644 --- a/core/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/core/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.search.lookup.SourceLookup; @@ -38,12 +39,22 @@ import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; import static java.util.Collections.emptyMap; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; import static org.elasticsearch.index.get.GetField.readGetField; public class GetResult implements Streamable, Iterable, ToXContent { + private static final String _INDEX = "_index"; + private static final String _TYPE = "_type"; + private static final String _ID = "_id"; + private static final String _VERSION = "_version"; + private static final String FOUND = "found"; + private static final String FIELDS = "fields"; + private String index; private String type; private String id; @@ -57,7 +68,8 @@ public class GetResult implements Streamable, Iterable, ToXContent { GetResult() { } - public GetResult(String index, String type, String id, long version, boolean exists, BytesReference source, Map fields) { + public GetResult(String index, String type, String id, long version, boolean exists, BytesReference source, + Map fields) { this.index = index; this.type = type; this.id = id; @@ -196,15 +208,6 @@ public class GetResult implements Streamable, Iterable, ToXContent { return fields.values().iterator(); } - static final class Fields { - 
static final String _INDEX = "_index"; - static final String _TYPE = "_type"; - static final String _ID = "_id"; - static final String _VERSION = "_version"; - static final String FOUND = "found"; - static final String FIELDS = "fields"; - } - public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params) throws IOException { List metaFields = new ArrayList<>(); List otherFields = new ArrayList<>(); @@ -225,20 +228,16 @@ public class GetResult implements Streamable, Iterable, ToXContent { builder.field(field.getName(), field.getValue()); } - builder.field(Fields.FOUND, exists); + builder.field(FOUND, exists); if (source != null) { XContentHelper.writeRawField(SourceFieldMapper.NAME, source, builder, params); } if (!otherFields.isEmpty()) { - builder.startObject(Fields.FIELDS); + builder.startObject(FIELDS); for (GetField field : otherFields) { - builder.startArray(field.getName()); - for (Object value : field.getValues()) { - builder.value(value); - } - builder.endArray(); + field.toXContent(builder, params); } builder.endObject(); } @@ -247,23 +246,69 @@ public class GetResult implements Streamable, Iterable, ToXContent { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (!isExists()) { - builder.field(Fields._INDEX, index); - builder.field(Fields._TYPE, type); - builder.field(Fields._ID, id); - builder.field(Fields.FOUND, false); - } else { - builder.field(Fields._INDEX, index); - builder.field(Fields._TYPE, type); - builder.field(Fields._ID, id); + builder.startObject(); + builder.field(_INDEX, index); + builder.field(_TYPE, type); + builder.field(_ID, id); + if (isExists()) { if (version != -1) { - builder.field(Fields._VERSION, version); + builder.field(_VERSION, version); } toXContentEmbedded(builder, params); + } else { + builder.field(FOUND, false); } + builder.endObject(); return builder; } + public static GetResult fromXContent(XContentParser parser) throws IOException { + 
XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); + String currentFieldName = null; + String index = null, type = null, id = null; + long version = -1; + boolean found = false; + BytesReference source = null; + Map fields = new HashMap<>(); + while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (_INDEX.equals(currentFieldName)) { + index = parser.text(); + } else if (_TYPE.equals(currentFieldName)) { + type = parser.text(); + } else if (_ID.equals(currentFieldName)) { + id = parser.text(); + } else if (_VERSION.equals(currentFieldName)) { + version = parser.longValue(); + } else if (FOUND.equals(currentFieldName)) { + found = parser.booleanValue(); + } else { + fields.put(currentFieldName, new GetField(currentFieldName, Collections.singletonList(parser.objectText()))); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (SourceFieldMapper.NAME.equals(currentFieldName)) { + try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) { + //the original document gets slightly modified: whitespaces or pretty printing are not preserved, + //it all depends on the current builder settings + builder.copyCurrentStructure(parser); + source = builder.bytes(); + } + } else if (FIELDS.equals(currentFieldName)) { + while(parser.nextToken() != XContentParser.Token.END_OBJECT) { + GetField getField = GetField.fromXContent(parser); + fields.put(getField.getName(), getField); + } + } else { + throwUnknownField(currentFieldName, parser.getTokenLocation()); + } + } + } + return new GetResult(index, type, id, version, found, source, fields); + } + public static GetResult readGetResult(StreamInput in) throws IOException { GetResult result = new GetResult(); result.readFrom(in); @@ -314,5 +359,28 @@ public 
class GetResult implements Streamable, Iterable, ToXContent { } } } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GetResult getResult = (GetResult) o; + return version == getResult.version && + exists == getResult.exists && + Objects.equals(index, getResult.index) && + Objects.equals(type, getResult.type) && + Objects.equals(id, getResult.id) && + Objects.equals(fields, getResult.fields) && + Objects.equals(sourceAsMap(), getResult.sourceAsMap()); + } + + @Override + public int hashCode() { + return Objects.hash(index, type, id, version, exists, fields, sourceAsMap()); + } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java index b3e967bdad3..d05cec27b2e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -44,6 +45,7 @@ public class DocumentMapperParser { final MapperService mapperService; final IndexAnalyzers indexAnalyzers; + private final NamedXContentRegistry xContentRegistry; private final SimilarityService similarityService; private final Supplier queryShardContextSupplier; @@ -56,11 +58,12 @@ public class DocumentMapperParser { private final Map rootTypeParsers; public DocumentMapperParser(IndexSettings indexSettings, MapperService mapperService, IndexAnalyzers 
indexAnalyzers, - SimilarityService similarityService, MapperRegistry mapperRegistry, + NamedXContentRegistry xContentRegistry, SimilarityService similarityService, MapperRegistry mapperRegistry, Supplier queryShardContextSupplier) { this.parseFieldMatcher = new ParseFieldMatcher(indexSettings.getSettings()); this.mapperService = mapperService; this.indexAnalyzers = indexAnalyzers; + this.xContentRegistry = xContentRegistry; this.similarityService = similarityService; this.queryShardContextSupplier = queryShardContextSupplier; this.typeParsers = mapperRegistry.getMapperParsers(); @@ -159,7 +162,7 @@ public class DocumentMapperParser { private Tuple> extractMapping(String type, String source) throws MapperParsingException { Map root; - try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) { + try (XContentParser parser = XContentFactory.xContent(source).createParser(xContentRegistry, source)) { root = parser.mapOrdered(); } catch (Exception e) { throw new MapperParsingException("failed to parse mapping definition", e); @@ -182,4 +185,8 @@ public class DocumentMapperParser { } return mapping; } + + NamedXContentRegistry getXContentRegistry() { + return xContentRegistry; + } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 72d021c0e3e..d88ef2c7f44 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -58,7 +58,7 @@ final class DocumentParser { final Mapping mapping = docMapper.mapping(); final ParseContext.InternalParseContext context; - try (XContentParser parser = XContentHelper.createParser(source.source())) { + try (XContentParser parser = XContentHelper.createParser(docMapperParser.getXContentRegistry(), source.source())) { context = new ParseContext.InternalParseContext(indexSettings.getSettings(), docMapperParser, 
docMapper, source, parser); validateStart(parser); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 74e97120285..1d34d570a65 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.AbstractIndexComponent; @@ -127,13 +128,14 @@ public class MapperService extends AbstractIndexComponent implements Closeable { final MapperRegistry mapperRegistry; - public MapperService(IndexSettings indexSettings, IndexAnalyzers indexAnalyzers, + public MapperService(IndexSettings indexSettings, IndexAnalyzers indexAnalyzers, NamedXContentRegistry xContentRegistry, SimilarityService similarityService, MapperRegistry mapperRegistry, Supplier queryShardContextSupplier) { super(indexSettings); this.indexAnalyzers = indexAnalyzers; this.fieldTypes = new FieldTypeLookup(); - this.documentParser = new DocumentMapperParser(indexSettings, this, indexAnalyzers, similarityService, mapperRegistry, queryShardContextSupplier); + this.documentParser = new DocumentMapperParser(indexSettings, this, indexAnalyzers, xContentRegistry, similarityService, + mapperRegistry, queryShardContextSupplier); this.indexAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultIndexAnalyzer(), p -> p.indexAnalyzer()); this.searchAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultSearchAnalyzer(), p -> p.searchAnalyzer()); this.searchQuoteAnalyzer = new 
MapperAnalyzerWrapper(indexAnalyzers.getDefaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer()); @@ -186,8 +188,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return this.documentParser; } - public static Map parseMapping(String mappingSource) throws Exception { - try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource)) { + public static Map parseMapping(NamedXContentRegistry xContentRegistry, String mappingSource) throws Exception { + try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(xContentRegistry, mappingSource)) { return parser.map(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index 475848989d4..6e88e8b46cd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -25,7 +25,6 @@ import org.elasticsearch.Version; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -43,7 +42,6 @@ import java.util.Set; import static org.elasticsearch.common.xcontent.support.XContentMapValues.isArray; import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue; import static 
org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue; @@ -59,16 +57,11 @@ public class TypeParsers { private static final Set BOOLEAN_STRINGS = new HashSet<>(Arrays.asList("true", "false")); public static boolean nodeBooleanValue(String name, Object node, Mapper.TypeParser.ParserContext parserContext) { - // Hook onto ParseFieldMatcher so that parsing becomes strict when setting index.query.parse.strict - if (parserContext.parseFieldMatcher().isStrict()) { - return XContentMapValues.nodeBooleanValue(node); - } else { - // TODO: remove this leniency in 6.0 - if (BOOLEAN_STRINGS.contains(node.toString()) == false) { - DEPRECATION_LOGGER.deprecated("Expected a boolean for property [{}] but got [{}]", name, node); - } - return XContentMapValues.lenientNodeBooleanValue(node); + // TODO: remove this leniency in 6.0 + if (BOOLEAN_STRINGS.contains(node.toString()) == false) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for property [{}] but got [{}]", name, node); } + return XContentMapValues.lenientNodeBooleanValue(node); } private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map fieldNode, Mapper.TypeParser.ParserContext parserContext) { @@ -211,10 +204,10 @@ public class TypeParsers { throw new MapperParsingException("[" + propName + "] must not have a [null] value"); } if (propName.equals("store")) { - builder.store(parseStore(name, propNode.toString(), parserContext)); + builder.store(parseStore(propNode.toString())); iterator.remove(); } else if (propName.equals("index")) { - builder.index(parseIndex(name, propNode.toString(), parserContext)); + builder.index(parseIndex(name, propNode.toString())); iterator.remove(); } else if (propName.equals(DOC_VALUES)) { builder.docValues(nodeBooleanValue(DOC_VALUES, propNode, parserContext)); @@ -229,7 +222,11 @@ public class TypeParsers { builder.indexOptions(nodeIndexOptionValue(propNode)); iterator.remove(); } else if (propName.equals("include_in_all")) { - 
builder.includeInAll(nodeBooleanValue("include_in_all", propNode, parserContext)); + if (parserContext.isWithinMultiField()) { + throw new MapperParsingException("include_in_all in multi fields is not allowed. Found the include_in_all in field [" + name + "] which is within a multi field."); + } else { + builder.includeInAll(nodeBooleanValue("include_in_all", propNode, parserContext)); + } iterator.remove(); } else if (propName.equals("similarity")) { SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); @@ -346,7 +343,7 @@ public class TypeParsers { } } - public static boolean parseIndex(String fieldName, String index, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException { + private static boolean parseIndex(String fieldName, String index) throws MapperParsingException { switch (index) { case "true": return true; @@ -355,31 +352,23 @@ public class TypeParsers { case "not_analyzed": case "analyzed": case "no": - if (parserContext.parseFieldMatcher().isStrict() == false) { - DEPRECATION_LOGGER.deprecated("Expected a boolean for property [index] but got [{}]", index); - return "no".equals(index) == false; - } else { - throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true] or [false]"); - } + DEPRECATION_LOGGER.deprecated("Expected a boolean for property [index] but got [{}]", index); + return "no".equals(index) == false; default: throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true] or [false]"); } } - public static boolean parseStore(String fieldName, String store, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException { - if (parserContext.parseFieldMatcher().isStrict()) { - return XContentMapValues.nodeBooleanValue(store); + private static boolean parseStore(String store) throws MapperParsingException { + if 
(BOOLEAN_STRINGS.contains(store) == false) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for property [store] but got [{}]", store); + } + if ("no".equals(store)) { + return false; + } else if ("yes".equals(store)) { + return true; } else { - if (BOOLEAN_STRINGS.contains(store) == false) { - DEPRECATION_LOGGER.deprecated("Expected a boolean for property [store] but got [{}]", store); - } - if ("no".equals(store)) { - return false; - } else if ("yes".equals(store)) { - return true; - } else { - return lenientNodeBooleanValue(store); - } + return lenientNodeBooleanValue(store); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index ca61a66066c..07b39ba12c1 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -62,9 +62,9 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); - builder.field(fieldName); - try (XContentParser parser = XContentFactory.xContent(functionBytes).createParser(functionBytes)) { - builder.copyCurrentStructure(parser); - } + builder.rawField(fieldName, functionBytes); builder.field(DecayFunctionParser.MULTI_VALUE_MODE.getPreferredName(), multiValueMode.name()); builder.endObject(); } @@ -181,7 +179,8 @@ public abstract class DecayFunctionBuilder @Override protected ScoreFunction doToFunction(QueryShardContext context) throws IOException { AbstractDistanceScoreFunction scoreFunction; - try (XContentParser parser = XContentFactory.xContent(functionBytes).createParser(functionBytes)) { + // EMPTY is safe because parseVariable doesn't use namedObject + try (XContentParser parser = 
XContentFactory.xContent(functionBytes).createParser(NamedXContentRegistry.EMPTY, functionBytes)) { scoreFunction = parseVariable(fieldName, parser, context, multiValueMode); } return scoreFunction; diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java index b037261d1f0..93e2b03e5f9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery.FilterFunction; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.ScoreFunction; -import org.elasticsearch.common.xcontent.ParseFieldRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentLocation; @@ -436,8 +435,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder> scoreFunctionsRegistry, - QueryParseContext parseContext) throws IOException { + public static FunctionScoreQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); QueryBuilder query = null; @@ -481,11 +479,8 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder scoreFunction = scoreFunctionsRegistry - .lookup(currentFieldName, parseContext.getParseFieldMatcher(), parser.getTokenLocation()) - .fromXContent(parseContext); + ScoreFunctionBuilder scoreFunction = parser.namedObject(ScoreFunctionBuilder.class, currentFieldName, + parseContext); filterFunctionBuilders.add(new 
FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction)); } } else if (token == XContentParser.Token.START_ARRAY) { @@ -495,7 +490,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder> scoreFunctionsRegistry, - QueryParseContext parseContext, List filterFunctionBuilders) - throws IOException { + private static String parseFiltersAndFunctions(QueryParseContext parseContext, + List filterFunctionBuilders) throws IOException { String currentFieldName = null; XContentParser.Token token; XContentParser parser = parseContext.parser(); @@ -589,8 +583,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder { } public static CommitPoint fromXContent(byte[] data) throws Exception { - try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(data)) { + // EMPTY is safe here because we never call namedObject + try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, data)) { String currentFieldName = null; XContentParser.Token token = parser.nextToken(); if (token == null) { diff --git a/core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java b/core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java index 967876348c8..40a75c16370 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java @@ -34,10 +34,11 @@ import org.apache.lucene.store.OutputStreamDataOutput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.cli.SettingCommand; +import org.elasticsearch.cli.EnvironmentAwareCommand; import org.elasticsearch.cli.Terminal; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.env.Environment; import 
org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.seqno.SequenceNumbersService; @@ -55,7 +56,7 @@ import java.util.List; import java.util.Map; import java.util.Set; -public class TruncateTranslogCommand extends SettingCommand { +public class TruncateTranslogCommand extends EnvironmentAwareCommand { private final OptionSpec translogFolder; private final OptionSpec batchMode; @@ -87,7 +88,7 @@ public class TruncateTranslogCommand extends SettingCommand { } @Override - protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception { boolean batch = options.has(batchMode); Path translogPath = getTranslogPath(options); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index f4c586abc21..413af6466db 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices; import com.carrotsearch.hppc.cursors.ObjectCursor; + import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.index.DirectoryReader; @@ -68,6 +69,8 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.Callback; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.iterable.Iterables; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; @@ -133,7 +136,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import 
java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; -import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -151,6 +153,7 @@ public class IndicesService extends AbstractLifecycleComponent Setting.positiveTimeSetting("indices.cache.cleanup_interval", TimeValue.timeValueMinutes(1), Property.NodeScope); private final PluginsService pluginsService; private final NodeEnvironment nodeEnv; + private final NamedXContentRegistry xContentRegistry; private final TimeValue shardsClosedTimeout; private final AnalysisRegistry analysisRegistry; private final IndicesQueriesRegistry indicesQueriesRegistry; @@ -182,7 +185,7 @@ public class IndicesService extends AbstractLifecycleComponent threadPool.schedule(this.cleanInterval, ThreadPool.Names.SAME, this.cacheCleaner); } - public IndicesService(Settings settings, PluginsService pluginsService, NodeEnvironment nodeEnv, + public IndicesService(Settings settings, PluginsService pluginsService, NodeEnvironment nodeEnv, NamedXContentRegistry xContentRegistry, ClusterSettings clusterSettings, AnalysisRegistry analysisRegistry, IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver, MapperRegistry mapperRegistry, NamedWriteableRegistry namedWriteableRegistry, @@ -193,6 +196,7 @@ public class IndicesService extends AbstractLifecycleComponent this.threadPool = threadPool; this.pluginsService = pluginsService; this.nodeEnv = nodeEnv; + this.xContentRegistry = xContentRegistry; this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS)); this.analysisRegistry = analysisRegistry; this.indicesQueriesRegistry = indicesQueriesRegistry; @@ -441,6 +445,7 @@ public class IndicesService extends AbstractLifecycleComponent } return indexModule.newIndexService( nodeEnv, + xContentRegistry, this, circuitBreakerService, bigArrays, @@ -465,7 +470,7 @@ public class IndicesService 
extends AbstractLifecycleComponent final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexScopeSetting); final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry); pluginsService.onIndexModule(indexModule); - return indexModule.newIndexMapperService(mapperRegistry); + return indexModule.newIndexMapperService(xContentRegistry, mapperRegistry); } /** @@ -1259,11 +1264,16 @@ public class IndicesService extends AbstractLifecycleComponent private final IndexDeletionAllowedPredicate ALWAYS_TRUE = (Index index, IndexSettings indexSettings) -> true; public AliasFilter buildAliasFilter(ClusterState state, String index, String... expressions) { - Function factory = - (parser) -> new QueryParseContext(indicesQueriesRegistry, parser, new ParseFieldMatcher(settings)); + /* Being static, parseAliasFilter doesn't have access to whatever guts it needs to parse a query. Instead of passing in a bunch + * of dependencies we pass in a function that can perform the parsing. 
*/ + ShardSearchRequest.FilterParser filterParser = bytes -> { + try (XContentParser parser = XContentFactory.xContent(bytes).createParser(xContentRegistry, bytes)) { + return new QueryParseContext(indicesQueriesRegistry, parser, new ParseFieldMatcher(settings)).parseInnerQueryBuilder(); + } + }; String[] aliases = indexNameExpressionResolver.filteringAliases(state, index, expressions); IndexMetaData indexMetaData = state.metaData().index(index); - return new AliasFilter(ShardSearchRequest.parseAliasFilter(factory, indexMetaData, aliases), aliases); + return new AliasFilter(ShardSearchRequest.parseAliasFilter(filterParser, indexMetaData, aliases), aliases); } } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index 5f59af04b50..11554358b30 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -397,21 +397,21 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde } private void waitForClusterState(long clusterStateVersion) { - ClusterStateObserver observer = new ClusterStateObserver(clusterService, TimeValue.timeValueMinutes(5), logger, + final ClusterState clusterState = clusterService.state(); + ClusterStateObserver observer = new ClusterStateObserver(clusterState, clusterService, TimeValue.timeValueMinutes(5), logger, threadPool.getThreadContext()); - final ClusterState clusterState = observer.observedState(); if (clusterState.getVersion() >= clusterStateVersion) { logger.trace("node has cluster state with version higher than {} (current: {})", clusterStateVersion, clusterState.getVersion()); return; } else { logger.trace("waiting for cluster state version {} (current: {})", clusterStateVersion, clusterState.getVersion()); - final PlainActionFuture future = new 
PlainActionFuture<>(); + final PlainActionFuture future = new PlainActionFuture<>(); observer.waitForNextChange(new ClusterStateObserver.Listener() { @Override public void onNewClusterState(ClusterState state) { - future.onResponse(null); + future.onResponse(state.getVersion()); } @Override @@ -425,15 +425,14 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde } }, newState -> newState.getVersion() >= clusterStateVersion); try { - future.get(); - logger.trace("successfully waited for cluster state with version {} (current: {})", clusterStateVersion, - observer.observedState().getVersion()); + long currentVersion = future.get(); + logger.trace("successfully waited for cluster state with version {} (current: {})", clusterStateVersion, currentVersion); } catch (Exception e) { logger.debug( (Supplier) () -> new ParameterizedMessage( "failed waiting for cluster state with version {} (current: {})", clusterStateVersion, - observer.observedState().getVersion()), + clusterService.state().getVersion()), e); throw ExceptionsHelper.convertToRuntime(e); } diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java index eaae1a3e881..02d3988c4a8 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -145,8 +145,24 @@ public final class IngestDocument { * or if the field that is found at the provided path is not of the expected type. */ public byte[] getFieldValueAsBytes(String path) { - Object object = getFieldValue(path, Object.class); - if (object instanceof byte[]) { + return getFieldValueAsBytes(path, false); + } + + /** + * Returns the value contained in the document for the provided path as a byte array. + * If the path value is a string, a base64 decode operation will happen. 
+ * If the path value is a byte array, it is just returned + * @param path The path within the document in dot-notation + * @param ignoreMissing The flag to determine whether to throw an exception when `path` is not found in the document. + * @return the byte array for the provided path if existing + * @throws IllegalArgumentException if the path is null, empty, invalid, if the field doesn't exist + * or if the field that is found at the provided path is not of the expected type. + */ + public byte[] getFieldValueAsBytes(String path, boolean ignoreMissing) { + Object object = getFieldValue(path, Object.class, ignoreMissing); + if (object == null) { + return null; + } else if (object instanceof byte[]) { return (byte[]) object; } else if (object instanceof String) { return Base64.getDecoder().decode(object.toString()); diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index 1e938581b8c..1171865a007 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -41,9 +41,11 @@ import org.elasticsearch.common.xcontent.XContentHelper; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; public class PipelineStore extends AbstractComponent implements ClusterStateApplier { @@ -111,17 +113,26 @@ public class PipelineStore extends AbstractComponent implements ClusterStateAppl return currentState; } Map pipelines = currentIngestMetadata.getPipelines(); - if (pipelines.containsKey(request.getId()) == false) { - throw new ResourceNotFoundException("pipeline [{}] is missing", request.getId()); - } else { - pipelines = new HashMap<>(pipelines); - pipelines.remove(request.getId()); - ClusterState.Builder newState = ClusterState.builder(currentState); - 
newState.metaData(MetaData.builder(currentState.getMetaData()) - .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines)) - .build()); - return newState.build(); + Set toRemove = new HashSet<>(); + for (String pipelineKey : pipelines.keySet()) { + if (Regex.simpleMatch(request.getId(), pipelineKey)) { + toRemove.add(pipelineKey); + } } + if (toRemove.isEmpty() && Regex.isMatchAllPattern(request.getId()) == false) { + throw new ResourceNotFoundException("pipeline [{}] is missing", request.getId()); + } else if (toRemove.isEmpty()) { + return currentState; + } + final Map pipelinesCopy = new HashMap<>(pipelines); + for (String key : toRemove) { + pipelinesCopy.remove(key); + } + ClusterState.Builder newState = ClusterState.builder(currentState); + newState.metaData(MetaData.builder(currentState.getMetaData()) + .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelinesCopy)) + .build()); + return newState.build(); } /** diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index ec64c0f710a..bfe5ad453b5 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -74,6 +74,7 @@ import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.DiscoverySettings; @@ -159,6 +160,8 @@ import java.util.function.UnaryOperator; import java.util.stream.Collectors; import java.util.stream.Stream; +import static java.util.stream.Collectors.toList; + /** * A node represent a node within a cluster (cluster.name). 
The {@link #client()} can be used * in order to use a {@link Client} to perform actions/operations against the cluster. @@ -362,8 +365,13 @@ public class Node implements Closeable { .flatMap(p -> p.getNamedWriteables().stream())) .flatMap(Function.identity()).collect(Collectors.toList()); final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); + NamedXContentRegistry xContentRegistry = new NamedXContentRegistry(Stream.of( + searchModule.getNamedXContents().stream(), + pluginsService.filterPlugins(Plugin.class).stream() + .flatMap(p -> p.getNamedXContent().stream()) + ).flatMap(Function.identity()).collect(toList())); final MetaStateService metaStateService = new MetaStateService(settings, nodeEnvironment); - final IndicesService indicesService = new IndicesService(settings, pluginsService, nodeEnvironment, + final IndicesService indicesService = new IndicesService(settings, pluginsService, nodeEnvironment, xContentRegistry, settingsModule.getClusterSettings(), analysisModule.getAnalysisRegistry(), searchModule.getQueryParserRegistry(), clusterModule.getIndexNameExpressionResolver(), indicesModule.getMapperRegistry(), namedWriteableRegistry, threadPool, settingsModule.getIndexScopedSettings(), circuitBreakerService, bigArrays, scriptModule.getScriptService(), @@ -371,14 +379,15 @@ public class Node implements Closeable { Collection pluginComponents = pluginsService.filterPlugins(Plugin.class).stream() .flatMap(p -> p.createComponents(client, clusterService, threadPool, resourceWatcherService, - scriptModule.getScriptService(), searchModule.getSearchRequestParsers()).stream()) + scriptModule.getScriptService(), searchModule.getSearchRequestParsers(), + xContentRegistry).stream()) .collect(Collectors.toList()); Collection>> customMetaDataUpgraders = pluginsService.filterPlugins(Plugin.class).stream() .map(Plugin::getCustomMetaDataUpgrader) .collect(Collectors.toList()); - final NetworkModule networkModule = new 
NetworkModule(settings, false, pluginsService.filterPlugins(NetworkPlugin.class), threadPool, - bigArrays, circuitBreakerService, namedWriteableRegistry, networkService); + final NetworkModule networkModule = new NetworkModule(settings, false, pluginsService.filterPlugins(NetworkPlugin.class), + threadPool, bigArrays, circuitBreakerService, namedWriteableRegistry, xContentRegistry, networkService); final MetaDataUpgrader metaDataUpgrader = new MetaDataUpgrader(customMetaDataUpgraders); final Transport transport = networkModule.getTransportSupplier().get(); final TransportService transportService = newTransportService(settings, transport, threadPool, @@ -404,6 +413,7 @@ public class Node implements Closeable { b.bind(IndicesQueriesRegistry.class).toInstance(searchModule.getQueryParserRegistry()); b.bind(SearchRequestParsers.class).toInstance(searchModule.getSearchRequestParsers()); b.bind(SearchExtRegistry.class).toInstance(searchModule.getSearchExtRegistry()); + b.bind(NamedXContentRegistry.class).toInstance(xContentRegistry); b.bind(PluginsService.class).toInstance(pluginsService); b.bind(Client.class).toInstance(client); b.bind(NodeClient.class).toInstance(client); @@ -432,7 +442,7 @@ public class Node implements Closeable { b.bind(AllocationCommandRegistry.class).toInstance(NetworkModule.getAllocationCommandRegistry()); b.bind(UpdateHelper.class).toInstance(new UpdateHelper(settings, scriptModule.getScriptService())); b.bind(MetaDataIndexUpgradeService.class).toInstance(new MetaDataIndexUpgradeService(settings, - indicesModule.getMapperRegistry(), settingsModule.getIndexScopedSettings())); + xContentRegistry, indicesModule.getMapperRegistry(), settingsModule.getIndexScopedSettings())); b.bind(ClusterInfoService.class).toInstance(clusterInfoService); b.bind(Discovery.class).toInstance(discoveryModule.getDiscovery()); { @@ -589,8 +599,9 @@ public class Node implements Closeable { final TimeValue initialStateTimeout = 
DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.get(settings); if (initialStateTimeout.millis() > 0) { final ThreadPool thread = injector.getInstance(ThreadPool.class); - ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, thread.getThreadContext()); - if (observer.observedState().nodes().getMasterNodeId() == null) { + ClusterState clusterState = clusterService.state(); + ClusterStateObserver observer = new ClusterStateObserver(clusterState, clusterService, null, logger, thread.getThreadContext()); + if (clusterState.nodes().getMasterNodeId() == null) { logger.debug("waiting to join the cluster. timeout [{}]", initialStateTimeout); final CountDownLatch latch = new CountDownLatch(1); observer.waitForNextChange(new ClusterStateObserver.Listener() { diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 8d79b0543f0..403816aafa8 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -27,7 +27,7 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.cli.SettingCommand; +import org.elasticsearch.cli.EnvironmentAwareCommand; import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.collect.Tuple; @@ -103,7 +103,7 @@ import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; * elasticsearch config directory, using the name of the plugin. If any files to be installed * already exist, they will be skipped. 
*/ -class InstallPluginCommand extends SettingCommand { +class InstallPluginCommand extends EnvironmentAwareCommand { private static final String PROPERTY_STAGING_ID = "es.plugins.staging"; @@ -189,18 +189,17 @@ class InstallPluginCommand extends SettingCommand { } @Override - protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception { String pluginId = arguments.value(options); boolean isBatch = options.has(batchOption) || System.console() == null; - execute(terminal, pluginId, isBatch, settings); + execute(terminal, pluginId, isBatch, env); } // pkg private for testing - void execute(Terminal terminal, String pluginId, boolean isBatch, Map settings) throws Exception { + void execute(Terminal terminal, String pluginId, boolean isBatch, Environment env) throws Exception { if (pluginId == null) { throw new UserException(ExitCodes.USAGE, "plugin id is required"); } - final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); // TODO: remove this leniency!! is it needed anymore? if (Files.exists(env.pluginsFile()) == false) { terminal.println("Plugins directory [" + env.pluginsFile() + "] does not exist. 
Creating..."); diff --git a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java index bd2f853bac0..3f21c44a8f4 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java @@ -20,7 +20,7 @@ package org.elasticsearch.plugins; import joptsimple.OptionSet; -import org.elasticsearch.cli.SettingCommand; +import org.elasticsearch.cli.EnvironmentAwareCommand; import org.elasticsearch.cli.Terminal; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -38,15 +38,14 @@ import java.util.Map; /** * A command for the plugin cli to list plugins installed in elasticsearch. */ -class ListPluginsCommand extends SettingCommand { +class ListPluginsCommand extends EnvironmentAwareCommand { ListPluginsCommand() { super("Lists installed elasticsearch plugins"); } @Override - protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { - final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); + protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception { if (Files.exists(env.pluginsFile()) == false) { throw new IOException("Plugins directory missing: " + env.pluginsFile()); } diff --git a/core/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java b/core/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java index 32d0e6058ea..991a21f1b32 100644 --- a/core/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.threadpool.ThreadPool; @@ -62,9 +63,8 @@ public interface NetworkPlugin { * See {@link org.elasticsearch.common.network.NetworkModule#HTTP_TYPE_SETTING} to configure a specific implementation. */ default Map> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays, - CircuitBreakerService circuitBreakerService, - NamedWriteableRegistry namedWriteableRegistry, - NetworkService networkService) { + CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, + NamedXContentRegistry xContentRegistry, NetworkService networkService) { return Collections.emptyMap(); } } diff --git a/core/src/main/java/org/elasticsearch/plugins/Plugin.java b/core/src/main/java/org/elasticsearch/plugins/Plugin.java index 32d902248a6..e7d97b0724e 100644 --- a/core/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -19,12 +19,6 @@ package org.elasticsearch.plugins; -import java.io.Closeable; -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; -import java.util.List; - import org.elasticsearch.action.ActionModule; import org.elasticsearch.bootstrap.BootstrapCheck; import org.elasticsearch.client.Client; @@ -39,6 +33,8 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.index.IndexModule; import org.elasticsearch.indices.analysis.AnalysisModule; @@ -51,6 +47,11 @@ import 
org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; +import java.io.Closeable; +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.function.UnaryOperator; @@ -106,7 +107,7 @@ public abstract class Plugin implements Closeable { */ public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, ResourceWatcherService resourceWatcherService, ScriptService scriptService, - SearchRequestParsers searchRequestParsers) { + SearchRequestParsers searchRequestParsers, NamedXContentRegistry xContentRegistry) { return Collections.emptyList(); } @@ -126,6 +127,14 @@ public abstract class Plugin implements Closeable { return Collections.emptyList(); } + /** + * Returns parsers for named objects this plugin will parse from {@link XContentParser#namedObject(Class, String, Object)}. + * @see NamedWriteableRegistry + */ + public List getNamedXContent() { + return Collections.emptyList(); + } + /** * Called before a new index is created on a node. The given module can be used to register index-level * extensions. 
diff --git a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java index 2eaa25ca39c..fab85c5c5af 100644 --- a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java @@ -19,32 +19,28 @@ package org.elasticsearch.plugins; -import joptsimple.OptionSet; -import joptsimple.OptionSpec; - -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.cli.SettingCommand; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.cli.UserException; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.node.internal.InternalSettingsPreparer; - import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.List; -import java.util.Map; + +import joptsimple.OptionSet; +import joptsimple.OptionSpec; +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.cli.EnvironmentAwareCommand; +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.cli.UserException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.env.Environment; import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; /** * A command for the plugin cli to remove a plugin from elasticsearch. 
*/ -class RemovePluginCommand extends SettingCommand { +class RemovePluginCommand extends EnvironmentAwareCommand { private final OptionSpec arguments; @@ -54,15 +50,13 @@ class RemovePluginCommand extends SettingCommand { } @Override - protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception { String arg = arguments.value(options); - execute(terminal, arg, settings); + execute(terminal, arg, env); } // pkg private for testing - void execute(Terminal terminal, String pluginName, Map settings) throws Exception { - final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); - + void execute(Terminal terminal, String pluginName, Environment env) throws Exception { terminal.println("-> Removing " + Strings.coalesceToEmpty(pluginName) + "..."); final Path pluginDir = env.pluginsFile().resolve(pluginName); diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java index 04900705e0a..aadf871c09f 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.FromXContentBuilder; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -109,10 +110,10 @@ public abstract class BlobStoreFormat { } protected T read(BytesReference bytes) throws IOException { - try (XContentParser 
parser = XContentHelper.createParser(bytes)) { + // EMPTY is safe here because no reader calls namedObject + try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, bytes)) { T obj = reader.fromXContent(parser, parseFieldMatcher); return obj; - } } diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index aa487056cc1..c7abded5e0e 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -68,6 +68,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -625,7 +626,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp try (InputStream blob = snapshotsBlobContainer.readBlob(snapshotsIndexBlobName)) { BytesStreamOutput out = new BytesStreamOutput(); Streams.copy(blob, out); - try (XContentParser parser = XContentHelper.createParser(out.bytes())) { + // EMPTY is safe here because RepositoryData#fromXContent calls namedObject + try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, out.bytes())) { repositoryData = RepositoryData.fromXContent(parser); } } diff --git a/core/src/main/java/org/elasticsearch/rest/RestRequest.java b/core/src/main/java/org/elasticsearch/rest/RestRequest.java index ad6a367a7b7..8c05a2b3ae7 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestRequest.java +++ 
b/core/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -46,11 +47,13 @@ import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; public abstract class RestRequest implements ToXContent.Params { + private final NamedXContentRegistry xContentRegistry; private final Map params; private final String rawPath; private final Set consumedParams = new HashSet<>(); - public RestRequest(String uri) { + public RestRequest(NamedXContentRegistry xContentRegistry, String uri) { + this.xContentRegistry = xContentRegistry; final Map params = new HashMap<>(); int pathEndPos = uri.indexOf('?'); if (pathEndPos < 0) { @@ -62,7 +65,8 @@ public abstract class RestRequest implements ToXContent.Params { this.params = params; } - public RestRequest(Map params, String path) { + public RestRequest(NamedXContentRegistry xContentRegistry, Map params, String path) { + this.xContentRegistry = xContentRegistry; this.params = params; this.rawPath = path; } @@ -228,6 +232,13 @@ public abstract class RestRequest implements ToXContent.Params { return params; } + /** + * Get the {@link NamedXContentRegistry} that should be used to create parsers from this request. + */ + public NamedXContentRegistry getXContentRegistry() { + return xContentRegistry; + } + /** * A parser for the contents of this request if there is a body, otherwise throws an {@link ElasticsearchParseException}. Use * {@link #applyContentParser(CheckedConsumer)} if you want to gracefully handle when the request doesn't have any contents. 
Use @@ -238,7 +249,7 @@ public abstract class RestRequest implements ToXContent.Params { if (content.length() == 0) { throw new ElasticsearchParseException("Body required"); } - return XContentFactory.xContent(content).createParser(content); + return XContentFactory.xContent(content).createParser(xContentRegistry, content); } /** @@ -270,7 +281,7 @@ public abstract class RestRequest implements ToXContent.Params { if (content.length() == 0) { throw new ElasticsearchParseException("Body required"); } - return XContentFactory.xContent(content).createParser(content); + return XContentFactory.xContent(content).createParser(xContentRegistry, content); } /** @@ -281,7 +292,7 @@ public abstract class RestRequest implements ToXContent.Params { public final void withContentOrSourceParamParserOrNull(CheckedConsumer withParser) throws IOException { BytesReference content = contentOrSourceParam(); if (content.length() > 0) { - try (XContentParser parser = XContentFactory.xContent(content).createParser(content)) { + try (XContentParser parser = XContentFactory.xContent(content).createParser(xContentRegistry, content)) { withParser.accept(parser); } } else { diff --git a/core/src/main/java/org/elasticsearch/rest/action/RestToXContentListener.java b/core/src/main/java/org/elasticsearch/rest/action/RestToXContentListener.java index 9e5c4f40659..c9ba8df6a0f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/RestToXContentListener.java +++ b/core/src/main/java/org/elasticsearch/rest/action/RestToXContentListener.java @@ -42,12 +42,23 @@ public class RestToXContentListener extends RestRes } public final RestResponse buildResponse(Response response, XContentBuilder builder) throws Exception { - builder.startObject(); + if (wrapInObject()) { + builder.startObject(); + } response.toXContent(builder, channel.request()); - builder.endObject(); + if (wrapInObject()) { + builder.endObject(); + } return new BytesRestResponse(getStatus(response), builder); } + protected boolean 
wrapInObject() { + //Ideally, the toXContent method starts with startObject and ends with endObject. + //In practice, we have many places where toXContent produces a json fragment that's not valid by itself. We will + //migrate those step by step, so that we never have to start objects here, and we can remove this method. + return true; + } + protected RestStatus getStatus(Response response) { return RestStatus.OK; } diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java index 7206e6b9d5e..e032ec9fde5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java @@ -25,15 +25,13 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestActions; -import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; @@ -76,17 +74,15 @@ public class RestGetAction extends BaseRestHandler { getRequest.fetchSourceContext(FetchSourceContext.parseFromRestRequest(request)); - return channel -> client.get(getRequest, new RestBuilderListener(channel) { + return channel -> client.get(getRequest, new RestToXContentListener(channel) { @Override - public RestResponse 
buildResponse(GetResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - response.toXContent(builder, request); - builder.endObject(); - if (!response.isExists()) { - return new BytesRestResponse(NOT_FOUND, builder); - } else { - return new BytesRestResponse(OK, builder); - } + protected boolean wrapInObject() { + return false; + } + + @Override + protected RestStatus getStatus(GetResponse response) { + return response.isExists() ? OK : NOT_FOUND; } }); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index 3880ec6ca9e..43739976ba0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -32,6 +32,7 @@ import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.rest.action.RestStatusToXContentListener; import java.io.IOException; +import java.net.URISyntaxException; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; @@ -82,7 +83,14 @@ public class RestIndexAction extends BaseRestHandler { } return channel -> - client.index(indexRequest, new RestStatusToXContentListener<>(channel, r -> r.getLocation(indexRequest.routing()))); + client.index(indexRequest, new RestStatusToXContentListener<>(channel, r -> { + try { + return r.getLocation(indexRequest.routing()); + } catch (URISyntaxException ex) { + logger.warn("Location string is not a valid URI.", ex); + return null; + } + })); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index ade0200215a..57039bdbe95 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ 
b/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -37,6 +37,7 @@ import org.elasticsearch.rest.action.RestStatusToXContentListener; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; +import java.net.URISyntaxException; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -99,6 +100,13 @@ public class RestUpdateAction extends BaseRestHandler { }); return channel -> - client.update(updateRequest, new RestStatusToXContentListener<>(channel, r -> r.getLocation(updateRequest.routing()))); + client.update(updateRequest, new RestStatusToXContentListener<>(channel, r -> { + try { + return r.getLocation(updateRequest.routing()); + } catch (URISyntaxException ex) { + logger.warn("Location string is not a valid URI.", ex); + return null; + } + })); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 6dfd587f848..502d427050f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -82,7 +82,6 @@ public class RestMultiSearchAction extends BaseRestHandler { public static MultiSearchRequest parseRequest(RestRequest restRequest, boolean allowExplicitIndex, SearchRequestParsers searchRequestParsers, ParseFieldMatcher parseFieldMatcher) throws IOException { - MultiSearchRequest multiRequest = new MultiSearchRequest(); if (restRequest.hasParam("max_concurrent_searches")) { multiRequest.maxConcurrentSearchRequests(restRequest.paramAsInt("max_concurrent_searches", 0)); @@ -107,7 +106,7 @@ public class RestMultiSearchAction extends BaseRestHandler { * Parses a multi-line {@link RestRequest} body, instantiating a {@link SearchRequest} for each line and applying the given consumer.
*/ public static void parseMultiLineRequest(RestRequest request, IndicesOptions indicesOptions, boolean allowExplicitIndex, - BiConsumer consumer) throws IOException { + BiConsumer consumer) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); String[] types = Strings.splitStringByCommaToArray(request.param("type")); @@ -153,7 +152,7 @@ public class RestMultiSearchAction extends BaseRestHandler { // now parse the action if (nextMarker - from > 0) { - try (XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from))) { + try (XContentParser parser = xContent.createParser(request.getXContentRegistry(), data.slice(from, nextMarker - from))) { Map source = parser.map(); for (Map.Entry entry : source.entrySet()) { Object value = entry.getValue(); @@ -187,7 +186,7 @@ public class RestMultiSearchAction extends BaseRestHandler { break; } BytesReference bytes = data.slice(from, nextMarker - from); - try (XContentParser parser = XContentFactory.xContent(bytes).createParser(bytes)) { + try (XContentParser parser = XContentFactory.xContent(bytes).createParser(request.getXContentRegistry(), bytes)) { consumer.accept(searchRequest, parser); } // move pointers diff --git a/core/src/main/java/org/elasticsearch/script/ScriptMetaData.java b/core/src/main/java/org/elasticsearch/script/ScriptMetaData.java index 84855da2f94..44456dbec20 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptMetaData.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptMetaData.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.common.xcontent.XContentHelper; @@ -75,7 +76,8 @@ public final class ScriptMetaData implements MetaData.Custom { // 2) wrapped into a 'template' json object or field // 3) just as is // In order to fetch the actual script in consistent manner this parsing logic is needed: - try (XContentParser parser = XContentHelper.createParser(scriptAsBytes); + // EMPTY is ok here because we never call namedObject, we're just copying structure. + try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, scriptAsBytes); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { parser.nextToken(); parser.nextToken(); diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index cdfcaeab90b..a5ea15c6702 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -23,11 +23,14 @@ import org.apache.lucene.search.BooleanQuery; import org.elasticsearch.common.NamedRegistry; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.builders.ShapeBuilders; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ParseFieldRegistry; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.BoostingQueryBuilder; import org.elasticsearch.index.query.CommonTermsQueryBuilder; @@ -54,6 +57,7 @@ import org.elasticsearch.index.query.NestedQueryBuilder; import 
org.elasticsearch.index.query.ParentIdQueryBuilder; import org.elasticsearch.index.query.PrefixQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.RegexpQueryBuilder; @@ -79,7 +83,6 @@ import org.elasticsearch.index.query.functionscore.GaussDecayFunctionBuilder; import org.elasticsearch.index.query.functionscore.LinearDecayFunctionBuilder; import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; -import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.WeightBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; @@ -266,7 +269,6 @@ public class SearchModule { private final boolean transportClient; private final Map highlighters; private final Map> suggesters; - private final ParseFieldRegistry> scoreFunctionParserRegistry = new ParseFieldRegistry<>("score_function"); private final IndicesQueriesRegistry queryParserRegistry = new IndicesQueriesRegistry(); private final ParseFieldRegistry aggregationParserRegistry = new ParseFieldRegistry<>("aggregation"); private final ParseFieldRegistry pipelineAggregationParserRegistry = new ParseFieldRegistry<>( @@ -281,7 +283,8 @@ public class SearchModule { private final SearchExtRegistry searchExtParserRegistry = new SearchExtRegistry(); private final Settings settings; - private final List namedWriteables = new ArrayList<>(); + private final List namedWriteables = new ArrayList<>(); + private final List namedXContents = new ArrayList<>(); private final SearchRequestParsers searchRequestParsers; public SearchModule(Settings settings, boolean 
transportClient, List plugins) { @@ -304,10 +307,14 @@ public class SearchModule { searchRequestParsers = new SearchRequestParsers(queryParserRegistry, aggregatorParsers, getSuggesters(), searchExtParserRegistry); } - public List getNamedWriteables() { + public List getNamedWriteables() { return namedWriteables; } + public List getNamedXContents() { + return namedXContents; + } + public Suggesters getSuggesters() { return new Suggesters(suggesters); } @@ -618,8 +625,12 @@ public class SearchModule { } private void registerScoreFunction(ScoreFunctionSpec scoreFunction) { - scoreFunctionParserRegistry.register(scoreFunction.getParser(), scoreFunction.getName()); - namedWriteables.add(new Entry(ScoreFunctionBuilder.class, scoreFunction.getName().getPreferredName(), scoreFunction.getReader())); + namedWriteables.add(new NamedWriteableRegistry.Entry( + ScoreFunctionBuilder.class, scoreFunction.getName().getPreferredName(), scoreFunction.getReader())); + // TODO remove funky contexts + namedXContents.add(new NamedXContentRegistry.Entry( + ScoreFunctionBuilder.class, scoreFunction.getName(), + (XContentParser p, Object c) -> scoreFunction.getParser().fromXContent((QueryParseContext) c))); } private void registerValueFormats() { @@ -742,7 +753,7 @@ public class SearchModule { registerQuery( new QuerySpec<>(SpanMultiTermQueryBuilder.NAME, SpanMultiTermQueryBuilder::new, SpanMultiTermQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(FunctionScoreQueryBuilder.NAME, FunctionScoreQueryBuilder::new, - c -> FunctionScoreQueryBuilder.fromXContent(scoreFunctionParserRegistry, c))); + FunctionScoreQueryBuilder::fromXContent)); registerQuery( new QuerySpec<>(SimpleQueryStringBuilder.NAME, SimpleQueryStringBuilder::new, SimpleQueryStringBuilder::fromXContent)); registerQuery(new QuerySpec<>(TypeQueryBuilder.NAME, TypeQueryBuilder::new, TypeQueryBuilder::fromXContent)); diff --git a/core/src/main/java/org/elasticsearch/search/internal/AliasFilter.java 
b/core/src/main/java/org/elasticsearch/search/internal/AliasFilter.java index fd684380526..8d55dfbab07 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/AliasFilter.java +++ b/core/src/main/java/org/elasticsearch/search/internal/AliasFilter.java @@ -25,6 +25,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; @@ -62,7 +64,14 @@ public final class AliasFilter implements Writeable { if (reparseAliases) { // we are processing a filter received from a 5.0 node - we need to reparse this on the executing node final IndexMetaData indexMetaData = context.getIndexSettings().getIndexMetaData(); - return ShardSearchRequest.parseAliasFilter(context::newParseContext, indexMetaData, aliases); + /* Being static, parseAliasFilter doesn't have access to whatever guts it needs to parse a query. Instead of passing in a bunch + * of dependencies we pass in a function that can perform the parsing. 
*/ + ShardSearchRequest.FilterParser filterParser = bytes -> { + try (XContentParser parser = XContentFactory.xContent(bytes).createParser(context.getXContentRegistry(), bytes)) { + return context.newParseContext(parser).parseInnerQueryBuilder(); + } + }; + return ShardSearchRequest.parseAliasFilter(filterParser, indexMetaData, aliases); } return filter; } diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index cc7dc3eb5fb..f021d7730cf 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -24,12 +24,9 @@ import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.AliasFilterParsingException; @@ -91,13 +88,17 @@ public interface ShardSearchRequest { */ void rewrite(QueryShardContext context) throws IOException; + @FunctionalInterface + public interface FilterParser { + QueryBuilder parse(byte[] bytes) throws IOException; + } /** * Returns the filter associated with listed filtering aliases. *

* The list of filtering aliases should be obtained by calling MetaData.filteringAliases. * Returns null if no filtering is required.

*/ - static QueryBuilder parseAliasFilter(Function contextFactory, + static QueryBuilder parseAliasFilter(FilterParser filterParser, IndexMetaData metaData, String... aliasNames) { if (aliasNames == null || aliasNames.length == 0) { return null; @@ -109,10 +110,7 @@ public interface ShardSearchRequest { return null; } try { - byte[] filterSource = alias.filter().uncompressed(); - try (XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource)) { - return contextFactory.apply(parser).parseInnerQueryBuilder(); - } + return filterParser.parse(alias.filter().uncompressed()); } catch (IOException ex) { throw new AliasFilterParsingException(index, alias.getAlias(), "Invalid alias filter", ex); } @@ -128,19 +126,19 @@ public interface ShardSearchRequest { // we need to bench here a bit, to see maybe it makes sense to use OrFilter BoolQueryBuilder combined = new BoolQueryBuilder(); for (String aliasName : aliasNames) { - AliasMetaData alias = aliases.get(aliasName); - if (alias == null) { - // This shouldn't happen unless alias disappeared after filteringAliases was called. - throw new InvalidAliasNameException(index, aliasNames[0], - "Unknown alias name was passed to alias Filter"); - } - QueryBuilder parsedFilter = parserFunction.apply(alias); - if (parsedFilter != null) { - combined.should(parsedFilter); - } else { - // The filter might be null only if filter was removed after filteringAliases was called - return null; - } + AliasMetaData alias = aliases.get(aliasName); + if (alias == null) { + // This shouldn't happen unless alias disappeared after filteringAliases was called. 
+ throw new InvalidAliasNameException(index, aliasNames[0], + "Unknown alias name was passed to alias Filter"); + } + QueryBuilder parsedFilter = parserFunction.apply(alias); + if (parsedFilter != null) { + combined.should(parsedFilter); + } else { + // The filter might be null only if filter was removed after filteringAliases was called + return null; + } } return combined; } diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 9defd05f471..37e660d2a31 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -74,10 +74,8 @@ public class GeoDistanceSortBuilder extends SortBuilder private static final ParseField UNIT_FIELD = new ParseField("unit"); private static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type"); private static final ParseField VALIDATION_METHOD_FIELD = new ParseField("validation_method"); - private static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed") - .withAllDeprecated("use validation_method instead"); - private static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize") - .withAllDeprecated("use validation_method instead"); + private static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed").withAllDeprecated("validation_method"); + private static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize").withAllDeprecated("validation_method"); private static final ParseField SORTMODE_FIELD = new ParseField("mode", "sort_mode"); private final String fieldName; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index ca1d7a2306f..0028767c36f 100644 --- 
a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; @@ -29,6 +28,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MappedFieldType; @@ -43,7 +43,7 @@ import java.util.Objects; /** * Base class for the different suggestion implementations. 
*/ -public abstract class SuggestionBuilder> extends ToXContentToBytes implements NamedWriteable { +public abstract class SuggestionBuilder> implements NamedWriteable, ToXContent { protected final String field; protected String text; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 09382d9aaff..0fd3726384f 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -42,7 +42,6 @@ import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; import java.io.IOException; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -233,10 +232,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder { QueryShardContext shardContext = suggestion.getShardContext(); final ExecutableScript executable = collateScript.apply(vars); final BytesReference querySource = (BytesReference) executable.run(); - try (XContentParser parser = XContentFactory.xContent(querySource).createParser(querySource)) { + try (XContentParser parser = XContentFactory.xContent(querySource).createParser(shardContext.getXContentRegistry(), + querySource)) { QueryBuilder innerQueryBuilder = shardContext.newParseContext(parser).parseInnerQueryBuilder(); final ParsedQuery parsedQuery = shardContext.toQuery(innerQueryBuilder); collateMatch = Lucene.exists(searcher, parsedQuery.query()); diff --git a/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java index f172945d84f..c5d9d31c1eb 100644 --- a/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java 
+++ b/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java @@ -106,7 +106,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { assertExceptionAsJson(e, false, equalTo(expectedJson)); ElasticsearchException parsed; - try (XContentParser parser = XContentType.JSON.xContent().createParser(expectedJson)) { + try (XContentParser parser = createParser(XContentType.JSON.xContent(), expectedJson)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); parsed = ElasticsearchException.fromXContent(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); @@ -142,7 +142,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { .endObject(); ElasticsearchException parsed; - try (XContentParser parser = xContent.createParser(builder.bytes())) { + try (XContentParser parser = createParser(xContent, builder.bytes())) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); parsed = ElasticsearchException.fromXContent(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); @@ -163,7 +163,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { XContentBuilder builder = XContentBuilder.builder(xContent).startObject().value(e).endObject(); ElasticsearchException parsed; - try (XContentParser parser = xContent.createParser(builder.bytes())) { + try (XContentParser parser = createParser(builder)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); parsed = ElasticsearchException.fromXContent(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); @@ -206,7 +206,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { XContentBuilder builder = XContentBuilder.builder(xContent).startObject().value(foo).endObject(); ElasticsearchException parsed; - try (XContentParser parser = xContent.createParser(builder.bytes())) { + try (XContentParser parser = createParser(builder)) { 
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); parsed = ElasticsearchException.fromXContent(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 202224872b4..786426822dc 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -779,6 +779,7 @@ public class ExceptionSerializationTests extends ESTestCase { ids.put(145, org.elasticsearch.ElasticsearchStatusException.class); ids.put(146, org.elasticsearch.tasks.TaskCancelledException.class); ids.put(147, org.elasticsearch.env.ShardLockObtainFailedException.class); + ids.put(148, org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException.class); Map, Integer> reverse = new HashMap<>(); for (Map.Entry> entry : ids.entrySet()) { diff --git a/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java index 017ba128d50..0f6332006d9 100644 --- a/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java @@ -30,13 +30,14 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.net.URISyntaxException; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; public class DocWriteResponseTests extends ESTestCase { - public void testGetLocation() { + public void testGetLocation() throws URISyntaxException { DocWriteResponse response = new DocWriteResponse( new ShardId("index", "uuid", 0), @@ -51,6 +52,35 @@ public class DocWriteResponseTests extends ESTestCase { 
assertEquals("/index/type/id?routing=test_routing", response.getLocation("test_routing")); } + public void testGetLocationNonAscii() throws URISyntaxException { + DocWriteResponse response = + new DocWriteResponse( + new ShardId("index", "uuid", 0), + "type", + "❤", + SequenceNumbersService.UNASSIGNED_SEQ_NO, + 0, + Result.CREATED) { + }; + assertEquals("/index/type/%E2%9D%A4", response.getLocation(null)); + assertEquals("/index/type/%E2%9D%A4?routing=%C3%A4", response.getLocation("%C3%A4")); + } + + public void testInvalidGetLocation() { + String invalidPath = "!^*$(@!^!#@"; + DocWriteResponse invalid = + new DocWriteResponse( + new ShardId("index", "uuid", 0), + "type", + invalidPath, + SequenceNumbersService.UNASSIGNED_SEQ_NO, + 0, + Result.CREATED) { + }; + Throwable exception = expectThrows(URISyntaxException.class, () -> invalid.getLocation(null)); + assertTrue(exception.getMessage().contains(invalidPath)); + } + /** * Tests that {@link DocWriteResponse#toXContent(XContentBuilder, ToXContent.Params)} doesn't include {@code forced_refresh} unless it * is true. We can't assert this in the yaml tests because "not found" is also "false" there.... 
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java index 33f683b458c..f6ba7d9f022 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java @@ -135,13 +135,15 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase { assertEquals(d.type(), Decision.Type.NO); if (noAttrNode.equals(nodeName)) { - assertThat(d.toString(), containsString("node does not match [index.routing.allocation.include] filters [foo:\"bar\"]")); + assertThat(d.toString(), containsString("node does not match index setting [index.routing.allocation.include] " + + "filters [foo:\"bar\"]")); assertNull(storeStatus); assertEquals("the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", explanation.getFinalExplanation()); assertEquals(ClusterAllocationExplanation.FinalDecision.NO, finalDecision); } else if (barAttrNode.equals(nodeName)) { - assertThat(d.toString(), containsString("node does not match [index.routing.allocation.include] filters [foo:\"bar\"]")); + assertThat(d.toString(), containsString("node does not match index setting [index.routing.allocation.include] " + + "filters [foo:\"bar\"]")); barAttrWeight = weight; assertNull(storeStatus); assertEquals("the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java index e1205840976..bc52bdc22be 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -179,7 +179,7 @@ public class ClusterRerouteRequestTests extends ESTestCase { return RestClusterRerouteAction.createRequest(restRequest, allocationCommandRegistry, ParseFieldMatcher.STRICT); } - private static RestRequest toRestRequest(ClusterRerouteRequest original) throws IOException { + private RestRequest toRestRequest(ClusterRerouteRequest original) throws IOException { Map params = new HashMap<>(); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); boolean hasBody = false; @@ -209,7 +209,7 @@ public class ClusterRerouteRequestTests extends ESTestCase { } builder.endObject(); - FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(); + FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()); requestBuilder.withParams(params); if (hasBody) { requestBuilder.withContent(builder.bytes()); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index 999d16fdcb5..17321daf9af 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.ESTestCase; import java.util.Set; @@ -36,12 +35,12 @@ public class RolloverRequestTests extends ESTestCase { final RolloverRequest request = new RolloverRequest(randomAsciiOfLength(10), randomAsciiOfLength(10)); final XContentBuilder builder = 
XContentFactory.jsonBuilder() .startObject() - .startObject("conditions") - .field("max_age", "10d") - .field("max_docs", 100) - .endObject() + .startObject("conditions") + .field("max_age", "10d") + .field("max_docs", 100) + .endObject() .endObject(); - RolloverRequest.PARSER.parse(XContentHelper.createParser(builder.bytes()), request, () -> ParseFieldMatcher.EMPTY); + RolloverRequest.PARSER.parse(createParser(builder), request, () -> ParseFieldMatcher.EMPTY); Set conditions = request.getConditions(); assertThat(conditions.size(), equalTo(2)); for (Condition condition : conditions) { @@ -61,28 +60,28 @@ public class RolloverRequestTests extends ESTestCase { final RolloverRequest request = new RolloverRequest(randomAsciiOfLength(10), randomAsciiOfLength(10)); final XContentBuilder builder = XContentFactory.jsonBuilder() .startObject() - .startObject("conditions") - .field("max_age", "10d") - .field("max_docs", 100) - .endObject() - .startObject("mappings") - .startObject("type1") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .field("index", "not_analyzed") + .startObject("conditions") + .field("max_age", "10d") + .field("max_docs", 100) + .endObject() + .startObject("mappings") + .startObject("type1") + .startObject("properties") + .startObject("field1") + .field("type", "string") + .field("index", "not_analyzed") + .endObject() .endObject() .endObject() .endObject() - .endObject() - .startObject("settings") - .field("number_of_shards", 10) - .endObject() - .startObject("aliases") - .startObject("alias1").endObject() - .endObject() + .startObject("settings") + .field("number_of_shards", 10) + .endObject() + .startObject("aliases") + .startObject("alias1").endObject() + .endObject() .endObject(); - RolloverRequest.PARSER.parse(XContentHelper.createParser(builder.bytes()), request, () -> ParseFieldMatcher.EMPTY); + RolloverRequest.PARSER.parse(createParser(builder), request, () -> ParseFieldMatcher.EMPTY); Set conditions = 
request.getConditions(); assertThat(conditions.size(), equalTo(2)); assertThat(request.getCreateIndexRequest().mappings().size(), equalTo(1)); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/BWCTemplateTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/BWCTemplateTests.java index 69c6731aa15..7ea103313fe 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/BWCTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/BWCTemplateTests.java @@ -42,12 +42,14 @@ public class BWCTemplateTests extends ESSingleNodeTestCase { client().prepareIndex("packetbeat-foo", "doc", "1").setSource("message", "foo").get(); client().prepareIndex("filebeat-foo", "doc", "1").setSource("message", "foo").get(); client().prepareIndex("winlogbeat-foo", "doc", "1").setSource("message", "foo").get(); + assertWarnings("Deprecated field [template] used, replaced by [index_patterns]"); } public void testLogstashTemplatesBWC() throws Exception { String ls5x = copyToStringFromClasspath("/org/elasticsearch/action/admin/indices/template/logstash-5.0.template.json"); client().admin().indices().preparePutTemplate("logstash-5x").setSource(ls5x).get(); client().prepareIndex("logstash-foo", "doc", "1").setSource("message", "foo").get(); + assertWarnings("Deprecated field [template] used, replaced by [index_patterns]"); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index 9779ce83a65..48598ecb2ec 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -28,6 +28,7 @@ import 
org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService.PutReques import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.indices.IndicesService; @@ -58,7 +59,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { map.put("index.shard.check_on_startup", "blargh"); request.settings(Settings.builder().put(map).build()); - List throwables = putTemplate(request); + List throwables = putTemplate(xContentRegistry(), request); assertEquals(throwables.size(), 1); assertThat(throwables.get(0), instanceOf(InvalidIndexTemplateException.class)); assertThat(throwables.get(0).getMessage(), @@ -76,7 +77,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { map.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "0"); request.settings(Settings.builder().put(map).build()); - List throwables = putTemplate(request); + List throwables = putTemplate(xContentRegistry(), request); assertEquals(throwables.size(), 1); assertThat(throwables.get(0), instanceOf(InvalidIndexTemplateException.class)); assertThat(throwables.get(0).getMessage(), containsString("name must not contain a space")); @@ -90,7 +91,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { request.patterns(Arrays.asList("foo", "foobar")); request.aliases(Collections.singleton(new Alias("foobar"))); - List errors = putTemplate(request); + List errors = putTemplate(xContentRegistry(), request); assertThat(errors.size(), equalTo(1)); assertThat(errors.get(0), instanceOf(IllegalArgumentException.class)); assertThat(errors.get(0).getMessage(), equalTo("Alias [foobar] cannot be the same as any pattern in [foo, foobar]")); @@ 
-158,17 +159,17 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { } - private static List putTemplate(PutRequest request) { + private static List putTemplate(NamedXContentRegistry xContentRegistry, PutRequest request) { MetaDataCreateIndexService createIndexService = new MetaDataCreateIndexService( Settings.EMPTY, null, null, null, null, - null, null, null); + null, null, null, xContentRegistry); MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, new AliasValidator(Settings.EMPTY), null, - new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS)); + new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS), xContentRegistry); final List throwables = new ArrayList<>(); service.putTemplate(request, new MetaDataIndexTemplateService.PutListener() { @@ -196,10 +197,11 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { null, null, null, - null); + null, + xContentRegistry()); MetaDataIndexTemplateService service = new MetaDataIndexTemplateService( Settings.EMPTY, clusterService, createIndexService, new AliasValidator(Settings.EMPTY), indicesService, - new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS)); + new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS), xContentRegistry()); final List throwables = new ArrayList<>(); final CountDownLatch latch = new CountDownLatch(1); diff --git a/core/src/test/java/org/elasticsearch/action/get/GetResponseTests.java b/core/src/test/java/org/elasticsearch/action/get/GetResponseTests.java new file mode 100644 index 00000000000..0fb347dd3d9 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/get/GetResponseTests.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.get; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.get.GetField; +import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; + +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.index.get.GetResultTests.copyGetResult; +import static org.elasticsearch.index.get.GetResultTests.mutateGetResult; +import static org.elasticsearch.index.get.GetResultTests.randomGetResult; +import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; + +public class GetResponseTests extends ESTestCase { + + public void testToAndFromXContent() throws Exception { + XContentType xContentType = randomFrom(XContentType.values()); + Tuple tuple = randomGetResult(xContentType); + GetResponse 
getResponse = new GetResponse(tuple.v1()); + GetResponse expectedGetResponse = new GetResponse(tuple.v2()); + BytesReference originalBytes = toXContent(getResponse, xContentType, false); + //test that we can parse what we print out + GetResponse parsedGetResponse; + try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + parsedGetResponse = GetResponse.fromXContent(parser); + assertNull(parser.nextToken()); + } + assertEquals(expectedGetResponse, parsedGetResponse); + //print the parsed object out and test that the output is the same as the original output + BytesReference finalBytes = toXContent(parsedGetResponse, xContentType, false); + assertToXContentEquivalent(originalBytes, finalBytes, xContentType); + //check that the source stays unchanged, no shuffling of keys nor anything like that + assertEquals(expectedGetResponse.getSourceAsString(), parsedGetResponse.getSourceAsString()); + + } + + public void testToXContent() throws IOException { + { + GetResponse getResponse = new GetResponse(new GetResult("index", "type", "id", 1, true, new BytesArray("{ \"field1\" : " + + "\"value1\", \"field2\":\"value2\"}"), Collections.singletonMap("field1", new GetField("field1", + Collections.singletonList("value1"))))); + String output = Strings.toString(getResponse, false); + assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"found\":true,\"_source\":{ \"field1\" " + + ": \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", output); + } + { + GetResponse getResponse = new GetResponse(new GetResult("index", "type", "id", 1, false, null, null)); + String output = Strings.toString(getResponse, false); + assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"found\":false}", output); + } + } + + public void testEqualsAndHashcode() { + checkEqualsAndHashCode(new GetResponse(randomGetResult(XContentType.JSON).v1()), GetResponseTests::copyGetResponse, + 
GetResponseTests::mutateGetResponse); + } + private static GetResponse copyGetResponse(GetResponse getResponse) { + return new GetResponse(copyGetResult(getResponse.getResult)); + } + + private static GetResponse mutateGetResponse(GetResponse getResponse) { + return new GetResponse(mutateGetResult(getResponse.getResult)); + } +} diff --git a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index d5b17861e0f..e42c56046e6 100644 --- a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -167,7 +167,7 @@ public class MultiSearchRequestTests extends ESTestCase { private MultiSearchRequest parseMultiSearchRequest(String sample) throws IOException { byte[] data = StreamsUtils.copyToBytesFromClasspath(sample); - RestRequest restRequest = new FakeRestRequest.Builder().withContent(new BytesArray(data)).build(); + RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray(data)).build(); return RestMultiSearchAction.parseRequest(restRequest, true, parsers(), ParseFieldMatcher.EMPTY); } diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java index 7d82b744133..ca8f0a4c6b2 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java @@ -21,9 +21,8 @@ package org.elasticsearch.action.support.replication; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.RoutingMissingException; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContent; -import 
org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; @@ -117,97 +116,175 @@ public class ReplicationResponseTests extends ESTestCase { } public void testShardInfoToXContent() throws IOException { - ReplicationResponse.ShardInfo shardInfo = new ReplicationResponse.ShardInfo(5, 3); + final XContentType xContentType = randomFrom(XContentType.values()); - final XContent xContent = randomFrom(XContentType.values()).xContent(); - try (XContentBuilder builder = XContentBuilder.builder(xContent)) { - builder.startObject(); - shardInfo.toXContent(builder, ToXContent.EMPTY_PARAMS); - builder.endObject(); + final ReplicationResponse.ShardInfo shardInfo = new ReplicationResponse.ShardInfo(5, 3); + final BytesReference shardInfoBytes = XContentHelper.toXContent(shardInfo, xContentType, true); - // Expected JSON is {"_shards":{"total":5,"successful":3,"failed":0}} - try (XContentParser parser = xContent.createParser(builder.bytes())) { - assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - assertEquals("_shards", parser.currentName()); - assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - assertEquals("total", parser.currentName()); - assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); - assertEquals(shardInfo.getTotal(), parser.intValue()); - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - assertEquals("successful", parser.currentName()); - assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); - assertEquals(shardInfo.getSuccessful(), parser.intValue()); - 
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - assertEquals("failed", parser.currentName()); - assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); - assertEquals(shardInfo.getFailed(), parser.intValue()); - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); - } + // Expected JSON is {"_shards":{"total":5,"successful":3,"failed":0}} + try (XContentParser parser = createParser(xContentType.xContent(), shardInfoBytes)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("_shards", parser.currentName()); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("total", parser.currentName()); + assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); + assertEquals(shardInfo.getTotal(), parser.intValue()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("successful", parser.currentName()); + assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); + assertEquals(shardInfo.getSuccessful(), parser.intValue()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("failed", parser.currentName()); + assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); + assertEquals(shardInfo.getFailed(), parser.intValue()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); } } - public void testRandomShardInfoToXContent() throws IOException { + public void testShardInfoToAndFromXContent() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + + final 
ReplicationResponse.ShardInfo shardInfo = new ReplicationResponse.ShardInfo(randomIntBetween(1, 5), randomIntBetween(1, 5)); + final BytesReference shardInfoBytes = XContentHelper.toXContent(shardInfo, xContentType, true); + + ReplicationResponse.ShardInfo parsedShardInfo; + try (XContentParser parser = createParser(xContentType.xContent(), shardInfoBytes)) { + // Move to the start object that was manually added when building the object + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + parsedShardInfo = ReplicationResponse.ShardInfo.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + // We can use assertEquals because the shardInfo doesn't have a failure (and exceptions) + assertEquals(shardInfo, parsedShardInfo); + + BytesReference parsedShardInfoBytes = XContentHelper.toXContent(parsedShardInfo, xContentType, true); + assertEquals(shardInfoBytes, parsedShardInfoBytes); + } + + public void testShardInfoWithFailureToXContent() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + final ReplicationResponse.ShardInfo shardInfo = randomShardInfo(); + final BytesReference shardInfoBytes = XContentHelper.toXContent(shardInfo, xContentType, true); - final XContent xContent = randomFrom(XContentType.values()).xContent(); - try (XContentBuilder builder = XContentBuilder.builder(xContent)) { - builder.startObject(); - shardInfo.toXContent(builder, ToXContent.EMPTY_PARAMS); - builder.endObject(); + try (XContentParser parser = createParser(xContentType.xContent(), shardInfoBytes)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("_shards", parser.currentName()); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("total", 
parser.currentName()); + assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); + assertEquals(shardInfo.getTotal(), parser.intValue()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("successful", parser.currentName()); + assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); + assertEquals(shardInfo.getSuccessful(), parser.intValue()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("failed", parser.currentName()); + assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); + assertEquals(shardInfo.getFailed(), parser.intValue()); - try (XContentParser parser = xContent.createParser(builder.bytes())) { - assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + if (shardInfo.getFailures() != null && shardInfo.getFailures().length > 0) { assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - assertEquals("_shards", parser.currentName()); - assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - assertEquals("total", parser.currentName()); - assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); - assertEquals(shardInfo.getTotal(), parser.intValue()); - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - assertEquals("successful", parser.currentName()); - assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); - assertEquals(shardInfo.getSuccessful(), parser.intValue()); - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - assertEquals("failed", parser.currentName()); - assertEquals(XContentParser.Token.VALUE_NUMBER, parser.nextToken()); - assertEquals(shardInfo.getFailed(), parser.intValue()); + assertEquals("failures", parser.currentName()); + assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken()); - if (shardInfo.getFailures() != null && shardInfo.getFailures().length > 
0) { - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - assertEquals("failures", parser.currentName()); - assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken()); - - for (int i = 0; i < shardInfo.getFailures().length; i++) { - assertFailure(parser, shardInfo.getFailures()[i]); - } - assertEquals(XContentParser.Token.END_ARRAY, parser.nextToken()); + for (int i = 0; i < shardInfo.getFailures().length; i++) { + assertFailure(parser, shardInfo.getFailures()[i]); } - - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); + assertEquals(XContentParser.Token.END_ARRAY, parser.nextToken()); } + + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + } + + public void testRandomShardInfoFromXContent() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + + final ReplicationResponse.ShardInfo shardInfo = randomShardInfo(); + final BytesReference shardInfoBytes = XContentHelper.toXContent(shardInfo, xContentType, true); + + ReplicationResponse.ShardInfo parsedShardInfo; + try (XContentParser parser = createParser(xContentType.xContent(), shardInfoBytes)) { + // Move to the start object that was manually added when building the object + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + parsedShardInfo = ReplicationResponse.ShardInfo.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + + // We can't use assertEquals to compare the original ShardInfo with the parsed ShardInfo + // because it may include random failures with exceptions, and exception types are not + // preserved during ToXContent->FromXContent process. 
+ assertNotNull(parsedShardInfo); + assertEquals(shardInfo.getTotal(), parsedShardInfo.getTotal()); + assertEquals(shardInfo.getSuccessful(), parsedShardInfo.getSuccessful()); + assertEquals(shardInfo.getFailed(), parsedShardInfo.getFailed()); + assertEquals(shardInfo.getFailures().length, parsedShardInfo.getFailures().length); + + for (int i = 0; i < shardInfo.getFailures().length; i++) { + ReplicationResponse.ShardInfo.Failure parsedFailure = parsedShardInfo.getFailures()[i]; + ReplicationResponse.ShardInfo.Failure failure = shardInfo.getFailures()[i]; + + assertEquals(failure.index(), parsedFailure.index()); + assertEquals(failure.shardId(), parsedFailure.shardId()); + assertEquals(failure.nodeId(), parsedFailure.nodeId()); + assertEquals(failure.status(), parsedFailure.status()); + assertEquals(failure.primary(), parsedFailure.primary()); + + Throwable cause = failure.getCause(); + String expectedMessage = "Elasticsearch exception [type=" + ElasticsearchException.getExceptionName(cause) + + ", reason=" + cause.getMessage() + "]"; + assertEquals(expectedMessage, parsedFailure.getCause().getMessage()); } } public void testRandomFailureToXContent() throws IOException { - ReplicationResponse.ShardInfo.Failure shardInfoFailure = randomFailure(); + final XContentType xContentType = randomFrom(XContentType.values()); - final XContent xContent = randomFrom(XContentType.values()).xContent(); - try (XContentBuilder builder = XContentBuilder.builder(xContent)) { - shardInfoFailure.toXContent(builder, ToXContent.EMPTY_PARAMS); + final ReplicationResponse.ShardInfo.Failure shardInfoFailure = randomFailure(); + final BytesReference shardInfoBytes = XContentHelper.toXContent(shardInfoFailure, xContentType, false); - try (XContentParser parser = xContent.createParser(builder.bytes())) { - assertFailure(parser, shardInfoFailure); - } + try (XContentParser parser = createParser(xContentType.xContent(), shardInfoBytes)) { + assertFailure(parser, shardInfoFailure); } } + public 
void testRandomFailureToAndFromXContent() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + + final ReplicationResponse.ShardInfo.Failure shardInfoFailure = randomFailure(); + final BytesReference shardInfoBytes = XContentHelper.toXContent(shardInfoFailure, xContentType, false); + + ReplicationResponse.ShardInfo.Failure parsedFailure; + try (XContentParser parser = createParser(xContentType.xContent(), shardInfoBytes)) { + // Move to the first start object + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + parsedFailure = ReplicationResponse.ShardInfo.Failure.fromXContent(parser); + assertNull(parser.nextToken()); + } + + assertEquals(shardInfoFailure.index(), parsedFailure.index()); + assertEquals(shardInfoFailure.shardId(), parsedFailure.shardId()); + assertEquals(shardInfoFailure.nodeId(), parsedFailure.nodeId()); + assertEquals(shardInfoFailure.status(), parsedFailure.status()); + assertEquals(shardInfoFailure.primary(), parsedFailure.primary()); + + Throwable cause = shardInfoFailure.getCause(); + String expectedMessage = "Elasticsearch exception [type=" + ElasticsearchException.getExceptionName(cause) + + ", reason=" + cause.getMessage() + "]"; + assertEquals(expectedMessage, parsedFailure.getCause().getMessage()); + } + private static void assertFailure(XContentParser parser, ReplicationResponse.ShardInfo.Failure failure) throws IOException { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index f05b931e782..0e01c131aa3 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -360,8 +360,8 @@ public class TransportReplicationActionTests extends ESTestCase { public void testClosedIndexOnReroute() throws InterruptedException { final String index = "test"; // no replicas in oder to skip the replication part - setState(clusterService, - new ClusterStateChanges().closeIndices(state(index, true, ShardRoutingState.UNASSIGNED), new CloseIndexRequest(index))); + setState(clusterService, new ClusterStateChanges(xContentRegistry()).closeIndices(state(index, true, ShardRoutingState.UNASSIGNED), + new CloseIndexRequest(index))); logger.debug("--> using initial state:\n{}", clusterService.state()); Request request = new Request(new ShardId("test", "_na_", 0)).timeout("1ms"); PlainActionFuture listener = new PlainActionFuture<>(); diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 8c342265a28..be95f8dc581 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.env.Environment; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.shard.ShardId; @@ -56,10 +57,10 @@ public class UpdateRequestTests extends ESTestCase { public void testUpdateRequest() throws Exception { UpdateRequest request = new UpdateRequest("test", "type", "1"); // simple script - request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder() + 
request.fromXContent(createParser(XContentFactory.jsonBuilder() .startObject() .field("script", "script1") - .endObject().bytes())); + .endObject())); Script script = request.script(); assertThat(script, notNullValue()); assertThat(script.getIdOrCode(), equalTo("script1")); @@ -69,9 +70,9 @@ public class UpdateRequestTests extends ESTestCase { assertThat(params, equalTo(Collections.emptyMap())); // simple verbose script - request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject() + request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject() .startObject("script").field("inline", "script1").endObject() - .endObject().bytes())); + .endObject())); script = request.script(); assertThat(script, notNullValue()); assertThat(script.getIdOrCode(), equalTo("script1")); @@ -82,13 +83,13 @@ public class UpdateRequestTests extends ESTestCase { // script with params request = new UpdateRequest("test", "type", "1"); - request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject() + request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject() .startObject("script") .field("inline", "script1") .startObject("params") .field("param1", "value1") .endObject() - .endObject().endObject().bytes())); + .endObject().endObject())); script = request.script(); assertThat(script, notNullValue()); assertThat(script.getIdOrCode(), equalTo("script1")); @@ -100,7 +101,7 @@ public class UpdateRequestTests extends ESTestCase { assertThat(params.get("param1").toString(), equalTo("value1")); request = new UpdateRequest("test", "type", "1"); - request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder() + request.fromXContent(createParser(XContentFactory.jsonBuilder() .startObject() .startObject("script") .startObject("params") @@ -108,7 +109,7 @@ public class UpdateRequestTests extends ESTestCase { .endObject() .field("inline", "script1") .endObject() - .endObject().bytes())); + 
.endObject())); script = request.script(); assertThat(script, notNullValue()); assertThat(script.getIdOrCode(), equalTo("script1")); @@ -121,7 +122,7 @@ public class UpdateRequestTests extends ESTestCase { // script with params and upsert request = new UpdateRequest("test", "type", "1"); - request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject() + request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject() .startObject("script") .startObject("params") .field("param1", "value1") @@ -133,7 +134,7 @@ public class UpdateRequestTests extends ESTestCase { .startObject("compound") .field("field2", "value2") .endObject() - .endObject().endObject().bytes())); + .endObject().endObject())); script = request.script(); assertThat(script, notNullValue()); assertThat(script.getIdOrCode(), equalTo("script1")); @@ -148,7 +149,7 @@ public class UpdateRequestTests extends ESTestCase { assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2")); request = new UpdateRequest("test", "type", "1"); - request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject() + request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject() .startObject("upsert") .field("field1", "value1") .startObject("compound") @@ -160,7 +161,7 @@ public class UpdateRequestTests extends ESTestCase { .field("param1", "value1") .endObject() .field("inline", "script1") - .endObject().endObject().bytes())); + .endObject().endObject())); script = request.script(); assertThat(script, notNullValue()); assertThat(script.getIdOrCode(), equalTo("script1")); @@ -176,7 +177,7 @@ public class UpdateRequestTests extends ESTestCase { // script with doc request = new UpdateRequest("test", "type", "1"); - request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder() + request.fromXContent(createParser(XContentFactory.jsonBuilder() .startObject() .startObject("doc") .field("field1", 
"value1") @@ -184,7 +185,7 @@ public class UpdateRequestTests extends ESTestCase { .field("field2", "value2") .endObject() .endObject() - .endObject().bytes())); + .endObject())); Map doc = request.doc().sourceAsMap(); assertThat(doc.get("field1").toString(), equalTo("value1")); assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2")); @@ -192,54 +193,54 @@ public class UpdateRequestTests extends ESTestCase { // Related to issue 15338 public void testFieldsParsing() throws Exception { - UpdateRequest request = new UpdateRequest("test", "type1", "1") - .fromXContent(XContentHelper.createParser(new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}"))); + UpdateRequest request = new UpdateRequest("test", "type1", "1").fromXContent( + createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}"))); assertThat(request.doc().sourceAsMap().get("field1").toString(), equalTo("value1")); assertThat(request.fields(), arrayContaining("_source")); - request = new UpdateRequest("test", "type2", "2").fromXContent( - XContentHelper.createParser(new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}"))); + request = new UpdateRequest("test", "type2", "2").fromXContent(createParser(JsonXContent.jsonXContent, + new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}"))); assertThat(request.doc().sourceAsMap().get("field2").toString(), equalTo("value2")); assertThat(request.fields(), arrayContaining("field1", "field2")); } public void testFetchSourceParsing() throws Exception { UpdateRequest request = new UpdateRequest("test", "type1", "1"); - request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder() + request.fromXContent(createParser(XContentFactory.jsonBuilder() .startObject() .field("_source", true) - .endObject().bytes())); + .endObject())); assertThat(request.fetchSource(), 
notNullValue()); assertThat(request.fetchSource().includes().length, equalTo(0)); assertThat(request.fetchSource().excludes().length, equalTo(0)); assertThat(request.fetchSource().fetchSource(), equalTo(true)); - request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder() + request.fromXContent(createParser(XContentFactory.jsonBuilder() .startObject() .field("_source", false) - .endObject().bytes())); + .endObject())); assertThat(request.fetchSource(), notNullValue()); assertThat(request.fetchSource().includes().length, equalTo(0)); assertThat(request.fetchSource().excludes().length, equalTo(0)); assertThat(request.fetchSource().fetchSource(), equalTo(false)); - request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder() + request.fromXContent(createParser(XContentFactory.jsonBuilder() .startObject() .field("_source", "path.inner.*") - .endObject().bytes())); + .endObject())); assertThat(request.fetchSource(), notNullValue()); assertThat(request.fetchSource().fetchSource(), equalTo(true)); assertThat(request.fetchSource().includes().length, equalTo(1)); assertThat(request.fetchSource().excludes().length, equalTo(0)); assertThat(request.fetchSource().includes()[0], equalTo("path.inner.*")); - request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder() + request.fromXContent(createParser(XContentFactory.jsonBuilder() .startObject() .startObject("_source") .field("includes", "path.inner.*") .field("excludes", "another.inner.*") .endObject() - .endObject().bytes())); + .endObject())); assertThat(request.fetchSource(), notNullValue()); assertThat(request.fetchSource().fetchSource(), equalTo(true)); assertThat(request.fetchSource().includes().length, equalTo(1)); diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java index 9a1417bdfa6..c31bbe8c74f 100644 --- 
a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.monitor.jvm.JvmInfo; import java.nio.file.Path; +import java.util.Map; import java.util.function.Consumer; import static org.hamcrest.CoreMatchers.containsString; @@ -152,9 +153,9 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { true, output -> {}, (foreground, pidFile, quiet, esSettings) -> { - assertThat(esSettings.size(), equalTo(2)); - assertThat(esSettings, hasEntry("foo", "bar")); - assertThat(esSettings, hasEntry("baz", "qux")); + Map settings = esSettings.getAsMap(); + assertThat(settings, hasEntry("foo", "bar")); + assertThat(settings, hasEntry("baz", "qux")); }, "-Efoo=bar", "-E", "baz=qux" ); diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index ce6704cef82..e0add08b074 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -54,6 +54,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.LongConsumer; +import java.util.function.Predicate; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; @@ -88,9 +89,9 @@ public class ShardStateActionTests extends ESTestCase { } @Override - protected void waitForNewMasterAndRetry(String actionName, ClusterStateObserver observer, ShardEntry shardEntry, Listener listener) { + protected void waitForNewMasterAndRetry(String actionName, ClusterStateObserver observer, ShardEntry 
shardEntry, Listener listener, Predicate changePredicate) { onBeforeWaitForNewMasterAndRetry.run(); - super.waitForNewMasterAndRetry(actionName, observer, shardEntry, listener); + super.waitForNewMasterAndRetry(actionName, observer, shardEntry, listener, changePredicate); onAfterWaitForNewMasterAndRetry.run(); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java index 8a342057dab..3b894eb8d7c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java @@ -41,7 +41,7 @@ public class MetaDataIndexAliasesServiceTests extends ESTestCase { private final AliasValidator aliasValidator = new AliasValidator(Settings.EMPTY); private final MetaDataDeleteIndexService deleteIndexService = mock(MetaDataDeleteIndexService.class); private final MetaDataIndexAliasesService service = new MetaDataIndexAliasesService(Settings.EMPTY, null, null, aliasValidator, - deleteIndexService); + deleteIndexService, xContentRegistry()); public MetaDataIndexAliasesServiceTests() { // Mock any deletes so we don't need to worry about how MetaDataDeleteIndexService does its job diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java index a1dfdbc74f9..88e2835b5ab 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -30,7 +30,8 @@ import java.util.Collections; public class MetaDataIndexUpgradeServiceTests extends ESTestCase { public void testArchiveBrokenIndexSettings() { - MetaDataIndexUpgradeService service = new 
MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, xContentRegistry(), + new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); IndexMetaData src = newIndexMeta("foo", Settings.EMPTY); IndexMetaData indexMetaData = service.archiveBrokenIndexSettings(src); assertSame(indexMetaData, src); @@ -56,8 +57,8 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { } public void testUpgrade() { - MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), - Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, xContentRegistry(), + new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); assertFalse(service.isUpgraded(src)); src = service.upgradeIndexMetaData(src, Version.CURRENT.minimumIndexCompatibilityVersion()); @@ -68,8 +69,8 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { } public void testIsUpgraded() { - MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), - Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, xContentRegistry(), + new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); 
assertFalse(service.isUpgraded(src)); Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion()); @@ -80,8 +81,8 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { } public void testFailUpgrade() { - MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), - Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, xContentRegistry(), + new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); final IndexMetaData metaData = newIndexMeta("foo", Settings.builder() .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromString("2.4.0")) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 3aba43f366a..d005b05737d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -786,8 +786,9 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } catch (IllegalArgumentException e) { assertThat("can't be allocated because there isn't enough room: " + e.getMessage(), e.getMessage(), - containsString("the node is above the low watermark [cluster.routing.allocation.disk.watermark.low=0.7], using " + - "more disk space than the maximum allowed [70.0%], actual free: [26.0%]")); + containsString("the node is above the low watermark cluster setting " + + "[cluster.routing.allocation.disk.watermark.low=0.7], using more disk space than the maximum " + 
+ "allowed [70.0%], actual free: [26.0%]")); } } @@ -858,7 +859,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { Decision decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); assertThat(decision.type(), equalTo(Decision.Type.NO)); assertThat(((Decision.Single) decision).getExplanation(), containsString( - "the shard cannot remain on this node because it is above the high watermark " + + "the shard cannot remain on this node because it is above the high watermark cluster setting " + "[cluster.routing.allocation.disk.watermark.high=70%] and there is less than the required [30.0%] free disk on node, " + "actual free: [20.0%]")); @@ -890,12 +891,12 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { assertThat(decision.type(), equalTo(Decision.Type.NO)); if (fooRouting.recoverySource().getType() == RecoverySource.Type.EMPTY_STORE) { assertThat(((Decision.Single) decision).getExplanation(), containsString( - "the node is above the high watermark [cluster.routing.allocation.disk.watermark.high=70%], using more disk space than " + - "the maximum allowed [70.0%], actual free: [20.0%]")); + "the node is above the high watermark cluster setting [cluster.routing.allocation.disk.watermark.high=70%], using " + + "more disk space than the maximum allowed [70.0%], actual free: [20.0%]")); } else { assertThat(((Decision.Single) decision).getExplanation(), containsString( - "the node is above the low watermark [cluster.routing.allocation.disk.watermark.low=60%], using more disk space than " + - "the maximum allowed [60.0%], actual free: [20.0%]")); + "the node is above the low watermark cluster setting [cluster.routing.allocation.disk.watermark.low=60%], using more " + + "disk space than the maximum allowed [60.0%], actual free: [20.0%]")); } // Creating AllocationService instance and the services it depends on... 
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index 659c3b25833..7379ee78d03 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -106,8 +106,8 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase { decision = decider.canAllocate(test_0, new RoutingNode("node_1", node_1), allocation); assertEquals(mostAvailableUsage.toString(), Decision.Type.NO, decision.type()); assertThat(((Decision.Single) decision).getExplanation(), containsString( - "the node is above the high watermark [cluster.routing.allocation.disk.watermark.high=90%], using more disk space than " + - "the maximum allowed [90.0%]")); + "the node is above the high watermark cluster setting [cluster.routing.allocation.disk.watermark.high=90%], using more " + + "disk space than the maximum allowed [90.0%]")); } public void testCanRemainUsesLeastAvailableSpace() { @@ -181,8 +181,8 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase { decision = decider.canRemain(test_1, new RoutingNode("node_1", node_1), allocation); assertEquals(Decision.Type.NO, decision.type()); assertThat(((Decision.Single) decision).getExplanation(), containsString("the shard cannot remain on this node because it is " + - "above the high watermark [cluster.routing.allocation.disk.watermark.high=90%] and there is less than the required [10.0%] " + - "free disk on node, actual free: [9.0%]")); + "above the high watermark cluster setting [cluster.routing.allocation.disk.watermark.high=90%] and there is less than " + + "the required [10.0%] free disk on node, actual free: [9.0%]")); try { decider.canRemain(test_0, new RoutingNode("node_1", 
node_1), allocation); fail("not allocated on this node"); diff --git a/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java b/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java index 1f348000ee4..59b37f9e9e6 100644 --- a/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java +++ b/core/src/test/java/org/elasticsearch/common/ParseFieldTests.java @@ -20,77 +20,45 @@ package org.elasticsearch.common; import org.elasticsearch.test.ESTestCase; -import static org.hamcrest.CoreMatchers.containsString; +import java.io.IOException; + import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.sameInstance; import static org.hamcrest.collection.IsArrayContainingInAnyOrder.arrayContainingInAnyOrder; public class ParseFieldTests extends ESTestCase { - public void testParse() { + public void testParse() throws IOException { String name = "foo_bar"; ParseField field = new ParseField(name); String[] deprecated = new String[]{"barFoo", "bar_foo", "Foobar"}; ParseField withDeprecations = field.withDeprecation(deprecated); assertThat(field, not(sameInstance(withDeprecations))); - assertThat(field.match(name, false), is(true)); - assertThat(field.match("foo bar", false), is(false)); + assertThat(field.match(name), is(true)); + assertThat(field.match("foo bar"), is(false)); for (String deprecatedName : deprecated) { - assertThat(field.match(deprecatedName, false), is(false)); + assertThat(field.match(deprecatedName), is(false)); } - assertThat(withDeprecations.match(name, false), is(true)); - assertThat(withDeprecations.match("foo bar", false), is(false)); + assertThat(withDeprecations.match(name), is(true)); + assertThat(withDeprecations.match("foo bar"), is(false)); for (String deprecatedName : deprecated) { - assertThat(withDeprecations.match(deprecatedName, false), is(true)); - } - - // now with strict mode - assertThat(field.match(name, true), is(true)); - 
assertThat(field.match("foo bar", true), is(false)); - for (String deprecatedName : deprecated) { - assertThat(field.match(deprecatedName, true), is(false)); - } - - assertThat(withDeprecations.match(name, true), is(true)); - assertThat(withDeprecations.match("foo bar", true), is(false)); - for (String deprecatedName : deprecated) { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - withDeprecations.match(deprecatedName, true); - }); - assertThat(e.getMessage(), containsString("used, expected [foo_bar] instead")); + assertThat(withDeprecations.match(deprecatedName), is(true)); + assertWarnings("Deprecated field [" + deprecatedName + "] used, expected [foo_bar] instead"); } } - public void testAllDeprecated() { + public void testAllDeprecated() throws IOException { String name = "like_text"; - - boolean withDeprecatedNames = randomBoolean(); String[] deprecated = new String[]{"text", "same_as_text"}; - String[] allValues; - if (withDeprecatedNames) { - String[] newArray = new String[1 + deprecated.length]; - newArray[0] = name; - System.arraycopy(deprecated, 0, newArray, 1, deprecated.length); - allValues = newArray; - } else { - allValues = new String[] {name}; - } - - ParseField field; - if (withDeprecatedNames) { - field = new ParseField(name).withDeprecation(deprecated).withAllDeprecated("like"); - } else { - field = new ParseField(name).withAllDeprecated("like"); - } - - // strict mode off - assertThat(field.match(randomFrom(allValues), false), is(true)); - assertThat(field.match("not a field name", false), is(false)); - - // now with strict mode - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> field.match(randomFrom(allValues), true)); - assertThat(e.getMessage(), containsString(" used, replaced by [like]")); + ParseField field = new ParseField(name).withDeprecation(deprecated).withAllDeprecated("like"); + assertFalse(field.match("not a field name")); + assertTrue(field.match("text")); + 
assertWarnings("Deprecated field [text] used, replaced by [like]"); + assertTrue(field.match("same_as_text")); + assertWarnings("Deprecated field [same_as_text] used, replaced by [like]"); + assertTrue(field.match("like_text")); + assertWarnings("Deprecated field [like_text] used, replaced by [like]"); } public void testGetAllNamesIncludedDeprecated() { diff --git a/core/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java b/core/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java index f75e73ced2c..d0e1b807baf 100644 --- a/core/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java +++ b/core/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java @@ -41,6 +41,12 @@ public class DeprecationLoggerTests extends ESTestCase { private final DeprecationLogger logger = new DeprecationLogger(Loggers.getLogger(getClass())); + @Override + protected boolean enableWarningsCheck() { + //this is a low level test for the deprecation logger, setup and checks are done manually + return false; + } + public void testAddsHeaderWithThreadContext() throws IOException { String msg = "A simple message [{}]"; String param = randomAsciiOfLengthBetween(1, 5); @@ -54,7 +60,7 @@ public class DeprecationLoggerTests extends ESTestCase { Map> responseHeaders = threadContext.getResponseHeaders(); assertEquals(1, responseHeaders.size()); - assertEquals(formatted, responseHeaders.get(DeprecationLogger.DEPRECATION_HEADER).get(0)); + assertEquals(formatted, responseHeaders.get(DeprecationLogger.WARNING_HEADER).get(0)); } } @@ -74,7 +80,7 @@ public class DeprecationLoggerTests extends ESTestCase { assertEquals(1, responseHeaders.size()); - List responses = responseHeaders.get(DeprecationLogger.DEPRECATION_HEADER); + List responses = responseHeaders.get(DeprecationLogger.WARNING_HEADER); assertEquals(2, responses.size()); assertEquals(formatted, responses.get(0)); @@ -93,7 +99,7 @@ public class DeprecationLoggerTests 
extends ESTestCase { logger.deprecated(expected); Map> responseHeaders = threadContext.getResponseHeaders(); - List responses = responseHeaders.get(DeprecationLogger.DEPRECATION_HEADER); + List responses = responseHeaders.get(DeprecationLogger.WARNING_HEADER); // ensure it works (note: concurrent tests may be adding to it, but in different threads, so it should have no impact) assertThat(responses, hasSize(atLeast(1))); @@ -104,7 +110,7 @@ public class DeprecationLoggerTests extends ESTestCase { logger.deprecated(unexpected); responseHeaders = threadContext.getResponseHeaders(); - responses = responseHeaders.get(DeprecationLogger.DEPRECATION_HEADER); + responses = responseHeaders.get(DeprecationLogger.WARNING_HEADER); assertThat(responses, hasSize(atLeast(1))); assertThat(responses, hasItem(expected)); diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index 3e29068e672..b1ec686c395 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.HttpInfo; import org.elasticsearch.http.HttpServerAdapter; import org.elasticsearch.http.HttpServerTransport; @@ -137,6 +138,7 @@ public class NetworkModuleTests extends ModuleTestCase { BigArrays bigArrays, CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, + NamedXContentRegistry xContentRegistry, NetworkService networkService) { return Collections.singletonMap("custom", custom); } @@ -176,6 +178,7 @@ public class 
NetworkModuleTests extends ModuleTestCase { BigArrays bigArrays, CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, + NamedXContentRegistry xContentRegistry, NetworkService networkService) { Map> supplierMap = new HashMap<>(); supplierMap.put("custom", custom); @@ -208,6 +211,7 @@ public class NetworkModuleTests extends ModuleTestCase { BigArrays bigArrays, CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, + NamedXContentRegistry xContentRegistry, NetworkService networkService) { Map> supplierMap = new HashMap<>(); supplierMap.put("custom", custom); @@ -252,6 +256,6 @@ public class NetworkModuleTests extends ModuleTestCase { } private NetworkModule newNetworkModule(Settings settings, boolean transportClient, NetworkPlugin... plugins) { - return new NetworkModule(settings, transportClient, Arrays.asList(plugins), null, null, null, null, null); + return new NetworkModule(settings, transportClient, Arrays.asList(plugins), null, null, null, null, xContentRegistry(), null); } } diff --git a/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java b/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java index 3f6f1848fd8..b370250bf9d 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java @@ -20,9 +20,8 @@ package org.elasticsearch.common.unit; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -31,7 +30,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static 
org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.sameInstance; -import static org.hamcrest.number.IsCloseTo.closeTo; public class FuzzinessTests extends ESTestCase { public void testNumerics() { @@ -43,12 +41,11 @@ public class FuzzinessTests extends ESTestCase { final int iters = randomIntBetween(10, 50); for (int i = 0; i < iters; i++) { { - XContent xcontent = XContentType.JSON.xContent(); float floatValue = randomFloat(); - String json = jsonBuilder().startObject() + XContentBuilder json = jsonBuilder().startObject() .field(Fuzziness.X_FIELD_NAME, floatValue) - .endObject().string(); - XContentParser parser = xcontent.createParser(json); + .endObject(); + XContentParser parser = createParser(json); assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_NUMBER)); @@ -57,17 +54,16 @@ public class FuzzinessTests extends ESTestCase { assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); } { - XContent xcontent = XContentType.JSON.xContent(); Integer intValue = frequently() ? randomIntBetween(0, 2) : randomIntBetween(0, 100); Float floatRep = randomFloat(); Number value = intValue; if (randomBoolean()) { value = new Float(floatRep += intValue); } - String json = jsonBuilder().startObject() + XContentBuilder json = jsonBuilder().startObject() .field(Fuzziness.X_FIELD_NAME, randomBoolean() ? 
value.toString() : value) - .endObject().string(); - XContentParser parser = xcontent.createParser(json); + .endObject(); + XContentParser parser = createParser(json); assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING))); @@ -93,14 +89,15 @@ public class FuzzinessTests extends ESTestCase { } } { - XContent xcontent = XContentType.JSON.xContent(); - String json = jsonBuilder().startObject() - .field(Fuzziness.X_FIELD_NAME, randomBoolean() ? "AUTO" : "auto") - .endObject().string(); + XContentBuilder json; if (randomBoolean()) { - json = Fuzziness.AUTO.toXContent(jsonBuilder().startObject(), null).endObject().string(); + json = Fuzziness.AUTO.toXContent(jsonBuilder().startObject(), null).endObject(); + } else { + json = jsonBuilder().startObject() + .field(Fuzziness.X_FIELD_NAME, randomBoolean() ? 
"AUTO" : "auto") + .endObject(); } - XContentParser parser = xcontent.createParser(json); + XContentParser parser = createParser(json); assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/core/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 1c0a92dbd74..e468751cf4a 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -21,11 +21,13 @@ package org.elasticsearch.common.xcontent; import com.fasterxml.jackson.core.JsonGenerationException; import com.fasterxml.jackson.core.JsonGenerator; - import com.fasterxml.jackson.core.JsonParseException; + import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -751,7 +753,7 @@ public abstract class BaseXContentTestCase extends ESTestCase { generator.writeEndObject(); } - XContentParser parser = xcontentType().xContent().createParser(os.toByteArray()); + XContentParser parser = xcontentType().xContent().createParser(NamedXContentRegistry.EMPTY, os.toByteArray()); assertEquals(Token.START_OBJECT, parser.nextToken()); assertEquals(Token.FIELD_NAME, parser.nextToken()); assertEquals("bar", parser.currentName()); @@ -785,7 +787,7 @@ public abstract class BaseXContentTestCase extends ESTestCase { generator.writeRawValue(new BytesArray(rawData)); } - XContentParser parser = 
xcontentType().xContent().createParser(os.toByteArray()); + XContentParser parser = xcontentType().xContent().createParser(NamedXContentRegistry.EMPTY, os.toByteArray()); assertEquals(Token.START_OBJECT, parser.nextToken()); assertEquals(Token.FIELD_NAME, parser.nextToken()); assertEquals("foo", parser.currentName()); @@ -801,7 +803,7 @@ public abstract class BaseXContentTestCase extends ESTestCase { generator.writeEndObject(); } - parser = xcontentType().xContent().createParser(os.toByteArray()); + parser = xcontentType().xContent().createParser(NamedXContentRegistry.EMPTY, os.toByteArray()); assertEquals(Token.START_OBJECT, parser.nextToken()); assertEquals(Token.FIELD_NAME, parser.nextToken()); assertEquals("test", parser.currentName()); @@ -829,7 +831,7 @@ public abstract class BaseXContentTestCase extends ESTestCase { generator.flush(); byte[] serialized = os.toByteArray(); - XContentParser parser = xcontentType().xContent().createParser(serialized); + XContentParser parser = xcontentType().xContent().createParser(NamedXContentRegistry.EMPTY, serialized); Map map = parser.map(); assertEquals("bar", map.get("foo")); assertEquals(bigInteger, map.get("bigint")); @@ -990,17 +992,50 @@ public abstract class BaseXContentTestCase extends ESTestCase { assumeTrue("Test only makes sense if XContent parser has strict duplicate checks enabled", XContent.isStrictDuplicateDetectionEnabled()); - BytesReference bytes = builder() + XContentBuilder builder = builder() .startObject() .field("key", 1) .field("key", 2) - .endObject() - .bytes(); + .endObject(); - JsonParseException pex = expectThrows(JsonParseException.class, () -> createParser(xcontentType().xContent(), bytes).map()); + JsonParseException pex = expectThrows(JsonParseException.class, () -> createParser(builder).map()); assertThat(pex.getMessage(), startsWith("Duplicate field 'key'")); } + public void testNamedObject() throws IOException { + Object test1 = new Object(); + Object test2 = new Object(); + 
NamedXContentRegistry registry = new NamedXContentRegistry(Arrays.asList( + new NamedXContentRegistry.Entry(Object.class, new ParseField("test1"), p -> test1), + new NamedXContentRegistry.Entry(Object.class, new ParseField("test2", "deprecated"), p -> test2), + new NamedXContentRegistry.Entry(Object.class, new ParseField("str"), p -> p.text()))); + XContentBuilder b = XContentBuilder.builder(xcontentType().xContent()); + b.value("test"); + XContentParser p = xcontentType().xContent().createParser(registry, b.bytes()); + assertEquals(test1, p.namedObject(Object.class, "test1", null)); + assertEquals(test2, p.namedObject(Object.class, "test2", null)); + assertEquals(test2, p.namedObject(Object.class, "deprecated", null)); + assertWarnings("Deprecated field [deprecated] used, expected [test2] instead"); + { + p.nextToken(); + assertEquals("test", p.namedObject(Object.class, "str", null)); + NamedXContentRegistry.UnknownNamedObjectException e = expectThrows(NamedXContentRegistry.UnknownNamedObjectException.class, + () -> p.namedObject(Object.class, "unknown", null)); + assertEquals("Unknown Object [unknown]", e.getMessage()); + assertEquals("java.lang.Object", e.getCategoryClass()); + assertEquals("unknown", e.getName()); + } + { + Exception e = expectThrows(ElasticsearchException.class, () -> p.namedObject(String.class, "doesn't matter", null)); + assertEquals("Unknown namedObject category [java.lang.String]", e.getMessage()); + } + { + XContentParser emptyRegistryParser = xcontentType().xContent().createParser(NamedXContentRegistry.EMPTY, new byte[] {}); + Exception e = expectThrows(ElasticsearchException.class, + () -> emptyRegistryParser.namedObject(String.class, "doesn't matter", null)); + assertEquals("namedObject is not supported for this parser", e.getMessage()); + } + } private static void expectUnclosedException(ThrowingRunnable runnable) { IllegalStateException e = expectThrows(IllegalStateException.class, runnable); diff --git 
a/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java index 46c0ba35723..3c1bc509328 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java @@ -180,8 +180,8 @@ public class ObjectParserTests extends ESTestCase { } } } - XContentParser parser = XContentType.JSON.xContent() - .createParser("{\"url\" : { \"host\": \"http://foobar\", \"port\" : 80}, \"name\" : \"foobarbaz\"}"); + XContentParser parser = createParser(JsonXContent.jsonXContent, + "{\"url\" : { \"host\": \"http://foobar\", \"port\" : 80}, \"name\" : \"foobarbaz\"}"); ObjectParser objectParser = new ObjectParser<>("foo"); objectParser.declareString(Foo::setName, new ParseField("name")); objectParser.declareObjectOrDefault(Foo::setURI, (p, s) -> s.parseURI(p), () -> null, new ParseField("url")); @@ -218,27 +218,17 @@ public class ObjectParserTests extends ESTestCase { } } - public void testDeprecationFail() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"old_test\" : \"foo\"}"); + public void testDeprecationWarnings() throws IOException { class TestStruct { public String test; } ObjectParser objectParser = new ObjectParser<>("foo"); TestStruct s = new TestStruct(); - + XContentParser parser = createParser(XContentType.JSON.xContent(), "{\"old_test\" : \"foo\"}"); objectParser.declareField((i, v, c) -> v.test = i.text(), new ParseField("test", "old_test"), ObjectParser.ValueType.STRING); - - try { - objectParser.parse(parser, s, STRICT_PARSING); - fail("deprecated value"); - } catch (IllegalArgumentException ex) { - assertEquals(ex.getMessage(), "Deprecated field [old_test] used, expected [test] instead"); - - } - assertNull(s.test); - parser = createParser(JsonXContent.jsonXContent, "{\"old_test\" : \"foo\"}"); objectParser.parse(parser, s, () -> 
ParseFieldMatcher.EMPTY); assertEquals("foo", s.test); + assertWarnings("Deprecated field [old_test] used, expected [test] instead"); } public void testFailOnValueType() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/UnknownNamedObjectExceptionTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/UnknownNamedObjectExceptionTests.java new file mode 100644 index 00000000000..5ed3b0cf8c2 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/xcontent/UnknownNamedObjectExceptionTests.java @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class UnknownNamedObjectExceptionTests extends ESTestCase { + public void testRoundTrip() throws IOException { + XContentLocation location = new XContentLocation(between(1, 1000), between(1, 1000)); + UnknownNamedObjectException created = new UnknownNamedObjectException(location, UnknownNamedObjectExceptionTests.class, + randomAsciiOfLength(5)); + UnknownNamedObjectException roundTripped; + + try (BytesStreamOutput out = new BytesStreamOutput()) { + created.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + roundTripped = new UnknownNamedObjectException(in); + } + } + assertEquals(created.getMessage(), roundTripped.getMessage()); + assertEquals(created.getLineNumber(), roundTripped.getLineNumber()); + assertEquals(created.getColumnNumber(), roundTripped.getColumnNumber()); + assertEquals(created.getCategoryClass(), roundTripped.getCategoryClass()); + assertEquals(created.getName(), roundTripped.getName()); + } + + public void testStatusCode() { + XContentLocation location = new XContentLocation(between(1, 1000), between(1, 1000)); + UnknownNamedObjectException e = new UnknownNamedObjectException(location, UnknownNamedObjectExceptionTests.class, + randomAsciiOfLength(5)); + assertEquals(RestStatus.BAD_REQUEST, e.status()); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java index bec1a0e4fb5..00edb25d5c3 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java +++ 
b/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java @@ -20,62 +20,24 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; -import org.junit.Before; import java.io.IOException; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; public class XContentParserUtilsTests extends ESTestCase { - - private XContentType xContentType; - - @Before - public void setUp() throws Exception { - super.setUp(); - xContentType = randomFrom(XContentType.values()); - } - - public void testEnsureFieldName() throws IOException { - ParsingException e = expectThrows(ParsingException.class, () -> { - XContentParser parser = createParser(createBuilder().startObject().endObject().bytes()); + public void testEnsureExpectedToken() throws IOException { + final XContentParser.Token randomToken = randomFrom(XContentParser.Token.values()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{}")) { // Parser current token is null assertNull(parser.currentToken()); - XContentParserUtils.ensureFieldName(parser.currentToken(), parser::getTokenLocation); - }); - assertThat(e.getMessage(), equalTo("Failed to parse object: expecting token of type [FIELD_NAME] but found [null]")); - - e = expectThrows(ParsingException.class, () -> { - XContentParser parser = createParser(createBuilder().startObject().field("foo", "bar").endObject().bytes()); - // Parser next token is a start object - XContentParserUtils.ensureFieldName(parser.nextToken(), parser::getTokenLocation); - }); - assertThat(e.getMessage(), equalTo("Failed to parse object: expecting token of type [FIELD_NAME] but found [START_OBJECT]")); - - e = 
expectThrows(ParsingException.class, () -> { - XContentParser parser = createParser(createBuilder().startObject().field("foo", "bar").endObject().bytes()); - // Moves to start object - assertThat(parser.nextToken(), is(XContentParser.Token.START_OBJECT)); - // Expected field name is "foo", not "test" - XContentParserUtils.ensureFieldName(parser, parser.nextToken(), "test"); - }); - assertThat(e.getMessage(), equalTo("Failed to parse object: expecting field with name [test] but found [foo]")); - - // Everything is fine - final String randomFieldName = randomAsciiOfLength(5); - XContentParser parser = createParser(createBuilder().startObject().field(randomFieldName, 0).endObject().bytes()); - assertThat(parser.nextToken(), is(XContentParser.Token.START_OBJECT)); - XContentParserUtils.ensureFieldName(parser, parser.nextToken(), randomFieldName); - } - - private XContentBuilder createBuilder() throws IOException { - return XContentBuilder.builder(xContentType.xContent()); - } - - private XContentParser createParser(BytesReference bytes) throws IOException { - return xContentType.xContent().createParser(bytes); + ParsingException e = expectThrows(ParsingException.class, + () -> ensureExpectedToken(randomToken, parser.currentToken(), parser::getTokenLocation)); + assertEquals("Failed to parse object: expecting token of type [" + randomToken + "] but found [null]", e.getMessage()); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation); + } } } diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java index a8091fc1122..2401a3550e9 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java +++ 
b/core/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java @@ -19,10 +19,16 @@ package org.elasticsearch.common.xcontent.support; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; +import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -61,5 +67,27 @@ public class XContentHelperTests extends ESTestCase { assertThat(content, Matchers.equalTo(expected)); } - + public void testToXContentWrapInObject() throws IOException { + boolean wrapInObject = randomBoolean(); + XContentType xContentType = randomFrom(XContentType.values()); + ToXContent toXContent = (builder, params) -> { + if (wrapInObject == false) { + builder.startObject(); + } + builder.field("field", "value"); + if (wrapInObject == false) { + builder.endObject(); + } + return builder; + }; + BytesReference bytes = XContentHelper.toXContent(toXContent, xContentType, wrapInObject); + try (XContentParser parser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, bytes)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertTrue(parser.nextToken().isValue()); + assertEquals("value", parser.text()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + } } diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 90d7e5e5a9e..d5e9ddcde9a 100644 --- 
a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -82,8 +82,6 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.TcpTransport; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index b106ca64227..f5a5391436a 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -392,7 +392,7 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { private final boolean upgrade; public MockMetaDataIndexUpgradeService(boolean upgrade) { - super(Settings.EMPTY, null, null); + super(Settings.EMPTY, null, null, null); this.upgrade = upgrade; } @Override diff --git a/core/src/test/java/org/elasticsearch/http/HttpServerTests.java b/core/src/test/java/org/elasticsearch/http/HttpServerTests.java index 2d827ebb6e4..db9eebfe5fc 100644 --- a/core/src/test/java/org/elasticsearch/http/HttpServerTests.java +++ b/core/src/test/java/org/elasticsearch/http/HttpServerTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import 
org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.rest.AbstractRestChannel; @@ -192,7 +193,7 @@ public class HttpServerTests extends ESTestCase { private final BytesReference content; private TestRestRequest(String path, String content) { - super(Collections.emptyMap(), path); + super(NamedXContentRegistry.EMPTY, Collections.emptyMap(), path); this.content = new BytesArray(content); } diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 6b24c2e356c..e9b52589b9c 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -144,8 +144,8 @@ public class IndexModuleTests extends ESTestCase { } private IndexService newIndexService(IndexModule module) throws IOException { - return module.newIndexService(nodeEnvironment, deleter, circuitBreakerService, bigArrays, threadPool, scriptService, - indicesQueriesRegistry, clusterService, null, indicesQueryCache, mapperRegistry, shardId -> {}, + return module.newIndexService(nodeEnvironment, xContentRegistry(), deleter, circuitBreakerService, bigArrays, threadPool, + scriptService, indicesQueriesRegistry, clusterService, null, indicesQueryCache, mapperRegistry, shardId -> {}, new IndicesFieldDataCache(settings, listener)); } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java index 4a5a0b95672..dedd478e3bf 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java @@ -104,7 +104,7 @@ public class AnalysisRegistryTests extends ESTestCase { 
assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported")); } - public void testBackCompatOverrideDefaultIndexAnalyzer() { + public void testBackCompatOverrideDefaultIndexAnalyzer() throws IOException { Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); @@ -113,6 +113,8 @@ public class AnalysisRegistryTests extends ESTestCase { assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); + assertWarnings("setting [index.analysis.analyzer.default_index] is deprecated, use [index.analysis.analyzer.default] " + + "instead for index [index]"); } public void testOverrideDefaultSearchAnalyzer() { @@ -125,7 +127,7 @@ public class AnalysisRegistryTests extends ESTestCase { assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); } - public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() { + public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() throws IOException { Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); @@ -137,6 +139,8 @@ public class AnalysisRegistryTests extends ESTestCase { assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); 
assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); + assertWarnings("setting [index.analysis.analyzer.default_index] is deprecated, use [index.analysis.analyzer.default] " + + "instead for index [index]"); } public void testConfigureCamelCaseTokenFilter() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index ea89acbd8f8..e49e21bd2ff 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -89,7 +89,7 @@ public class CodecTests extends ESTestCase { dir.close(); } - private static CodecService createCodecService() throws IOException { + private CodecService createCodecService() throws IOException { Settings nodeSettings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); @@ -97,7 +97,8 @@ public class CodecTests extends ESTestCase { SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap()); IndexAnalyzers indexAnalyzers = createTestAnalysis(settings, nodeSettings).indexAnalyzers; MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()); - MapperService service = new MapperService(settings, indexAnalyzers, similarityService, mapperRegistry, () -> null); + MapperService service = new MapperService(settings, indexAnalyzers, xContentRegistry(), similarityService, mapperRegistry, + () -> null); return new CodecService(service, ESLoggerFactory.getLogger("test")); } diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 523efb6829c..76e1b37e4dc 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -85,6 +85,7 @@ import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; @@ -378,7 +379,7 @@ public class InternalEngineTests extends ESTestCase { }; EngineConfig config = new EngineConfig(openMode, shardId, threadPool, indexSettings, null, store, deletionPolicy, mergePolicy, iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), listener, - new TranslogHandler(shardId.getIndexName(), logger), IndexSearcher.getDefaultQueryCache(), + new TranslogHandler(xContentRegistry(), shardId.getIndexName(), logger), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), refreshListener, maxUnsafeAutoIdTimestamp); @@ -2305,7 +2306,7 @@ public class InternalEngineTests extends ESTestCase { public final AtomicInteger recoveredOps = new AtomicInteger(0); - public TranslogHandler(String indexName, Logger logger) { + public TranslogHandler(NamedXContentRegistry xContentRegistry, String indexName, Logger logger) { super(new ShardId("test", "_na_", 0), null, logger); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); Index index = new Index(indexName, "_na_"); @@ -2314,7 +2315,8 @@ public class InternalEngineTests extends ESTestCase { IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, defaultAnalyzer, defaultAnalyzer, defaultAnalyzer, Collections.emptyMap()); SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); MapperRegistry mapperRegistry = new 
IndicesModule(Collections.emptyList()).getMapperRegistry(); - mapperService = new MapperService(indexSettings, indexAnalyzers, similarityService, mapperRegistry, () -> null); + mapperService = new MapperService(indexSettings, indexAnalyzers, xContentRegistry, similarityService, mapperRegistry, + () -> null); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/get/GetFieldTests.java b/core/src/test/java/org/elasticsearch/index/get/GetFieldTests.java new file mode 100644 index 00000000000..1058420afa7 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/get/GetFieldTests.java @@ -0,0 +1,101 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.get; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.mapper.ParentFieldMapper; +import org.elasticsearch.index.mapper.RoutingFieldMapper; +import org.elasticsearch.index.mapper.UidFieldMapper; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.RandomObjects; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Supplier; + +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; + +public class GetFieldTests extends ESTestCase { + + public void testToXContent() throws IOException { + GetField getField = new GetField("field", Arrays.asList("value1", "value2")); + String output = Strings.toString(getField, true); + assertEquals("{\"field\":[\"value1\",\"value2\"]}", output); + } + + public void testEqualsAndHashcode() { + checkEqualsAndHashCode(randomGetField(XContentType.JSON).v1(), GetFieldTests::copyGetField, GetFieldTests::mutateGetField); + } + + public void testToAndFromXContent() throws Exception { + XContentType xContentType = randomFrom(XContentType.values()); + Tuple tuple = randomGetField(xContentType); + GetField getField = tuple.v1(); + GetField expectedGetField = tuple.v2(); + BytesReference originalBytes = toXContent(getField, xContentType, true); + //test that we can parse what we print out + GetField parsedGetField; + try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + //we need to move to the next 
token, the start object one that we manually added is not expected + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + parsedGetField = GetField.fromXContent(parser); + assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + assertEquals(expectedGetField, parsedGetField); + BytesReference finalBytes = toXContent(parsedGetField, xContentType, true); + assertToXContentEquivalent(originalBytes, finalBytes, xContentType); + } + + private static GetField copyGetField(GetField getField) { + return new GetField(getField.getName(), getField.getValues()); + } + + private static GetField mutateGetField(GetField getField) { + List> mutations = new ArrayList<>(); + mutations.add(() -> new GetField(randomUnicodeOfCodepointLength(15), getField.getValues())); + mutations.add(() -> new GetField(getField.getName(), randomGetField(XContentType.JSON).v1().getValues())); + return randomFrom(mutations).get(); + } + + public static Tuple randomGetField(XContentType xContentType) { + if (randomBoolean()) { + String fieldName = randomFrom(ParentFieldMapper.NAME, RoutingFieldMapper.NAME, UidFieldMapper.NAME); + GetField getField = new GetField(fieldName, Collections.singletonList(randomAsciiOfLengthBetween(3, 10))); + return Tuple.tuple(getField, getField); + } + String fieldName = randomAsciiOfLengthBetween(3, 10); + Tuple, List> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType); + GetField input = new GetField(fieldName, tuple.v1()); + GetField expected = new GetField(fieldName, tuple.v2()); + return Tuple.tuple(input, expected); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/get/GetResultTests.java b/core/src/test/java/org/elasticsearch/index/get/GetResultTests.java new file mode 100644 index 00000000000..d82c4221404 --- /dev/null +++ 
b/core/src/test/java/org/elasticsearch/index/get/GetResultTests.java @@ -0,0 +1,152 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.get; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.RandomObjects; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; + +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.index.get.GetFieldTests.randomGetField; +import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; + +public class GetResultTests extends ESTestCase { + + public void testToAndFromXContent() throws Exception { + XContentType 
xContentType = randomFrom(XContentType.values()); + Tuple tuple = randomGetResult(xContentType); + GetResult getResult = tuple.v1(); + GetResult expectedGetResult = tuple.v2(); + BytesReference originalBytes = toXContent(getResult, xContentType, false); + //test that we can parse what we print out + GetResult parsedGetResult; + try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + parsedGetResult = GetResult.fromXContent(parser); + assertNull(parser.nextToken()); + } + assertEquals(expectedGetResult, parsedGetResult); + //print the parsed object out and test that the output is the same as the original output + BytesReference finalBytes = toXContent(parsedGetResult, xContentType, false); + assertToXContentEquivalent(originalBytes, finalBytes, xContentType); + //check that the source stays unchanged, no shuffling of keys nor anything like that + assertEquals(expectedGetResult.sourceAsString(), parsedGetResult.sourceAsString()); + } + + public void testToXContent() throws IOException { + { + GetResult getResult = new GetResult("index", "type", "id", 1, true, new BytesArray("{ \"field1\" : " + + "\"value1\", \"field2\":\"value2\"}"), Collections.singletonMap("field1", new GetField("field1", + Collections.singletonList("value1")))); + String output = Strings.toString(getResult, false); + assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"found\":true,\"_source\":{ \"field1\" " + + ": \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", output); + } + { + GetResult getResult = new GetResult("index", "type", "id", 1, false, null, null); + String output = Strings.toString(getResult, false); + assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"found\":false}", output); + } + } + + public void testEqualsAndHashcode() { + checkEqualsAndHashCode(randomGetResult(XContentType.JSON).v1(), GetResultTests::copyGetResult, GetResultTests::mutateGetResult); + } + + public 
static GetResult copyGetResult(GetResult getResult) { + return new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()); + } + + public static GetResult mutateGetResult(GetResult getResult) { + List> mutations = new ArrayList<>(); + mutations.add(() -> new GetResult(randomUnicodeOfLength(15), getResult.getType(), getResult.getId(), getResult.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), randomUnicodeOfLength(15), getResult.getId(), getResult.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), randomUnicodeOfLength(15), getResult.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), randomPositiveLong(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), + getResult.isExists() == false, getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), + getResult.isExists(), RandomObjects.randomSource(random()), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), randomGetFields(XContentType.JSON).v1())); + return randomFrom(mutations).get(); + } + + public static Tuple randomGetResult(XContentType xContentType) { + final String index = randomAsciiOfLengthBetween(3, 10); + final 
String type = randomAsciiOfLengthBetween(3, 10); + final String id = randomAsciiOfLengthBetween(3, 10); + final long version; + final boolean exists; + BytesReference source = null; + Map fields = null; + Map expectedFields = null; + if (frequently()) { + version = randomPositiveLong(); + exists = true; + if (frequently()) { + source = RandomObjects.randomSource(random()); + } + if (randomBoolean()) { + Tuple, Map> tuple = randomGetFields(xContentType); + fields = tuple.v1(); + expectedFields = tuple.v2(); + } + } else { + version = -1; + exists = false; + } + GetResult getResult = new GetResult(index, type, id, version, exists, source, fields); + GetResult expectedGetResult = new GetResult(index, type, id, version, exists, source, expectedFields); + return Tuple.tuple(getResult, expectedGetResult); + } + + private static Tuple,Map> randomGetFields(XContentType xContentType) { + int numFields = randomIntBetween(2, 10); + Map fields = new HashMap<>(numFields); + Map expectedFields = new HashMap<>(numFields); + for (int i = 0; i < numFields; i++) { + Tuple tuple = randomGetField(xContentType); + GetField getField = tuple.v1(); + GetField expectedGetField = tuple.v2(); + fields.put(getField.getName(), getField); + expectedFields.put(expectedGetField.getName(), expectedGetField); + } + return Tuple.tuple(fields, expectedFields); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index b22b48767a8..a93cf0153cf 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -71,7 +71,8 @@ public class DateFieldTypeTests extends FieldTypeTestCase { } public void testIsFieldWithinQueryEmptyReader() throws IOException { - QueryRewriteContext context = new QueryRewriteContext(null, null, null, null, null, null, () -> nowInMillis); + QueryRewriteContext 
context = new QueryRewriteContext(null, null, null, xContentRegistry(), null, null, null, + () -> nowInMillis); IndexReader reader = new MultiReader(); DateFieldType ft = new DateFieldType(); ft.setName("my_date"); @@ -81,7 +82,8 @@ public class DateFieldTypeTests extends FieldTypeTestCase { private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader, DateTimeZone zone, DateMathParser alternateFormat) throws IOException { - QueryRewriteContext context = new QueryRewriteContext(null, null, null, null, null, null, () -> nowInMillis); + QueryRewriteContext context = new QueryRewriteContext(null, null, null, xContentRegistry(), null, null, null, + () -> nowInMillis); assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", randomBoolean(), randomBoolean(), null, null, context)); assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20", @@ -128,7 +130,8 @@ public class DateFieldTypeTests extends FieldTypeTestCase { DateFieldType ft2 = new DateFieldType(); ft2.setName("my_date2"); - QueryRewriteContext context = new QueryRewriteContext(null, null, null, null, null, null, () -> nowInMillis); + QueryRewriteContext context = new QueryRewriteContext(null, null, null, xContentRegistry(), null, null, null, + () -> nowInMillis); assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null, context)); IOUtils.close(reader, w, dir); } @@ -163,7 +166,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { QueryShardContext context = new QueryShardContext(0, new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), - null, null, null, null, null, null, null, null, () -> nowInMillis); + null, null, null, null, null, xContentRegistry(), null, null, null, () -> nowInMillis); MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); String date = "2015-10-12T14:10:55"; @@ -182,7 +185,7 @@ 
public class DateFieldTypeTests extends FieldTypeTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).build(); QueryShardContext context = new QueryShardContext(0, new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), - null, null, null, null, null, null, null, null, () -> nowInMillis); + null, null, null, null, null, xContentRegistry(), null, null, null, () -> nowInMillis); MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); String date1 = "2015-10-12T14:10:55"; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java index f41b36068ad..24023281f5e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.DynamicTemplate.XContentFieldType; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -43,7 +44,7 @@ public class DynamicTemplateTests extends ESTestCase { assertEquals("Illegal dynamic template parameter: [random_param]", e.getMessage()); } - public void testParseUnknownMatchType() { + public void testParseUnknownMatchType() throws IOException { Map templateDef = new HashMap<>(); templateDef.put("match_mapping_type", "short"); templateDef.put("mapping", Collections.singletonMap("store", true)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java index af5e2553be7..46ffc4e5864 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java @@ -64,7 +64,8 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); }); }; DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), - indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext); + indexService.getIndexAnalyzers(), indexService.xContentRegistry(), indexService.similarityService(), mapperRegistry, + queryShardContext); DocumentMapper documentMapper = parser.parse("type", new CompressedXContent( XContentFactory.jsonBuilder().startObject().startObject("type") .startObject(ExternalMetadataMapper.CONTENT_TYPE) @@ -112,7 +113,8 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); }); }; DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), - indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext); + indexService.getIndexAnalyzers(), indexService.xContentRegistry(), indexService.similarityService(), mapperRegistry, + queryShardContext); DocumentMapper documentMapper = parser.parse("type", new CompressedXContent( XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") @@ -175,7 +177,8 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); }); }; DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), - indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext); + 
indexService.getIndexAnalyzers(), indexService.xContentRegistry(), indexService.similarityService(), mapperRegistry, + queryShardContext); DocumentMapper documentMapper = parser.parse("type", new CompressedXContent( XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index 7de66511f59..642f770a227 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -237,9 +237,11 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { Supplier queryShardContext = () -> { return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); }); }; - MapperService mapperService = new MapperService(indexService.getIndexSettings(), indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext); + MapperService mapperService = new MapperService(indexService.getIndexSettings(), indexService.getIndexAnalyzers(), + indexService.xContentRegistry(), indexService.similarityService(), mapperRegistry, queryShardContext); DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService, - indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext); + indexService.getIndexAnalyzers(), indexService.xContentRegistry(), indexService.similarityService(), mapperRegistry, + queryShardContext); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDocument = mapper.parse("index", "type", "id", new BytesArray("{}")); diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 42e88015169..7dc29f6b4ce 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -160,12 +160,13 @@ public class MapperServiceTests extends ESSingleNodeTestCase { assertThat(e.getMessage(), containsString("Limit of mapping depth [1] in index [test1] has been exceeded")); } - public void testUnmappedFieldType() { + public void testUnmappedFieldType() throws IOException { MapperService mapperService = createIndex("index").mapperService(); assertThat(mapperService.unmappedFieldType("keyword"), instanceOf(KeywordFieldType.class)); assertThat(mapperService.unmappedFieldType("long"), instanceOf(NumberFieldType.class)); // back compat assertThat(mapperService.unmappedFieldType("string"), instanceOf(KeywordFieldType.class)); + assertWarnings("[unmapped_type:string] should be replaced with [unmapped_type:keyword]"); } public void testMergeWithMap() throws Throwable { @@ -173,13 +174,13 @@ public class MapperServiceTests extends ESSingleNodeTestCase { MapperService mapperService = indexService1.mapperService(); Map> mappings = new HashMap<>(); - mappings.put(MapperService.DEFAULT_MAPPING, MapperService.parseMapping("{}")); + mappings.put(MapperService.DEFAULT_MAPPING, MapperService.parseMapping(xContentRegistry(), "{}")); MapperException e = expectThrows(MapperParsingException.class, () -> mapperService.merge(mappings, MergeReason.MAPPING_UPDATE, false)); assertThat(e.getMessage(), startsWith("Failed to parse mapping [" + MapperService.DEFAULT_MAPPING + "]: ")); mappings.clear(); - mappings.put("type1", MapperService.parseMapping("{}")); + mappings.put("type1", MapperService.parseMapping(xContentRegistry(), "{}")); e = expectThrows( MapperParsingException.class, () -> mapperService.merge(mappings, 
MergeReason.MAPPING_UPDATE, false)); @@ -206,9 +207,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase { .startObject("properties") .startObject("field") .field("type", "text") - .startObject("norms") - .field("enabled", false) - .endObject() + .field("norms", false) .endObject() .endObject().endObject().bytes()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java index 5a5e2ddb509..44484530856 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java @@ -37,7 +37,7 @@ public class MultiFieldCopyToMapperTests extends ESTestCase { XContentBuilder mapping = createMappinmgWithCopyToInMultiField(); // first check that for newer versions we throw exception if copy_to is found withing multi field - MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY); + MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY); try { mapperService.parse("type", new CompressedXContent(mapping.string()), true); fail("Parsing should throw an exception because the mapping contains a copy_to in a multi field"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldIncludeInAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldIncludeInAllMapperTests.java new file mode 100644 index 00000000000..8c6ee8da042 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldIncludeInAllMapperTests.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.MapperTestUtils; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; + + +public class MultiFieldIncludeInAllMapperTests extends ESTestCase { + public void testExceptionForIncludeInAllInMultiFields() throws IOException { + XContentBuilder mapping = createMappingWithIncludeInAllInMultiField(); + + // first check that for newer versions we throw exception if include_in_all is found withing multi field + MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY); + Exception e = expectThrows(MapperParsingException.class, () -> + mapperService.parse("type", new CompressedXContent(mapping.string()), true)); + assertEquals("include_in_all in multi fields is not allowed. 
Found the include_in_all in field [c] which is within a multi field.", + e.getMessage()); + } + + private static XContentBuilder createMappingWithIncludeInAllInMultiField() throws IOException { + XContentBuilder mapping = jsonBuilder(); + mapping.startObject() + .startObject("type") + .startObject("properties") + .startObject("a") + .field("type", "text") + .endObject() + .startObject("b") + .field("type", "text") + .startObject("fields") + .startObject("c") + .field("type", "text") + .field("include_in_all", false) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + return mapping; + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java index 9ee9ed16bd6..2b3aad750dd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java @@ -103,7 +103,7 @@ public class ParentFieldMapperTests extends ESSingleNodeTestCase { IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, namedAnalyzer, namedAnalyzer, namedAnalyzer, Collections.emptyMap()); SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); - MapperService mapperService = new MapperService(indexSettings, indexAnalyzers, similarityService, + MapperService mapperService = new MapperService(indexSettings, indexAnalyzers, xContentRegistry(), similarityService, new IndicesModule(emptyList()).getMapperRegistry(), () -> null); XContentBuilder mappingSource = jsonBuilder().startObject().startObject("some_type") .startObject("properties") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index 9010164fe03..da822349909 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -74,8 +74,8 @@ public class RangeFieldTypeTests extends FieldTypeTestCase { Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); - QueryShardContext context = new QueryShardContext(0, idxSettings, null, null, null, null, null, null, null, null, - () -> nowInMillis); + QueryShardContext context = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(), null, + null, null, () -> nowInMillis); RangeFieldMapper.RangeFieldType ft = new RangeFieldMapper.RangeFieldType(type); ft.setName(FIELDNAME); ft.setIndexOptions(IndexOptions.DOCS); diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java index 49266ebe9fd..b2d1b957bef 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -19,13 +19,8 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.search.GeoPointInBBoxQuery; -import org.elasticsearch.Version; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.index.mapper.MappedFieldType; @@ -40,7 +35,6 @@ import java.io.IOException; import static org.hamcrest.CoreMatchers.containsString; import static 
org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.equalTo; public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase { /** Randomly generate either NaN or one of the two infinity values. */ @@ -118,7 +112,7 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase super.testToQuery()); + QueryShardException e = expectThrows(QueryShardException.class, super::testToQuery); assertEquals("failed to find geo_point field [mapped_geo_point]", e.getMessage()); } @@ -412,7 +406,7 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); + + parseQuery(json); + assertWarnings("Deprecated field [coerce] used, replaced by [validation_method]"); } - public void testFromJsonIgnoreMalformedFails() throws IOException { + public void testFromJsonIgnoreMalformedIsDeprecated() throws IOException { String json = "{\n" + " \"geo_bounding_box\" : {\n" + @@ -444,8 +439,8 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); + parseQuery(json); + assertWarnings("Deprecated field [ignore_malformed] used, replaced by [validation_method]"); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java index 6c92fde6843..dec1ee0a7df 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java @@ -316,7 +316,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); + parseQuery(json); + assertWarnings("Deprecated field 
[optimize_bbox] used, replaced by [no replacement: " + + "`optimize_bbox` is no longer supported due to recent improvements]"); } - public void testFromCoerceFails() throws IOException { + public void testFromCoerceIsDeprecated() throws IOException { String json = "{\n" + " \"geo_distance\" : {\n" + @@ -345,11 +346,11 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); + parseQuery(json); + assertWarnings("Deprecated field [coerce] used, replaced by [validation_method]"); } - public void testFromJsonIgnoreMalformedFails() throws IOException { + public void testFromJsonIgnoreMalformedIsDeprecated() throws IOException { String json = "{\n" + " \"geo_distance\" : {\n" + @@ -361,8 +362,8 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); + parseQuery(json); + assertWarnings("Deprecated field [ignore_malformed] used, replaced by [validation_method]"); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java index b77ff3bbdef..6df61d8cb45 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java @@ -139,8 +139,8 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase parseQuery(builder.string())); - assertEquals("Deprecated field [normalize] used, replaced by [use validation_method instead]", e.getMessage()); + parseQuery(builder.string()); + assertWarnings("Deprecated field [normalize] used, replaced by [validation_method]"); } public void testParsingAndToQueryParsingExceptions() throws IOException { @@ -265,9 +265,8 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase 
parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); - + parseQuery(json); + assertWarnings("Deprecated field [ignore_malformed] used, replaced by [validation_method]"); } public void testFromJsonCoerceDeprecated() throws IOException { @@ -282,8 +281,8 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); + parseQuery(json); + assertWarnings("Deprecated field [coerce] used, replaced by [validation_method]"); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java index ca402782e8c..cf4b0617ea9 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -149,12 +148,9 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase parseQuery(builder.string())); - assertEquals("Deprecated field [type] used, expected [parent_type] instead", e.getMessage()); - - HasParentQueryBuilder queryBuilder = (HasParentQueryBuilder) parseQuery(builder.string(), ParseFieldMatcher.EMPTY); + HasParentQueryBuilder queryBuilder = (HasParentQueryBuilder) parseQuery(builder.string()); assertEquals("foo", queryBuilder.type()); - checkWarningHeaders("Deprecated field [type] used, expected [parent_type] instead"); + assertWarnings("Deprecated field [type] 
used, expected [parent_type] instead"); } public void testToQueryInnerQueryType() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java index 5913a038661..de7762246ec 100644 --- a/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java @@ -164,11 +164,8 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase IdsQueryBuilder parsed = (IdsQueryBuilder) parseQuery(contentString, ParseFieldMatcher.EMPTY); assertEquals(testQuery, parsed); - ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(contentString)); - checkWarningHeaders("Deprecated field [_type] used, expected [type] instead"); - assertEquals("Deprecated field [_type] used, expected [type] instead", e.getMessage()); - assertEquals(3, e.getLineNumber()); - assertEquals(19, e.getColumnNumber()); + parseQuery(contentString); + assertWarnings("Deprecated field [_type] used, expected [type] instead"); //array of types can also be called types rather than type final String contentString2 = "{\n" + @@ -180,10 +177,7 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase parsed = (IdsQueryBuilder) parseQuery(contentString2, ParseFieldMatcher.EMPTY); assertEquals(testQuery, parsed); - e = expectThrows(ParsingException.class, () -> parseQuery(contentString2)); - checkWarningHeaders("Deprecated field [types] used, expected [type] instead"); - assertEquals("Deprecated field [types] used, expected [type] instead", e.getMessage()); - assertEquals(3, e.getLineNumber()); - assertEquals(19, e.getColumnNumber()); + parseQuery(contentString2); + assertWarnings("Deprecated field [types] used, expected [type] instead"); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java 
b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index a32eafd850c..a5684863e51 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -318,13 +318,8 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase parseQuery(json, ParseFieldMatcher.STRICT)); - assertThat(e.getMessage(), - containsString("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]")); } public void testLegacyMatchPhraseQuery() throws IOException { @@ -351,16 +346,9 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase parseQuery(json, ParseFieldMatcher.STRICT)); - assertThat(e.getMessage(), - containsString("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]")); } public void testFuzzinessOnNonStringField() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java index 73449dbf5d9..ccf514dd41c 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java @@ -48,7 +48,8 @@ public class QueryShardContextTests extends ESTestCase { when(mapperService.getIndexSettings()).thenReturn(indexSettings); final long nowInMillis = randomPositiveLong(); QueryShardContext context = new QueryShardContext( - 0, indexSettings, null, null, mapperService, null, null, null, null, null, () -> nowInMillis); + 0, indexSettings, null, null, mapperService, null, null, xContentRegistry(), null, null, null, + () -> nowInMillis); context.setAllowUnmappedFields(false); MappedFieldType fieldType = new TextFieldMapper.TextFieldType(); diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java 
b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index 09627d00d76..b48179e0a73 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -26,7 +26,6 @@ import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.lucene.BytesRefs; @@ -39,7 +38,6 @@ import org.elasticsearch.test.AbstractQueryTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.chrono.ISOChronology; -import org.locationtech.spatial4j.shape.SpatialRelation; import java.io.IOException; import java.util.HashMap; @@ -388,14 +386,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase parseQuery(deprecatedJson, ParseFieldMatcher.STRICT)); - assertEquals("Deprecated field [_name] used, replaced by [query name is not supported in short version of range query]", - e.getMessage()); + assertNotNull(parseQuery(deprecatedJson)); + assertWarnings("Deprecated field [_name] used, replaced by [query name is not supported in short version of range query]"); } public void testRewriteDateToMatchAll() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java index fd6f0670a57..607f3f924d3 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java @@ -37,7 +37,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { IndexService indexService = 
createIndex("test"); IndexReader reader = new MultiReader(); QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(), - null, null, null, null, reader, null); + null, null, xContentRegistry(), null, null, reader, null); RangeQueryBuilder range = new RangeQueryBuilder("foo"); assertEquals(Relation.DISJOINT, range.getRelation(context)); } @@ -54,7 +54,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(), - null, null, null, null, null, null); + null, null, xContentRegistry(), null, null, null, null); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // can't make assumptions on a missing reader, so it must return INTERSECT assertEquals(Relation.INTERSECTS, range.getRelation(context)); @@ -73,7 +73,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); IndexReader reader = new MultiReader(); QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(), - null, null, null, null, reader, null); + null, null, xContentRegistry(), null, null, reader, null); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // no values -> DISJOINT assertEquals(Relation.DISJOINT, range.getRelation(context)); diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryParserTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryParserTests.java index 5e971abb2bd..2d7809b7df6 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryParserTests.java @@ -147,8 +147,8 @@ public 
class SimpleQueryParserTests extends ESTestCase { .build(); IndexMetaData indexState = IndexMetaData.builder("index").settings(indexSettings).build(); IndexSettings settings = new IndexSettings(indexState, Settings.EMPTY); - QueryShardContext mockShardContext = new QueryShardContext(0, settings, null, null, null, null, null, indicesQueriesRegistry, - null, null, System::currentTimeMillis) { + QueryShardContext mockShardContext = new QueryShardContext(0, settings, null, null, null, null, null, xContentRegistry(), + indicesQueriesRegistry, null, null, System::currentTimeMillis) { @Override public MappedFieldType fieldMapper(String name) { return new MockFieldMapper.FakeFieldType(); @@ -161,7 +161,7 @@ public class SimpleQueryParserTests extends ESTestCase { assertEquals(new TermQuery(new Term("foo.quote", "bar")), parser.parse("\"bar\"")); // Now check what happens if foo.quote does not exist - mockShardContext = new QueryShardContext(0, settings, null, null, null, null, null, indicesQueriesRegistry, + mockShardContext = new QueryShardContext(0, settings, null, null, null, null, null, xContentRegistry(), indicesQueriesRegistry, null, null, System::currentTimeMillis) { @Override public MappedFieldType fieldMapper(String name) { diff --git a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 57e40a03295..358c3006e7d 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -116,9 +116,10 @@ public class RefreshListenersTests extends ESTestCase { // we don't need to notify anybody in this test } }; + TranslogHandler translogHandler = new TranslogHandler(xContentRegistry(), shardId.getIndexName(), logger); EngineConfig config = new EngineConfig(EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG, shardId, threadPool, indexSettings, null, store, new 
SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()), newMergePolicy(), iwc.getAnalyzer(), - iwc.getSimilarity(), new CodecService(null, logger), eventListener, new TranslogHandler(shardId.getIndexName(), logger), + iwc.getSimilarity(), new CodecService(null, logger), eventListener, translogHandler, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), listeners, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); engine = new InternalEngine(config); diff --git a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java index bb8943f19d0..4ef26b9357c 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.MockEngineFactoryPlugin; @@ -112,7 +113,7 @@ public class TruncateTranslogIT extends ESIntegTestCase { // Try running it before the shard is closed, it should flip out because it can't acquire the lock try { logger.info("--> running truncate while index is open on [{}]", translogDir.toAbsolutePath()); - ttc.execute(t, options, new HashMap()); + ttc.execute(t, options, null /* TODO: env should be real here, and ttc should actually use it... 
*/); fail("expected the truncate command to fail not being able to acquire the lock"); } catch (Exception e) { assertThat(e.getMessage(), containsString("Failed to lock shard's directory")); @@ -160,7 +161,7 @@ public class TruncateTranslogIT extends ESIntegTestCase { OptionSet options = parser.parse("-d", translogDir.toAbsolutePath().toString(), "-b"); logger.info("--> running truncate translog command for [{}]", translogDir.toAbsolutePath()); - ttc.execute(t, options, new HashMap()); + ttc.execute(t, options, null /* TODO: env should be real here, and ttc should actually use it... */); logger.info("--> output:\n{}", t.getOutput()); } diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index 6f379e48bae..b568ae9276d 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -136,6 +136,10 @@ public class AnalysisModuleTests extends ModuleTestCase { IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings); assertThat(indexAnalyzers.get("default").analyzer(), is(instanceOf(KeywordAnalyzer.class))); assertThat(indexAnalyzers.get("default_search").analyzer(), is(instanceOf(EnglishAnalyzer.class))); + assertWarnings("setting [index.analysis.analyzer.foobar.alias] is only allowed on index [test] because it was created before " + + "5.x; analyzer aliases can no longer be created on new indices.", + "setting [index.analysis.analyzer.foobar_search.alias] is only allowed on index [test] because it was created before " + + "5.x; analyzer aliases can no longer be created on new indices."); } public void testAnalyzerAliasReferencesAlias() throws IOException { @@ -154,6 +158,10 @@ public class AnalysisModuleTests extends ModuleTestCase { assertThat(indexAnalyzers.get("default").analyzer(), 
is(instanceOf(GermanAnalyzer.class))); // analyzer types are bound early before we resolve aliases assertThat(indexAnalyzers.get("default_search").analyzer(), is(instanceOf(StandardAnalyzer.class))); + assertWarnings("setting [index.analysis.analyzer.foobar.alias] is only allowed on index [test] because it was created before " + + "5.x; analyzer aliases can no longer be created on new indices.", + "setting [index.analysis.analyzer.foobar_search.alias] is only allowed on index [test] because it was created before " + + "5.x; analyzer aliases can no longer be created on new indices."); } public void testAnalyzerAliasDefault() throws IOException { @@ -168,6 +176,8 @@ public class AnalysisModuleTests extends ModuleTestCase { IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings); assertThat(indexAnalyzers.get("default").analyzer(), is(instanceOf(KeywordAnalyzer.class))); assertThat(indexAnalyzers.get("default_search").analyzer(), is(instanceOf(KeywordAnalyzer.class))); + assertWarnings("setting [index.analysis.analyzer.foobar.alias] is only allowed on index [test] because it was created before " + + "5.x; analyzer aliases can no longer be created on new indices."); } public void testAnalyzerAliasMoreThanOnce() throws IOException { @@ -183,6 +193,10 @@ public class AnalysisModuleTests extends ModuleTestCase { AnalysisRegistry newRegistry = getNewRegistry(settings); IllegalStateException ise = expectThrows(IllegalStateException.class, () -> getIndexAnalyzers(newRegistry, settings)); assertEquals("alias [default] is already used by [foobar]", ise.getMessage()); + assertWarnings("setting [index.analysis.analyzer.foobar.alias] is only allowed on index [test] because it was created before " + + "5.x; analyzer aliases can no longer be created on new indices.", + "setting [index.analysis.analyzer.foobar1.alias] is only allowed on index [test] because it was created before " + + "5.x; analyzer aliases can no longer be created on new indices."); } public void 
testAnalyzerAliasNotAllowedPost5x() throws IOException { @@ -353,6 +367,8 @@ public class AnalysisModuleTests extends ModuleTestCase { } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")); } + assertWarnings("setting [index.analysis.analyzer.valid_name.alias] is only allowed on index [test] because it was " + + "created before 5.x; analyzer aliases can no longer be created on new indices."); } public void testDeprecatedPositionOffsetGap() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index d1d266a42a4..a057c576f85 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -61,11 +61,11 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; @@ -112,7 +112,7 @@ public class ClusterStateChanges extends AbstractComponent { private final TransportClusterRerouteAction transportClusterRerouteAction; private final TransportCreateIndexAction transportCreateIndexAction; - public 
ClusterStateChanges() { + public ClusterStateChanges(NamedXContentRegistry xContentRegistry) { super(Settings.builder().put(PATH_HOME_SETTING.getKey(), "dummy").build()); allocationService = new AllocationService(settings, new AllocationDeciders(settings, @@ -156,7 +156,7 @@ public class ClusterStateChanges extends AbstractComponent { // services TransportService transportService = new TransportService(settings, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, clusterSettings); - MetaDataIndexUpgradeService metaDataIndexUpgradeService = new MetaDataIndexUpgradeService(settings, null, null) { + MetaDataIndexUpgradeService metaDataIndexUpgradeService = new MetaDataIndexUpgradeService(settings, xContentRegistry, null, null) { // metaData upgrader should do nothing @Override public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData, Version minimumIndexCompatibilityVersion) { @@ -170,7 +170,7 @@ public class ClusterStateChanges extends AbstractComponent { allocationService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, indicesService); MetaDataCreateIndexService createIndexService = new MetaDataCreateIndexService(settings, clusterService, indicesService, allocationService, new AliasValidator(settings), environment, - IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, threadPool); + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, threadPool, xContentRegistry); transportCloseIndexAction = new TransportCloseIndexAction(settings, transportService, clusterService, threadPool, indexStateService, clusterSettings, actionFilters, indexNameExpressionResolver, destructiveOperations); diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java index 4b21b8820c7..705e50ce92d 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java +++ 
b/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java @@ -74,7 +74,7 @@ import static org.mockito.Mockito.when; public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndicesClusterStateServiceTestCase { - private final ClusterStateChanges cluster = new ClusterStateChanges(); + private final ClusterStateChanges cluster = new ClusterStateChanges(xContentRegistry()); public void testRandomClusterStateUpdates() { // we have an IndicesClusterStateService per node in the cluster diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index 4e90f5346d2..5967354710c 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -192,6 +192,77 @@ public class PipelineStoreTests extends ESTestCase { } } + public void testDeleteUsingWildcard() { + HashMap pipelines = new HashMap<>(); + BytesArray definition = new BytesArray( + "{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}" + ); + pipelines.put("p1", new PipelineConfiguration("p1", definition)); + pipelines.put("p2", new PipelineConfiguration("p2", definition)); + pipelines.put("q1", new PipelineConfiguration("q1", definition)); + IngestMetadata ingestMetadata = new IngestMetadata(pipelines); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); + ClusterState previousClusterState = clusterState; + clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder() + .putCustom(IngestMetadata.TYPE, ingestMetadata)).build(); + store.innerUpdatePipelines(previousClusterState, clusterState); + assertThat(store.get("p1"), notNullValue()); + assertThat(store.get("p2"), notNullValue()); + assertThat(store.get("q1"), notNullValue()); + + // Delete pipeline matching wildcard + DeletePipelineRequest 
deleteRequest = new DeletePipelineRequest("p*"); + previousClusterState = clusterState; + clusterState = store.innerDelete(deleteRequest, clusterState); + store.innerUpdatePipelines(previousClusterState, clusterState); + assertThat(store.get("p1"), nullValue()); + assertThat(store.get("p2"), nullValue()); + assertThat(store.get("q1"), notNullValue()); + + // Exception if we used name which does not exist + try { + store.innerDelete(new DeletePipelineRequest("unknown"), clusterState); + fail("exception expected"); + } catch (ResourceNotFoundException e) { + assertThat(e.getMessage(), equalTo("pipeline [unknown] is missing")); + } + + // match all wildcard works on last remaining pipeline + DeletePipelineRequest matchAllDeleteRequest = new DeletePipelineRequest("*"); + previousClusterState = clusterState; + clusterState = store.innerDelete(matchAllDeleteRequest, clusterState); + store.innerUpdatePipelines(previousClusterState, clusterState); + assertThat(store.get("p1"), nullValue()); + assertThat(store.get("p2"), nullValue()); + assertThat(store.get("q1"), nullValue()); + + // match all wildcard does not throw exception if none match + store.innerDelete(matchAllDeleteRequest, clusterState); + } + + public void testDeleteWithExistingUnmatchedPipelines() { + HashMap pipelines = new HashMap<>(); + BytesArray definition = new BytesArray( + "{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}" + ); + pipelines.put("p1", new PipelineConfiguration("p1", definition)); + IngestMetadata ingestMetadata = new IngestMetadata(pipelines); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); + ClusterState previousClusterState = clusterState; + clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder() + .putCustom(IngestMetadata.TYPE, ingestMetadata)).build(); + store.innerUpdatePipelines(previousClusterState, clusterState); + assertThat(store.get("p1"), notNullValue()); + + DeletePipelineRequest 
deleteRequest = new DeletePipelineRequest("z*"); + try { + store.innerDelete(deleteRequest, clusterState); + fail("exception expected"); + } catch (ResourceNotFoundException e) { + assertThat(e.getMessage(), equalTo("pipeline [z*] is missing")); + } + } + public void testGetPipelines() { Map configs = new HashMap<>(); configs.put("_id1", new PipelineConfiguration( diff --git a/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java index ef67006c204..3e3d310b33c 100644 --- a/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java @@ -52,7 +52,7 @@ public class BaseRestHandlerTests extends ESTestCase { final HashMap params = new HashMap<>(); params.put("consumed", randomAsciiOfLength(8)); params.put("unconsumed", randomAsciiOfLength(8)); - RestRequest request = new FakeRestRequest.Builder().withParams(params).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mock(NodeClient.class))); @@ -74,7 +74,7 @@ public class BaseRestHandlerTests extends ESTestCase { params.put("consumed", randomAsciiOfLength(8)); params.put("unconsumed-first", randomAsciiOfLength(8)); params.put("unconsumed-second", randomAsciiOfLength(8)); - RestRequest request = new FakeRestRequest.Builder().withParams(params).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mock(NodeClient.class))); @@ -108,7 +108,7 @@ public class 
BaseRestHandlerTests extends ESTestCase { params.put("tokenzier", randomAsciiOfLength(8)); params.put("very_close_to_parametre", randomAsciiOfLength(8)); params.put("very_far_from_every_consumed_parameter", randomAsciiOfLength(8)); - RestRequest request = new FakeRestRequest.Builder().withParams(params).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mock(NodeClient.class))); @@ -142,7 +142,7 @@ public class BaseRestHandlerTests extends ESTestCase { final HashMap params = new HashMap<>(); params.put("consumed", randomAsciiOfLength(8)); params.put("response_param", randomAsciiOfLength(8)); - RestRequest request = new FakeRestRequest.Builder().withParams(params).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mock(NodeClient.class)); assertTrue(executed.get()); @@ -162,7 +162,7 @@ public class BaseRestHandlerTests extends ESTestCase { params.put("filter_path", randomAsciiOfLength(8)); params.put("pretty", randomAsciiOfLength(8)); params.put("human", randomAsciiOfLength(8)); - RestRequest request = new FakeRestRequest.Builder().withParams(params).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mock(NodeClient.class)); assertTrue(executed.get()); @@ -196,7 +196,7 @@ public class BaseRestHandlerTests extends ESTestCase { params.put("bytes", randomAsciiOfLength(8)); params.put("size", randomAsciiOfLength(8)); params.put("time", randomAsciiOfLength(8)); - RestRequest request = 
new FakeRestRequest.Builder().withParams(params).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mock(NodeClient.class)); assertTrue(executed.get()); diff --git a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java index b9f97156df3..6e3d0025eb8 100644 --- a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.Index; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; @@ -40,10 +41,6 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class BytesRestResponseTests extends ESTestCase { @@ -161,7 +158,7 @@ public class BytesRestResponseTests extends ESTestCase { public void testResponseWhenPathContainsEncodingError() throws IOException { final String path = "%a"; - final RestRequest request = new RestRequest(Collections.emptyMap(), path) { + final RestRequest request = new RestRequest(NamedXContentRegistry.EMPTY, Collections.emptyMap(), path) { @Override public Method method() { return null; diff --git 
a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java index 228561089af..cce5c463759 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -58,7 +58,8 @@ public class RestControllerTests extends ESTestCase { restHeaders.put("header.1", "true"); restHeaders.put("header.2", "true"); restHeaders.put("header.3", "false"); - restController.dispatchRequest(new FakeRestRequest.Builder().withHeaders(restHeaders).build(), null, null, threadContext); + restController.dispatchRequest(new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(), null, null, + threadContext); assertNull(threadContext.getHeader("header.1")); assertNull(threadContext.getHeader("header.2")); assertEquals("true", threadContext.getHeader("header.3")); @@ -70,10 +71,10 @@ public class RestControllerTests extends ESTestCase { controller.registerHandler(RestRequest.Method.GET, "/trip", new FakeRestHandler(true)); controller.registerHandler(RestRequest.Method.GET, "/do-not-trip", new FakeRestHandler(false)); - assertTrue(controller.canTripCircuitBreaker(new FakeRestRequest.Builder().withPath("/trip").build())); + assertTrue(controller.canTripCircuitBreaker(new FakeRestRequest.Builder(xContentRegistry()).withPath("/trip").build())); // assume trip even on unknown paths - assertTrue(controller.canTripCircuitBreaker(new FakeRestRequest.Builder().withPath("/unknown-path").build())); - assertFalse(controller.canTripCircuitBreaker(new FakeRestRequest.Builder().withPath("/do-not-trip").build())); + assertTrue(controller.canTripCircuitBreaker(new FakeRestRequest.Builder(xContentRegistry()).withPath("/unknown-path").build())); + assertFalse(controller.canTripCircuitBreaker(new FakeRestRequest.Builder(xContentRegistry()).withPath("/do-not-trip").build())); } public void testRegisterAsDeprecatedHandler() { @@ 
-128,7 +129,7 @@ public class RestControllerTests extends ESTestCase { final RestController restController = new RestController(Settings.EMPTY, Collections.emptySet(), wrapper); restController.registerHandler(RestRequest.Method.GET, "/", handler); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - restController.dispatchRequest(new FakeRestRequest.Builder().build(), null, null, threadContext); + restController.dispatchRequest(new FakeRestRequest.Builder(xContentRegistry()).build(), null, null, threadContext); assertTrue(wrapperCalled.get()); assertFalse(handlerCalled.get()); } diff --git a/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java b/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java index 0511b3919b7..7f0184e20c6 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java +++ b/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -87,7 +88,7 @@ public class RestRequestTests extends ESTestCase { private static final class ContentRestRequest extends RestRequest { private final BytesArray content; public ContentRestRequest(String content, Map params) { - super(params, "not used by this test"); + super(NamedXContentRegistry.EMPTY, params, "not used by this test"); this.content = new BytesArray(content); } diff --git a/core/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java index 449f5852cfa..8a78cce8259 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java +++ 
b/core/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java @@ -83,7 +83,7 @@ public class RestMainActionTests extends ESTestCase { if (prettyPrint == false) { params.put("pretty", String.valueOf(prettyPrint)); } - RestRequest restRequest = new FakeRestRequest.Builder().withParams(params).build(); + RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); BytesRestResponse response = RestMainAction.convertMainResponse(mainResponse, restRequest, builder); assertNotNull(response); diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java index 812bc7a85a8..9de530d417d 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java @@ -50,7 +50,7 @@ public class RestNodesStatsActionTests extends ESTestCase { final HashMap params = new HashMap<>(); final String metric = randomAsciiOfLength(64); params.put("metric", metric); - final RestRequest request = new FakeRestRequest.Builder().withPath("/_nodes/stats").withParams(params).build(); + final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_nodes/stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> action.prepareRequest(request, mock(NodeClient.class))); @@ -60,7 +60,7 @@ public class RestNodesStatsActionTests extends ESTestCase { public void testUnrecognizedMetricDidYouMean() throws IOException { final HashMap params = new HashMap<>(); params.put("metric", "os,transprot,unrecognized"); - final RestRequest request = new FakeRestRequest.Builder().withPath("/_nodes/stats").withParams(params).build(); + final RestRequest request = new 
FakeRestRequest.Builder(xContentRegistry()).withPath("/_nodes/stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> action.prepareRequest(request, mock(NodeClient.class))); @@ -75,7 +75,7 @@ public class RestNodesStatsActionTests extends ESTestCase { final HashMap params = new HashMap<>(); final String metric = randomSubsetOf(1, RestNodesStatsAction.METRICS.keySet()).get(0); params.put("metric", "_all," + metric); - final RestRequest request = new FakeRestRequest.Builder().withPath("/_nodes/stats").withParams(params).build(); + final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_nodes/stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> action.prepareRequest(request, mock(NodeClient.class))); @@ -87,7 +87,7 @@ public class RestNodesStatsActionTests extends ESTestCase { params.put("metric", "indices"); final String indexMetric = randomAsciiOfLength(64); params.put("index_metric", indexMetric); - final RestRequest request = new FakeRestRequest.Builder().withPath("/_nodes/stats").withParams(params).build(); + final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_nodes/stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> action.prepareRequest(request, mock(NodeClient.class))); @@ -98,7 +98,7 @@ public class RestNodesStatsActionTests extends ESTestCase { final HashMap params = new HashMap<>(); params.put("metric", "indices"); params.put("index_metric", "indexing,stroe,unrecognized"); - final RestRequest request = new FakeRestRequest.Builder().withPath("/_nodes/stats").withParams(params).build(); + final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_nodes/stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( 
IllegalArgumentException.class, () -> action.prepareRequest(request, mock(NodeClient.class))); @@ -116,7 +116,7 @@ public class RestNodesStatsActionTests extends ESTestCase { params.put("metric", randomSubsetOf(1, metrics).get(0)); final String indexMetric = randomSubsetOf(1, RestNodesStatsAction.FLAGS.keySet()).get(0); params.put("index_metric", indexMetric); - final RestRequest request = new FakeRestRequest.Builder().withPath("/_nodes/stats").withParams(params).build(); + final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_nodes/stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> action.prepareRequest(request, mock(NodeClient.class))); @@ -131,7 +131,7 @@ public class RestNodesStatsActionTests extends ESTestCase { params.put("metric", "_all"); final String indexMetric = randomSubsetOf(1, RestNodesStatsAction.FLAGS.keySet()).get(0); params.put("index_metric", indexMetric); - final RestRequest request = new FakeRestRequest.Builder().withPath("/_nodes/stats").withParams(params).build(); + final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_nodes/stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> action.prepareRequest(request, mock(NodeClient.class))); diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java index d7d5d12e91b..6bf5c515df9 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java @@ -93,7 +93,7 @@ public class RestAnalyzeActionTests extends ESTestCase { public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { 
RestAnalyzeAction action = new RestAnalyzeAction(Settings.EMPTY, mock(RestController.class)); - RestRequest request = new FakeRestRequest.Builder().withContent(new BytesArray("{invalid_json}")).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray("{invalid_json}")).build(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> action.handleRequest(request, null, null)); assertThat(e.getMessage(), equalTo("Failed to parse request body")); } diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java index bf7be8f09d2..feac1672c11 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java @@ -48,7 +48,7 @@ public class RestIndicesStatsActionTests extends ESTestCase { final HashMap params = new HashMap<>(); final String metric = randomAsciiOfLength(64); params.put("metric", metric); - final RestRequest request = new FakeRestRequest.Builder().withPath("/_stats").withParams(params).build(); + final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> action.prepareRequest(request, mock(NodeClient.class))); @@ -58,7 +58,7 @@ public class RestIndicesStatsActionTests extends ESTestCase { public void testUnrecognizedMetricDidYouMean() throws IOException { final HashMap params = new HashMap<>(); params.put("metric", "request_cache,fieldata,unrecognized"); - final RestRequest request = new FakeRestRequest.Builder().withPath("/_stats").withParams(params).build(); + final RestRequest request = new 
FakeRestRequest.Builder(xContentRegistry()).withPath("/_stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> action.prepareRequest(request, mock(NodeClient.class))); @@ -73,7 +73,7 @@ public class RestIndicesStatsActionTests extends ESTestCase { final HashMap params = new HashMap<>(); final String metric = randomSubsetOf(1, RestIndicesStatsAction.METRICS.keySet()).get(0); params.put("metric", "_all," + metric); - final RestRequest request = new FakeRestRequest.Builder().withPath("/_stats").withParams(params).build(); + final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> action.prepareRequest(request, mock(NodeClient.class))); diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java index ba3ec55ab2a..cf42f2b1b3c 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java @@ -253,7 +253,7 @@ public class RestTableTests extends ESTestCase { } private RestResponse assertResponseContentType(Map headers, String mediaType) throws Exception { - FakeRestRequest requestWithAcceptHeader = new FakeRestRequest.Builder().withHeaders(headers).build(); + FakeRestRequest requestWithAcceptHeader = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(headers).build(); table.startRow(); table.addCell("foo"); table.addCell("foo"); diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 6c3e3f0bc45..2e9644191f6 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.inject.ModuleTestCase; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -108,8 +109,8 @@ public class SearchModuleTests extends ModuleTestCase { GaussDecayFunctionBuilder.PARSER)); } }; - expectThrows(IllegalArgumentException.class, - () -> new SearchModule(Settings.EMPTY, false, singletonList(registersDupeScoreFunction))); + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, singletonList(registersDupeScoreFunction)); + expectThrows(IllegalArgumentException.class, () -> new NamedXContentRegistry(searchModule.getNamedXContents())); SearchPlugin registersDupeSignificanceHeuristic = new SearchPlugin() { @Override diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java index c232068fffd..ca4b20c6a5b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java @@ -100,7 +100,7 @@ public class ExtendedBoundsTests extends ESTestCase { SearchContext context = mock(SearchContext.class); QueryShardContext qsc = new QueryShardContext(0, new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), null, null, null, null, - null, null, null, null, () -> now); + null, xContentRegistry(), null, null, null, () -> now); 
when(context.getQueryShardContext()).thenReturn(qsc); FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime"); DocValueFormat format = new DocValueFormat.DateTime(formatter, DateTimeZone.UTC); diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 8ec873704d6..994c39d6165 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -318,12 +318,13 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { public void testParseIndicesBoost() throws IOException { { String restContent = " { \"indices_boost\": {\"foo\": 1.0, \"bar\": 2.0}}"; - try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers); assertEquals(2, searchSourceBuilder.indexBoosts().size()); assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0)); assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1)); + assertWarnings("Object format in indices_boost is deprecated, please use array format instead"); } } @@ -334,7 +335,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { " { \"bar\" : 2.0 },\n" + " { \"baz\" : 3.0 }\n" + " ]}"; - try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { SearchSourceBuilder searchSourceBuilder = 
SearchSourceBuilder.fromXContent(createParseContext(parser), searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers); assertEquals(3, searchSourceBuilder.indexBoosts().size()); @@ -382,7 +383,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { } private void assertIndicesBoostParseErrorMessage(String restContent, String expectedErrorMessage) throws IOException { - try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { ParsingException e = expectThrows(ParsingException.class, () -> SearchSourceBuilder.fromXContent(createParseContext(parser), searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers)); assertEquals(expectedErrorMessage, e.getMessage()); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 07a70c91ef9..e494bcf981e 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -265,8 +265,8 @@ public class HighlightBuilderTests extends ESTestCase { Index index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter - QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, null, null, null, null, null, indicesQueriesRegistry, - null, null, System::currentTimeMillis) { + QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(), + 
indicesQueriesRegistry, null, null, System::currentTimeMillis) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); diff --git a/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java b/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java index be7e69ee76d..76474422961 100644 --- a/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java +++ b/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java @@ -48,7 +48,6 @@ import org.elasticsearch.search.AbstractSearchTestCase; import java.io.IOException; import java.util.Base64; -import java.util.function.Function; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.hamcrest.Matchers.containsString; @@ -163,9 +162,12 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { } public QueryBuilder aliasFilter(IndexMetaData indexMetaData, String... 
aliasNames) { - Function contextFactory = (p) -> new QueryParseContext(queriesRegistry, - p, new ParseFieldMatcher(Settings.EMPTY)); - return ShardSearchRequest.parseAliasFilter(contextFactory, indexMetaData, aliasNames); + ShardSearchRequest.FilterParser filterParser = bytes -> { + try (XContentParser parser = XContentFactory.xContent(bytes).createParser(xContentRegistry(), bytes)) { + return new QueryParseContext(queriesRegistry, parser, new ParseFieldMatcher(Settings.EMPTY)).parseInnerQueryBuilder(); + } + }; + return ShardSearchRequest.parseAliasFilter(filterParser, indexMetaData, aliasNames); } // BWC test for changes from #20916 @@ -198,7 +200,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { IndexSettings indexSettings = new IndexSettings(indexMetadata.build(), Settings.EMPTY); final long nowInMillis = randomPositiveLong(); QueryShardContext context = new QueryShardContext( - 0, indexSettings, null, null, null, null, null, queriesRegistry, null, null, () -> nowInMillis); + 0, indexSettings, null, null, null, null, null, xContentRegistry(), queriesRegistry, null, null, () -> nowInMillis); readRequest.rewrite(context); QueryBuilder queryBuilder = readRequest.filteringAliases(); assertEquals(queryBuilder, QueryBuilders.boolQuery() diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index 22f2dd53808..160e5393391 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -138,8 +138,8 @@ public class QueryRescoreBuilderTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); // shard context will only need 
indicesQueriesRegistry for building Query objects nested in query rescorer - QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, null, null, null, null, null, indicesQueriesRegistry, - null, null, () -> nowInMillis) { + QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(), + indicesQueriesRegistry, null, null, () -> nowInMillis) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); diff --git a/core/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java b/core/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java index d386f73adf5..df89bda88d8 100644 --- a/core/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java +++ b/core/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java @@ -48,7 +48,7 @@ public class RestClearScrollActionTests extends ESTestCase { public void testParseClearScrollRequestWithInvalidJsonThrowsException() throws Exception { RestClearScrollAction action = new RestClearScrollAction(Settings.EMPTY, mock(RestController.class)); - RestRequest request = new FakeRestRequest.Builder().withContent(new BytesArray("{invalid_json}")).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray("{invalid_json}")).build(); Exception e = expectThrows(IllegalArgumentException.class, () -> action.prepareRequest(request, null)); assertThat(e.getMessage(), equalTo("Failed to parse request body")); } diff --git a/core/src/test/java/org/elasticsearch/search/scroll/RestSearchScrollActionTests.java b/core/src/test/java/org/elasticsearch/search/scroll/RestSearchScrollActionTests.java index f4bb364516a..7d2a5024cc8 100644 --- a/core/src/test/java/org/elasticsearch/search/scroll/RestSearchScrollActionTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/scroll/RestSearchScrollActionTests.java @@ -52,7 +52,7 @@ public class RestSearchScrollActionTests extends ESTestCase { public void testParseSearchScrollRequestWithInvalidJsonThrowsException() throws Exception { RestSearchScrollAction action = new RestSearchScrollAction(Settings.EMPTY, mock(RestController.class)); - RestRequest request = new FakeRestRequest.Builder().withContent(new BytesArray("{invalid_json}")).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray("{invalid_json}")).build(); Exception e = expectThrows(IllegalArgumentException.class, () -> action.prepareRequest(request, null)); assertThat(e.getMessage(), equalTo("Failed to parse request body")); } diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index d545a082b55..63e65a91062 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -25,7 +25,6 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -211,7 +210,7 @@ public abstract class AbstractSortTestCase> extends EST }); long nowInMillis = randomPositiveLong(); return new QueryShardContext(0, idxSettings, bitsetFilterCache, ifds, null, null, scriptService, - indicesQueriesRegistry, null, null, () -> nowInMillis) { + xContentRegistry(), indicesQueriesRegistry, null, null, () -> nowInMillis) { @Override public MappedFieldType fieldMapper(String name) { 
return provideMappedFieldType(name); @@ -250,7 +249,9 @@ public abstract class AbstractSortTestCase> extends EST @SuppressWarnings("unchecked") private T copy(T original) throws IOException { - return copyWriteable(original, namedWriteableRegistry, - (Writeable.Reader) namedWriteableRegistry.getReader(SortBuilder.class, original.getWriteableName())); + /* The cast below is required to make Java 9 happy. Java 8 infers the T in copyWriterable to be the same as AbstractSortTestCase's + * T but Java 9 infers it to be SortBuilder. */ + return (T) copyWriteable(original, namedWriteableRegistry, + namedWriteableRegistry.getReader(SortBuilder.class, original.getWriteableName())); } } diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 131d19f600d..ec3d2a01754 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -267,17 +267,15 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase GeoDistanceSortBuilder.fromXContent(context, "")); - assertTrue(e.getMessage().startsWith("Deprecated field ")); - + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.EMPTY); + GeoDistanceSortBuilder.fromXContent(context, ""); + assertWarnings("Deprecated field [coerce] used, replaced by [validation_method]"); } public void testIgnoreMalformedIsDeprecated() throws IOException { @@ -288,17 +286,15 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase GeoDistanceSortBuilder.fromXContent(context, "")); - assertTrue(e.getMessage().startsWith("Deprecated field ")); - + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.EMPTY); + GeoDistanceSortBuilder.fromXContent(context, ""); + 
assertWarnings("Deprecated field [ignore_malformed] used, replaced by [validation_method]"); } public void testSortModeSumIsRejectedInJSON() throws IOException { @@ -455,8 +451,8 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase parse(sortBuilder)); - assertEquals("Deprecated field [sort_mode] used, expected [mode] instead", ex.getMessage()); + parse(sortBuilder); + assertWarnings("Deprecated field [sort_mode] used, expected [mode] instead"); } private GeoDistanceSortBuilder parse(XContentBuilder sortBuilder) throws Exception { diff --git a/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java b/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java index 581280a5a9b..cdacabdd66a 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java @@ -79,7 +79,7 @@ public class SnapshotRequestsTests extends ESTestCase { BytesReference bytes = builder.endObject().bytes(); - request.source(XContentHelper.createParser(bytes).mapOrdered()); + request.source(XContentHelper.convertToMap(bytes, true).v2()); assertEquals("test-repo", request.repository()); assertEquals("test-snap", request.snapshot()); @@ -137,7 +137,7 @@ public class SnapshotRequestsTests extends ESTestCase { BytesReference bytes = builder.endObject().bytes(); - request.source(XContentHelper.createParser(bytes).mapOrdered()); + request.source(XContentHelper.convertToMap(bytes, true).v2()); assertEquals("test-repo", request.repository()); assertEquals("test-snap", request.snapshot()); diff --git a/distribution/src/main/resources/bin/elasticsearch-service.bat b/distribution/src/main/resources/bin/elasticsearch-service.bat index 173ee75e8c0..d06de4c5bea 100644 --- a/distribution/src/main/resources/bin/elasticsearch-service.bat +++ b/distribution/src/main/resources/bin/elasticsearch-service.bat @@ -4,7 +4,7 @@ SETLOCAL enabledelayedexpansion TITLE 
Elasticsearch Service ${project.version} IF DEFINED JAVA_HOME ( - SET JAVA="%JAVA_HOME%"\bin\java.exe + SET JAVA="%JAVA_HOME%\bin\java.exe" ) ELSE ( FOR %%I IN (java.exe) DO set JAVA=%%~$PATH:I ) @@ -121,19 +121,19 @@ echo Installing service : "%SERVICE_ID%" echo Using JAVA_HOME (%ARCH%): "%JAVA_HOME%" rem Check JVM server dll first -if exist "%JAVA_HOME%"\jre\bin\server\jvm.dll ( +if exist "%JAVA_HOME%\jre\bin\server\jvm.dll" ( set JVM_DLL=\jre\bin\server\jvm.dll goto foundJVM ) rem Check 'server' JRE (JRE installed on Windows Server) -if exist "%JAVA_HOME%"\bin\server\jvm.dll ( +if exist "%JAVA_HOME%\bin\server\jvm.dll" ( set JVM_DLL=\bin\server\jvm.dll goto foundJVM ) rem Fallback to 'client' JRE -if exist "%JAVA_HOME%"\bin\client\jvm.dll ( +if exist "%JAVA_HOME%\bin\client\jvm.dll" ( set JVM_DLL=\bin\client\jvm.dll echo Warning: JAVA_HOME points to a JRE and not JDK installation; a client (not a server^) JVM will be used... ) else ( diff --git a/distribution/src/main/resources/bin/elasticsearch.in.bat b/distribution/src/main/resources/bin/elasticsearch.in.bat index d1f443aa8a3..16293ca406a 100644 --- a/distribution/src/main/resources/bin/elasticsearch.in.bat +++ b/distribution/src/main/resources/bin/elasticsearch.in.bat @@ -1,7 +1,7 @@ @echo off IF DEFINED JAVA_HOME ( - set JAVA="%JAVA_HOME%"\bin\java.exe + set JAVA="%JAVA_HOME%\bin\java.exe" ) ELSE ( FOR %%I IN (java.exe) DO set JAVA=%%~$PATH:I ) diff --git a/docs/java-api/query-dsl/percolate-query.asciidoc b/docs/java-api/query-dsl/percolate-query.asciidoc index 186d707379e..1d1ae314fcb 100644 --- a/docs/java-api/query-dsl/percolate-query.asciidoc +++ b/docs/java-api/query-dsl/percolate-query.asciidoc @@ -1,21 +1,8 @@ -[[java-query-percolate-query]] -==== Percolate query - -See: {ref}/query-dsl-percolate-query.html[Percolate Query] - -In order to use the `percolate` query from the Java API your -the percolator module dependency should be on the classpath and -the transport client should be loaded with the 
percolator plugin: - [source,java] -------------------------------------------------- -TransportClient transportClient = TransportClient.builder() - .settings(Settings.builder().put("node.name", "node")) - .addPlugin(PercolatorPlugin.class) - .build(); -transportClient.addTransportAddress( - new InetSocketTransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300)) -); +Settings settings = Settings.builder().put("cluster.name", "elasticsearch").build(); +TransportClient client = new PreBuiltTransportClient(settings); +client.addTransportAddress(new InetSocketTransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300))); -------------------------------------------------- Before the `percolate` query can be used an `percolator` mapping should be added and @@ -26,6 +13,7 @@ a document containing a percolator query should be indexed: // create an index with a percolator field with the name 'query': client.admin().indices().prepareCreate("myIndexName") .addMapping("query", "query", "type=percolator") + .addMapping("docs", "content", "type=text") .get(); //This is the query we're registering in the percolator @@ -37,7 +25,7 @@ client.prepareIndex("myIndexName", "query", "myDesignatedQueryName") .startObject() .field("query", qb) // Register the query .endObject()) - .setRefresh(true) // Needed when the query shall be available immediately + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) // Needed when the query shall be available immediately .get(); -------------------------------------------------- @@ -51,13 +39,14 @@ code: -------------------------------------------------- //Build a document to check against the percolator XContentBuilder docBuilder = XContentFactory.jsonBuilder().startObject(); -docBuilder.field("doc").startObject(); //This is needed to designate the document docBuilder.field("content", "This is amazing!"); -docBuilder.endObject(); //End of the doc field docBuilder.endObject(); //End of the JSON root object + 
+PercolateQueryBuilder percolateQuery = new PercolateQueryBuilder("query", "docs", docBuilder.bytes()); + // Percolate, by executing the percolator query in the query dsl: SearchResponse response = client().prepareSearch("myIndexName") - .setQuery(QueryBuilders.percolateQuery("query", ""myDocumentType", docBuilder.bytes())) + .setQuery(percolateQuery) .get(); //Iterate over the results for(SearchHit hit : response.getHits()) { diff --git a/docs/plugins/ingest-attachment.asciidoc b/docs/plugins/ingest-attachment.asciidoc index a0a4d70cbcf..ff29e544c96 100644 --- a/docs/plugins/ingest-attachment.asciidoc +++ b/docs/plugins/ingest-attachment.asciidoc @@ -53,6 +53,7 @@ The node must be stopped before removing the plugin. | `target_field` | no | attachment | The field that will hold the attachment information | `indexed_chars` | no | 100000 | The number of chars being used for extraction to prevent huge fields. Use `-1` for no limit. | `properties` | no | all | Properties to select to be stored. Can be `content`, `title`, `name`, `author`, `keywords`, `date`, `content_type`, `content_length`, `language` +| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document |====== For example, this: diff --git a/docs/plugins/ingest-geoip.asciidoc b/docs/plugins/ingest-geoip.asciidoc index 95e7a0442a4..3665026c01a 100644 --- a/docs/plugins/ingest-geoip.asciidoc +++ b/docs/plugins/ingest-geoip.asciidoc @@ -54,6 +54,7 @@ The node must be stopped before removing the plugin. | `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. | `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb.gz and GeoLite2-Country.mmdb.gz files. 
| `properties` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup. +| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document |====== *Depends on what is available in `database_field`: diff --git a/docs/plugins/ingest-user-agent.asciidoc b/docs/plugins/ingest-user-agent.asciidoc index 250051d17d5..a99e0b3a83a 100644 --- a/docs/plugins/ingest-user-agent.asciidoc +++ b/docs/plugins/ingest-user-agent.asciidoc @@ -43,11 +43,12 @@ The node must be stopped before removing the plugin. .User-agent options [options="header"] |====== -| Name | Required | Default | Description -| `field` | yes | - | The field containing the user agent string. -| `target_field` | no | user_agent | The field that will be filled with the user agent details. -| `regex_file` | no | - | The name of the file in the `config/ingest-user-agent` directory containing the regular expressions for parsing the user agent string. Both the directory and the file have to be created before starting Elasticsearch. If not specified, ingest-user-agent will use the regexes.yaml from uap-core it ships with (see below). +| Name | Required | Default | Description +| `field` | yes | - | The field containing the user agent string. +| `target_field` | no | user_agent | The field that will be filled with the user agent details. +| `regex_file` | no | - | The name of the file in the `config/ingest-user-agent` directory containing the regular expressions for parsing the user agent string. Both the directory and the file have to be created before starting Elasticsearch. If not specified, ingest-user-agent will use the regexes.yaml from uap-core it ships with (see below). 
| `properties` | no | [`name`, `major`, `minor`, `patch`, `build`, `os`, `os_name`, `os_major`, `os_minor`, `device`] | Controls what properties are added to `target_field`. +| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document |====== Here is an example that adds the user agent details to the `user_agent` field based on the `agent` field: diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 3cdcfd5d2cd..8b6a4478115 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -143,7 +143,7 @@ This should give a small response that makes it both easy and inexpensive to par [[delete-pipeline-api]] === Delete Pipeline API -The delete pipeline API deletes pipelines by ID. +The delete pipeline API deletes pipelines by ID or wildcard match (`my-*`, `*`). [source,js] -------------------------------------------------- @@ -152,6 +152,36 @@ DELETE _ingest/pipeline/my-pipeline-id // CONSOLE // TEST[continued] +//// +Hidden setup for wildcard test: +[source,js] +-------------------------------------------------- +PUT _ingest/pipeline/wild-one +{ + "description" : "first pipeline to be wildcard deleted", + "processors" : [ ] +} + +PUT _ingest/pipeline/wild-two +{ + "description" : "second pipeline to be wildcard deleted", + "processors" : [ ] +} + +DELETE _ingest/pipeline/* +-------------------------------------------------- +// CONSOLE + +Hidden expected response: +[source,js] +-------------------------------------------------- +{ +"acknowledged": true +} +-------------------------------------------------- +// TESTRESPONSE +//// + [[simulate-pipeline-api]] === Simulate Pipeline API @@ -1485,6 +1515,38 @@ Converts a JSON string into a structured JSON object. 
} -------------------------------------------------- +[[kv-processor]] +=== KV Processor +This processor helps automatically parse messages (or specific event fields) which are of the foo=bar variety. + +For example, if you have a log message which contains `ip=1.2.3.4 error=REFUSED`, you can parse those automatically by configuring: + + +[source,js] +-------------------------------------------------- +{ + "kv": { + "field": "message", + "field_split": " ", + "value_split": "=" + } +} +-------------------------------------------------- + +[[kv-options]] +.Kv Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to be parsed +| `field_split` | yes | - | Regex pattern to use for splitting key-value pairs +| `value_split` | yes | - | Regex pattern to use for splitting the key from the value within a key-value pair +| `target_field` | no | `null` | The field to insert the extracted keys into. Defaults to the root of the document +| `include_keys` | no | `null` | List of keys to filter and insert into document. Defaults to including all keys +| `ignore_missing` | no | `false` | If `true` and `field` does not exist or is `null`, the processor quietly exits without modifying the document +|====== + + [[lowercase-processor]] === Lowercase Processor Converts a string to its lowercase equivalent. 
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java index 82d316dfa62..8b6c8e8bed8 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java @@ -63,6 +63,7 @@ public class IngestCommonPlugin extends Plugin implements IngestPlugin { processors.put(ScriptProcessor.TYPE, new ScriptProcessor.Factory(parameters.scriptService)); processors.put(DotExpanderProcessor.TYPE, new DotExpanderProcessor.Factory()); processors.put(JsonProcessor.TYPE, new JsonProcessor.Factory()); + processors.put(KeyValueProcessor.TYPE, new KeyValueProcessor.Factory()); return Collections.unmodifiableMap(processors); } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java index cb734e7bef4..d4c7efe0d50 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java @@ -19,7 +19,8 @@ package org.elasticsearch.ingest.common; -import com.fasterxml.jackson.core.JsonParseException; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.ConfigurationUtils; @@ -65,7 +66,7 @@ public final class JsonProcessor extends AbstractProcessor { public void execute(IngestDocument document) throws Exception { String stringValue = document.getFieldValue(field, String.class); try { - Map mapValue = JsonXContent.jsonXContent.createParser(stringValue).map(); + Map mapValue = 
XContentHelper.convertToMap(JsonXContent.jsonXContent, stringValue, false); if (addToRoot) { for (Map.Entry entry : mapValue.entrySet()) { document.setFieldValue(entry.getKey(), entry.getValue()); @@ -73,7 +74,7 @@ public final class JsonProcessor extends AbstractProcessor { } else { document.setFieldValue(targetField, mapValue); } - } catch (JsonParseException e) { + } catch (ElasticsearchParseException e) { throw new IllegalArgumentException(e); } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java new file mode 100644 index 00000000000..d1f6eb7caf9 --- /dev/null +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.common; + +import org.elasticsearch.ingest.AbstractProcessor; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * The KeyValueProcessor parses and extracts messages of the `key=value` variety into fields with values of the keys. + */ +public final class KeyValueProcessor extends AbstractProcessor { + + public static final String TYPE = "kv"; + + private final String field; + private final String fieldSplit; + private final String valueSplit; + private final List includeKeys; + private final String targetField; + private final boolean ignoreMissing; + + KeyValueProcessor(String tag, String field, String fieldSplit, String valueSplit, List includeKeys, + String targetField, boolean ignoreMissing) { + super(tag); + this.field = field; + this.targetField = targetField; + this.fieldSplit = fieldSplit; + this.valueSplit = valueSplit; + this.includeKeys = includeKeys; + this.ignoreMissing = ignoreMissing; + } + + String getField() { + return field; + } + + String getFieldSplit() { + return fieldSplit; + } + + String getValueSplit() { + return valueSplit; + } + + List getIncludeKeys() { + return includeKeys; + } + + String getTargetField() { + return targetField; + } + + boolean isIgnoreMissing() { + return ignoreMissing; + } + + public void append(IngestDocument document, String targetField, String value) { + if (document.hasField(targetField)) { + document.appendFieldValue(targetField, value); + } else { + document.setFieldValue(targetField, value); + } + } + + @Override + public void execute(IngestDocument document) { + String oldVal = document.getFieldValue(field, String.class, ignoreMissing); + + if (oldVal == null && ignoreMissing) { + return; + } else if (oldVal == null) { + throw new IllegalArgumentException("field [" 
+ field + "] is null, cannot extract key-value pairs."); + } + + String fieldPathPrefix = (targetField == null) ? "" : targetField + "."; + Arrays.stream(oldVal.split(fieldSplit)) + .map((f) -> f.split(valueSplit, 2)) + .filter((p) -> includeKeys == null || includeKeys.contains(p[0])) + .forEach((p) -> append(document, fieldPathPrefix + p[0], p[1])); + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory implements Processor.Factory { + @Override + public KeyValueProcessor create(Map registry, String processorTag, + Map config) throws Exception { + String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); + String targetField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "target_field"); + String fieldSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field_split"); + String valueSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "value_split"); + List includeKeys = ConfigurationUtils.readOptionalList(TYPE, processorTag, config, "include_keys"); + if (includeKeys != null) { + includeKeys = Collections.unmodifiableList(includeKeys); + } + boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); + return new KeyValueProcessor(processorTag, field, fieldSplit, valueSplit, includeKeys, targetField, ignoreMissing); + } + } +} diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index 2b2c521417c..5d087ebbc12 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -60,9 +60,9 @@ public class JsonProcessorTests extends ESTestCase { IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random(), document); Exception exception = expectThrows(IllegalArgumentException.class, () -> jsonProcessor.execute(ingestDocument)); - assertThat(exception.getMessage(), equalTo("com.fasterxml.jackson.core.JsonParseException: Unrecognized token" + - " 'invalid': was expecting ('true', 'false' or 'null')\n" + - " at [Source: invalid json; line: 1, column: 8]")); + assertThat(exception.getCause().getCause().getMessage(), equalTo("Unrecognized token" + + " 'invalid': was expecting ('true', 'false' or 'null')\n" + + " at [Source: invalid json; line: 1, column: 8]")); } public void testFieldMissing() { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorFactoryTests.java new file mode 100644 index 00000000000..4dc4e082655 --- /dev/null +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorFactoryTests.java @@ -0,0 +1,102 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.common; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class KeyValueProcessorFactoryTests extends ESTestCase { + + public void testCreateWithDefaults() throws Exception { + KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "field1"); + config.put("field_split", "&"); + config.put("value_split", "="); + String processorTag = randomAsciiOfLength(10); + KeyValueProcessor processor = factory.create(null, processorTag, config); + assertThat(processor.getTag(), equalTo(processorTag)); + assertThat(processor.getField(), equalTo("field1")); + assertThat(processor.getFieldSplit(), equalTo("&")); + assertThat(processor.getValueSplit(), equalTo("=")); + assertThat(processor.getIncludeKeys(), is(nullValue())); + assertThat(processor.getTargetField(), is(nullValue())); + assertFalse(processor.isIgnoreMissing()); + } + + public void testCreateWithAllFieldsSet() throws Exception { + KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "field1"); + config.put("field_split", "&"); + config.put("value_split", "="); + config.put("target_field", "target"); + config.put("include_keys", Arrays.asList("a", "b")); + config.put("ignore_missing", true); + String processorTag = randomAsciiOfLength(10); + KeyValueProcessor processor = factory.create(null, processorTag, config); + assertThat(processor.getTag(), equalTo(processorTag)); + assertThat(processor.getField(), equalTo("field1")); + assertThat(processor.getFieldSplit(), equalTo("&")); + assertThat(processor.getValueSplit(), 
equalTo("=")); + assertThat(processor.getIncludeKeys(), equalTo(Arrays.asList("a", "b"))); + assertThat(processor.getTargetField(), equalTo("target")); + assertTrue(processor.isIgnoreMissing()); + } + + public void testCreateWithMissingField() { + KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory(); + Map config = new HashMap<>(); + String processorTag = randomAsciiOfLength(10); + ElasticsearchException exception = expectThrows(ElasticsearchParseException.class, + () -> factory.create(null, processorTag, config)); + assertThat(exception.getMessage(), equalTo("[field] required property is missing")); + } + + public void testCreateWithMissingFieldSplit() { + KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "field1"); + String processorTag = randomAsciiOfLength(10); + ElasticsearchException exception = expectThrows(ElasticsearchParseException.class, + () -> factory.create(null, processorTag, config)); + assertThat(exception.getMessage(), equalTo("[field_split] required property is missing")); + } + + public void testCreateWithMissingValueSplit() { + KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "field1"); + config.put("field_split", "&"); + String processorTag = randomAsciiOfLength(10); + ElasticsearchException exception = expectThrows(ElasticsearchParseException.class, + () -> factory.create(null, processorTag, config)); + assertThat(exception.getMessage(), equalTo("[value_split] required property is missing")); + } +} diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java new file mode 100644 index 00000000000..2d5f71bf54e --- /dev/null +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java @@ -0,0 +1,96 @@ 
+/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.common; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; +import static org.hamcrest.Matchers.equalTo; + +public class KeyValueProcessorTests extends ESTestCase { + + public void test() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); + Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), fieldName, "&", "=", null, "target", false); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); + assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe"))); + } + + public void testRootTarget() throws Exception { + 
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + ingestDocument.setFieldValue("myField", "first=hello&second=world&second=universe"); + Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), "myField", "&", "=", null, null, false); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("first", String.class), equalTo("hello")); + assertThat(ingestDocument.getFieldValue("second", List.class), equalTo(Arrays.asList("world", "universe"))); + } + + public void testKeySameAsSourceField() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + ingestDocument.setFieldValue("first", "first=hello"); + Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), "first", "&", "=", null, null, false); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("first", List.class), equalTo(Arrays.asList("first=hello", "hello"))); + } + + public void testIncludeKeys() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); + Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), fieldName, "&", "=", + Collections.singletonList("first"), "target", false); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); + assertFalse(ingestDocument.hasField("target.second")); + } + + public void testMissingField() { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), "unknown", "&", "=", null, "target", false); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> 
processor.execute(ingestDocument)); + assertThat(exception.getMessage(), equalTo("field [unknown] not present as part of path [unknown]")); + } + + public void testNullValueWithIgnoreMissing() throws Exception { + String fieldName = RandomDocumentPicks.randomFieldName(random()); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), + Collections.singletonMap(fieldName, null)); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), fieldName, "", "", null, "target", true); + processor.execute(ingestDocument); + assertIngestDocument(originalIngestDocument, ingestDocument); + } + + public void testNonExistentWithIgnoreMissing() throws Exception { + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), "unknown", "", "", null, "target", true); + processor.execute(ingestDocument); + assertIngestDocument(originalIngestDocument, ingestDocument); + } +} diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml index 87c1f5a8abf..a58c329a7c5 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml @@ -20,12 +20,13 @@ - match: { nodes.$master.ingest.processors.8.type: gsub } - match: { nodes.$master.ingest.processors.9.type: join } - match: { nodes.$master.ingest.processors.10.type: json } - - match: { nodes.$master.ingest.processors.11.type: lowercase } - - match: { nodes.$master.ingest.processors.12.type: remove } - - match: { nodes.$master.ingest.processors.13.type: rename } - - match: { 
nodes.$master.ingest.processors.14.type: script } - - match: { nodes.$master.ingest.processors.15.type: set } - - match: { nodes.$master.ingest.processors.16.type: sort } - - match: { nodes.$master.ingest.processors.17.type: split } - - match: { nodes.$master.ingest.processors.18.type: trim } - - match: { nodes.$master.ingest.processors.19.type: uppercase } + - match: { nodes.$master.ingest.processors.11.type: kv } + - match: { nodes.$master.ingest.processors.12.type: lowercase } + - match: { nodes.$master.ingest.processors.13.type: remove } + - match: { nodes.$master.ingest.processors.14.type: rename } + - match: { nodes.$master.ingest.processors.15.type: script } + - match: { nodes.$master.ingest.processors.16.type: set } + - match: { nodes.$master.ingest.processors.17.type: sort } + - match: { nodes.$master.ingest.processors.18.type: split } + - match: { nodes.$master.ingest.processors.19.type: trim } + - match: { nodes.$master.ingest.processors.20.type: uppercase } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/150_kv.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/150_kv.yaml new file mode 100644 index 00000000000..a1ecf10278c --- /dev/null +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/150_kv.yaml @@ -0,0 +1,43 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "1" + ignore: 404 + +--- +"Test KV Processor": + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "kv" : { + "field" : "foo", + "field_split": " ", + "value_split": "=" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "1" + body: { + foo: "goodbye=everybody hello=world" + } + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.goodbye: "everybody" } + - match: { _source.hello: "world" } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml 
b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index b12a65f8fb6..b041e0664bb 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -50,6 +50,46 @@ teardown: catch: missing ingest.get_pipeline: id: "my_pipeline" +--- +"Test wildcard pipeline delete": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field2", + "value": "_value" + } + } + ] + } + - match: { acknowledged: true } + + - do: + ingest.get_pipeline: + id: "my_pipeline" + - match: { my_pipeline.description: "_description" } + + - do: + ingest.delete_pipeline: + id: "my_*" + - match: { acknowledged: true } + + - do: + catch: missing + ingest.get_pipeline: + id: "my_pipeline" + + - do: + catch: missing + ingest.delete_pipeline: + id: "my_*" + - match: { "error.type": "resource_not_found_exception" } + - match: { "error.reason": "pipeline [my_*] is missing" } --- "Test Get All Pipelines": diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java index 7eb134f74ff..ecaa360bc4e 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.rest.action.search.RestMultiSearchAction; @@ -67,7 +66,6 
@@ public class RestMultiSearchTemplateAction extends BaseRestHandler { * Parses a {@link RestRequest} body and returns a {@link MultiSearchTemplateRequest} */ public static MultiSearchTemplateRequest parseRequest(RestRequest restRequest, boolean allowExplicitIndex) throws IOException { - MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest(); RestMultiSearchAction.parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex, (searchRequest, bytes) -> { diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java index 59e2e8eac52..a49c96403d3 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java @@ -118,7 +118,8 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder templateParams = new HashMap<>(); - templateParams.put("P_Keyword1", "dev"); + Map templateParams = new HashMap<>(); + templateParams.put("P_Keyword1", "dev"); - ParsingException e = expectThrows(ParsingException.class, () -> new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest("testindex").types("test")) - .setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams) - .get()); - assertThat(e.getMessage(), containsString("[match] query does not support type ooophrase_prefix")); + ParsingException e = expectThrows(ParsingException.class, () -> new SearchTemplateRequestBuilder(client()) + .setRequest(new SearchRequest("testindex").types("test")) + .setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams) + .get()); + assertThat(e.getMessage(), containsString("[match_phrase_prefix] query does not support [unsupported]")); - 
assertAcked(client().admin().cluster().preparePutStoredScript() - .setScriptLang(MustacheScriptEngineService.NAME) - .setId("git01") - .setSource(new BytesArray("{\"query\": {\"match\": {\"searchtext\": {\"query\": \"{{P_Keyword1}}\"," + - "\"type\": \"phrase_prefix\"}}}}"))); + assertAcked(client().admin().cluster().preparePutStoredScript() + .setScriptLang(MustacheScriptEngineService.NAME) + .setId("git01") + .setSource(new BytesArray("{\"query\": {\"match_phrase_prefix\": {\"searchtext\": {\"query\": \"{{P_Keyword1}}\"}}}}"))); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest("testindex").types("test")) - .setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams) - .get(); - assertHitCount(searchResponse.getResponse(), 1); - } + SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()) + .setRequest(new SearchRequest("testindex").types("test")) + .setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams) + .get(); + assertHitCount(searchResponse.getResponse(), 1); } public void testIndexedTemplateWithArray() throws Exception { diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java index 34e06410a78..4334de090c2 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java @@ -59,11 +59,12 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase 0) { - try (XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from))) { + // EMPTY is safe here because we don't call namedObject + try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, 
data.slice(from, nextMarker - from))) { // Move to START_OBJECT, if token is null, its an empty data XContentParser.Token token = parser.nextToken(); if (token != null) { diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 24243081f23..ff3a9bf1ee6 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -57,7 +57,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.analysis.FieldNameAnalyzer; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperForType; @@ -197,16 +196,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder 0) { XContent xContent = PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent(); - try (XContentParser sourceParser = xContent.createParser(qbSource.bytes, qbSource.offset, qbSource.length)) { + try (XContentParser sourceParser = xContent.createParser(context.getXContentRegistry(), qbSource.bytes, + qbSource.offset, qbSource.length)) { return parseQuery(context, mapUnmappedFieldsAsString, sourceParser); } } else { @@ -509,7 +500,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder 0) { - try (XContentParser parser = XContentHelper.createParser(percolateRequest.source())) { + try (XContentParser parser = XContentHelper.createParser(xContentRegistry, percolateRequest.source())) { String currentFieldName = null; XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.START_OBJECT) { @@ 
-209,7 +213,7 @@ public class TransportPercolateAction extends HandledTransportAction, A extends GenericAction> extends AbstractBaseReindexRestHandler { - protected AbstractBulkByQueryRestHandler(Settings settings, SearchRequestParsers searchRequestParsers, - ClusterService clusterService, A action) { + protected AbstractBulkByQueryRestHandler(Settings settings, SearchRequestParsers searchRequestParsers, ClusterService clusterService, + A action) { super(settings, searchRequestParsers, clusterService, action); } @@ -109,7 +109,7 @@ public abstract class AbstractBulkByQueryRestHandler< } try (XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType())) { - return parser.contentType().xContent().createParser(builder.map(body).bytes()); + return parser.contentType().xContent().createParser(parser.getXContentRegistry(), builder.map(body).bytes()); } } finally { parser.close(); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java index ba6e6a942ba..8e8baacd2e7 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java @@ -39,8 +39,8 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler { @Inject - public RestDeleteByQueryAction(Settings settings, RestController controller, - SearchRequestParsers searchRequestParsers, ClusterService clusterService) { + public RestDeleteByQueryAction(Settings settings, RestController controller, SearchRequestParsers searchRequestParsers, + ClusterService clusterService) { super(settings, searchRequestParsers, clusterService, DeleteByQueryAction.INSTANCE); controller.registerHandler(POST, "/{index}/_delete_by_query", this); 
controller.registerHandler(POST, "/{index}/{type}/_delete_by_query", this); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index ad9cd334b88..9ae9e6ba876 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -80,7 +80,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler { @Inject - public RestUpdateByQueryAction(Settings settings, RestController controller, - SearchRequestParsers searchRequestParsers, ClusterService clusterService) { + public RestUpdateByQueryAction(Settings settings, RestController controller, SearchRequestParsers searchRequestParsers, + ClusterService clusterService) { super(settings, searchRequestParsers, clusterService, UpdateByQueryAction.INSTANCE); controller.registerHandler(POST, "/{index}/_update_by_query", this); controller.registerHandler(POST, "/{index}/{type}/_update_by_query", this); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java index 9ecb4700f58..8828878563c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java @@ -30,6 +30,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import 
org.elasticsearch.common.xcontent.XContentParser; @@ -107,7 +108,9 @@ final class RemoteRequestBuilders { } static HttpEntity initialSearchEntity(BytesReference query) { - try (XContentBuilder entity = JsonXContent.contentBuilder(); XContentParser queryParser = XContentHelper.createParser(query)) { + // EMPTY is safe here because we're not calling namedObject + try (XContentBuilder entity = JsonXContent.contentBuilder(); + XContentParser queryParser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, query)) { entity.startObject(); entity.field("query"); /* diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java index 97276f1b644..8681173fe4e 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -166,7 +167,19 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { //auto-detect as a fallback xContentType = XContentFactory.xContentType(content); } - try(XContentParser xContentParser = xContentType.xContent().createParser(content)) { + if (xContentType == null) { + try { + throw new ElasticsearchException( + "Can't detect content type for response: " + bodyMessage(response.getEntity())); + } catch (IOException e) { + ElasticsearchException ee = new 
ElasticsearchException("Error extracting body from response"); + ee.addSuppressed(e); + throw ee; + } + } + // EMPTY is safe here because we don't call namedObject + try (XContentParser xContentParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, + content)) { parsedResponse = parser.apply(xContentParser, () -> ParseFieldMatcher.STRICT); } } catch (IOException e) { @@ -220,18 +233,20 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { messagePrefix = "Couldn't extract status [" + statusCode + "]. "; status = RestStatus.INTERNAL_SERVER_ERROR; } - String message; - if (entity == null) { - message = messagePrefix + "No error body."; - } else { - try { - message = messagePrefix + "body=" + EntityUtils.toString(entity); - } catch (IOException ioe) { - ElasticsearchStatusException e = new ElasticsearchStatusException(messagePrefix + "Failed to extract body.", status, cause); - e.addSuppressed(ioe); - return e; - } + try { + return new ElasticsearchStatusException(messagePrefix + bodyMessage(entity), status, cause); + } catch (IOException ioe) { + ElasticsearchStatusException e = new ElasticsearchStatusException(messagePrefix + "Failed to extract body.", status, cause); + e.addSuppressed(ioe); + return e; + } + } + + static String bodyMessage(@Nullable HttpEntity entity) throws IOException { + if (entity == null) { + return "No error body."; + } else { + return "body=" + EntityUtils.toString(entity); } - return new ElasticsearchStatusException(message, status, cause); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java index a5c68c768b2..237292bed3b 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java @@ -136,7 +136,7 @@ public class 
RestReindexActionTests extends ESTestCase { SearchRequestParsers parsers = new SearchRequestParsers(new IndicesQueriesRegistry(), null, null, null); RestReindexAction action = new RestReindexAction(Settings.EMPTY, mock(RestController.class), parsers, null); - FakeRestRequest.Builder request = new FakeRestRequest.Builder(); + FakeRestRequest.Builder request = new FakeRestRequest.Builder(xContentRegistry()); try (XContentBuilder body = JsonXContent.contentBuilder().prettyPrint()) { body.startObject(); { body.startObject("source"); { diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml index 86b4bcc1132..ab47a306f57 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml @@ -301,7 +301,8 @@ refresh: true - do: - catch: /Connection refused/ + # sometimes IIS is listening on port 0. In that case we fail in other ways and this test isn't useful. 
+ catch: /connect_exception|IIS Windows Server/ reindex: body: source: diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java index 2e511d15622..10a2da21ac9 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java @@ -19,19 +19,17 @@ package org.elasticsearch.http.netty4; -import io.netty.handler.codec.http.FullHttpRequest; -import io.netty.handler.codec.http.HttpRequest; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.transport.netty4.Netty4Utils; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestUtils; - import io.netty.channel.Channel; +import io.netty.handler.codec.http.FullHttpRequest; import io.netty.handler.codec.http.HttpMethod; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.transport.netty4.Netty4Utils; + import java.net.SocketAddress; -import java.util.HashMap; import java.util.Map; public class Netty4HttpRequest extends RestRequest { @@ -40,8 +38,8 @@ public class Netty4HttpRequest extends RestRequest { private final Channel channel; private final BytesReference content; - Netty4HttpRequest(FullHttpRequest request, Channel channel) { - super(request.uri()); + Netty4HttpRequest(NamedXContentRegistry xContentRegistry, FullHttpRequest request, Channel channel) { + super(xContentRegistry, request.uri()); this.request = request; this.channel = channel; if (request.content().isReadable()) { diff --git 
a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java index 6e12970c6df..a83c62e5221 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java @@ -25,6 +25,7 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import io.netty.handler.codec.http.DefaultFullHttpRequest; import io.netty.handler.codec.http.FullHttpRequest; + import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.netty4.pipelining.HttpPipelinedRequest; import org.elasticsearch.transport.netty4.Netty4Utils; @@ -65,7 +66,7 @@ class Netty4HttpRequestHandler extends SimpleChannelInboundHandler { request.headers(), request.trailingHeaders()); - final Netty4HttpRequest httpRequest = new Netty4HttpRequest(copy, ctx.channel()); + final Netty4HttpRequest httpRequest = new Netty4HttpRequest(serverTransport.xContentRegistry, copy, ctx.channel()); serverTransport.dispatchRequest( httpRequest, new Netty4HttpChannel(serverTransport, httpRequest, pipelinedRequest, detailedErrorsEnabled, threadContext)); diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 90ca1b9a6a5..00b86d813ad 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -61,6 +61,8 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import 
org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.http.BindHttpException; import org.elasticsearch.http.HttpInfo; import org.elasticsearch.http.HttpServerAdapter; @@ -193,6 +195,10 @@ public class Netty4HttpServerTransport extends AbstractLifecycleComponent implem protected final boolean detailedErrorsEnabled; protected final ThreadPool threadPool; + /** + * The registry used to construct parsers so they support {@link XContentParser#namedObject(Class, String, Object)}. + */ + protected final NamedXContentRegistry xContentRegistry; protected final boolean tcpNoDelay; protected final boolean tcpKeepAlive; @@ -218,11 +224,13 @@ public class Netty4HttpServerTransport extends AbstractLifecycleComponent implem private final Netty4CorsConfig corsConfig; - public Netty4HttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool) { + public Netty4HttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, + NamedXContentRegistry xContentRegistry) { super(settings); this.networkService = networkService; this.bigArrays = bigArrays; this.threadPool = threadPool; + this.xContentRegistry = xContentRegistry; ByteSizeValue maxContentLength = SETTING_HTTP_MAX_CONTENT_LENGTH.get(settings); this.maxChunkSize = SETTING_HTTP_MAX_CHUNK_SIZE.get(settings); @@ -543,9 +551,9 @@ public class Netty4HttpServerTransport extends AbstractLifecycleComponent implem private final Netty4HttpRequestHandler requestHandler; protected HttpChannelHandler( - final Netty4HttpServerTransport transport, - final boolean detailedErrorsEnabled, - final ThreadContext threadContext) { + final Netty4HttpServerTransport transport, + final boolean detailedErrorsEnabled, + final ThreadContext 
threadContext) { this.transport = transport; this.requestHandler = new Netty4HttpRequestHandler(transport, detailedErrorsEnabled, threadContext); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java index 32170a7d499..6a435c19efa 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.netty4.Netty4HttpServerTransport; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -92,10 +93,9 @@ public class Netty4Plugin extends Plugin implements NetworkPlugin { @Override public Map> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays, - CircuitBreakerService circuitBreakerService, - NamedWriteableRegistry namedWriteableRegistry, - NetworkService networkService) { + CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, + NamedXContentRegistry xContentRegistry, NetworkService networkService) { return Collections.singletonMap(NETTY_HTTP_TRANSPORT_NAME, - () -> new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool)); + () -> new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool, xContentRegistry)); } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java index 
a5e0381b3fd..457b2242af4 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java @@ -41,6 +41,7 @@ import io.netty.handler.codec.http.HttpResponse; import io.netty.handler.codec.http.HttpVersion; import io.netty.util.Attribute; import io.netty.util.AttributeKey; + import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; @@ -187,12 +188,12 @@ public class Netty4HttpChannelTests extends ESTestCase { public void testHeadersSet() { Settings settings = Settings.builder().build(); try (Netty4HttpServerTransport httpServerTransport = - new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool)) { + new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool, xContentRegistry())) { httpServerTransport.start(); final FullHttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"); httpRequest.headers().add(HttpHeaderNames.ORIGIN, "remote"); final WriteCapturingChannel writeCapturingChannel = new WriteCapturingChannel(); - Netty4HttpRequest request = new Netty4HttpRequest(httpRequest, writeCapturingChannel); + Netty4HttpRequest request = new Netty4HttpRequest(xContentRegistry(), httpRequest, writeCapturingChannel); // send a response Netty4HttpChannel channel = @@ -217,7 +218,7 @@ public class Netty4HttpChannelTests extends ESTestCase { public void testConnectionClose() throws Exception { final Settings settings = Settings.builder().build(); try (Netty4HttpServerTransport httpServerTransport = - new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool)) { + new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool, xContentRegistry())) { httpServerTransport.start(); final FullHttpRequest httpRequest; final 
boolean close = randomBoolean(); @@ -233,7 +234,7 @@ public class Netty4HttpChannelTests extends ESTestCase { } } final EmbeddedChannel embeddedChannel = new EmbeddedChannel(); - final Netty4HttpRequest request = new Netty4HttpRequest(httpRequest, embeddedChannel); + final Netty4HttpRequest request = new Netty4HttpRequest(xContentRegistry(), httpRequest, embeddedChannel); // send a response, the channel close status should match assertTrue(embeddedChannel.isOpen()); @@ -252,7 +253,7 @@ public class Netty4HttpChannelTests extends ESTestCase { private FullHttpResponse executeRequest(final Settings settings, final String originValue, final String host) { // construct request and send it over the transport layer try (Netty4HttpServerTransport httpServerTransport = - new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool)) { + new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool, xContentRegistry())) { httpServerTransport.start(); final FullHttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"); if (originValue != null) { @@ -260,7 +261,7 @@ public class Netty4HttpChannelTests extends ESTestCase { } httpRequest.headers().add(HttpHeaderNames.HOST, host); final WriteCapturingChannel writeCapturingChannel = new WriteCapturingChannel(); - final Netty4HttpRequest request = new Netty4HttpRequest(httpRequest, writeCapturingChannel); + final Netty4HttpRequest request = new Netty4HttpRequest(xContentRegistry(), httpRequest, writeCapturingChannel); Netty4HttpChannel channel = new Netty4HttpChannel(httpServerTransport, request, null, randomBoolean(), threadPool.getThreadContext()); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index 701baf80aed..5c7a249f74a 100644 --- 
a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -159,7 +159,8 @@ public class Netty4HttpServerPipeliningTests extends ESTestCase { super(settings, Netty4HttpServerPipeliningTests.this.networkService, Netty4HttpServerPipeliningTests.this.bigArrays, - Netty4HttpServerPipeliningTests.this.threadPool); + Netty4HttpServerPipeliningTests.this.threadPool, + xContentRegistry()); } @Override diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index 498daf63226..7481ba4c3a3 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -120,7 +120,8 @@ public class Netty4HttpServerTransportTests extends ESTestCase { * Test that {@link Netty4HttpServerTransport} supports the "Expect: 100-continue" HTTP header */ public void testExpectContinueHeader() throws Exception { - try (Netty4HttpServerTransport transport = new Netty4HttpServerTransport(Settings.EMPTY, networkService, bigArrays, threadPool)) { + try (Netty4HttpServerTransport transport = new Netty4HttpServerTransport(Settings.EMPTY, networkService, bigArrays, threadPool, + xContentRegistry())) { transport.httpServerAdapter((request, channel, context) -> channel.sendResponse(new BytesRestResponse(OK, BytesRestResponse.TEXT_CONTENT_TYPE, new BytesArray("done")))); transport.start(); @@ -143,12 +144,13 @@ public class Netty4HttpServerTransportTests extends ESTestCase { } public void testBindUnavailableAddress() { - try (Netty4HttpServerTransport transport = new Netty4HttpServerTransport(Settings.EMPTY, 
networkService, bigArrays, threadPool)) { + try (Netty4HttpServerTransport transport = new Netty4HttpServerTransport(Settings.EMPTY, networkService, bigArrays, threadPool, + xContentRegistry())) { transport.start(); TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses()); Settings settings = Settings.builder().put("http.port", remoteAddress.getPort()).build(); - try (Netty4HttpServerTransport otherTransport = new Netty4HttpServerTransport(settings, networkService, bigArrays, - threadPool)) { + try (Netty4HttpServerTransport otherTransport = new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool, + xContentRegistry())) { BindHttpException bindHttpException = expectThrows(BindHttpException.class, () -> otherTransport.start()); assertEquals("Failed to bind to [" + remoteAddress.getPort() + "]", bindHttpException.getMessage()); } diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java index 7499232346d..1250fb38a97 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.discovery.zen.UnicastZenPing; @@ -67,12 +68,13 @@ public class FileBasedDiscoveryPlugin extends Plugin implements DiscoveryPlugin @Override public Collection createComponents( - Client client, - 
ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - SearchRequestParsers searchRequestParsers) { + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + SearchRequestParsers searchRequestParsers, + NamedXContentRegistry xContentRegistry) { final int concurrentConnects = UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings); final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(settings, "[file_based_discovery_resolve]"); fileBasedDiscoveryExecutorService = EsExecutors.newScaling( diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginTests.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginTests.java index 3dc378f6cb0..f8f6ade8634 100644 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginTests.java +++ b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginTests.java @@ -23,17 +23,23 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; + public class FileBasedDiscoveryPluginTests extends ESTestCase { - public void testHostsProviderBwc() { + public void testHostsProviderBwc() throws IOException { FileBasedDiscoveryPlugin plugin = new FileBasedDiscoveryPlugin(Settings.EMPTY); Settings additionalSettings = plugin.additionalSettings(); assertEquals("file", additionalSettings.get(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey())); + assertWarnings("Using discovery.type setting to set hosts provider is deprecated. 
" + + "Set \"discovery.zen.hosts_provider: file\" instead"); } - public void testHostsProviderExplicit() { + public void testHostsProviderExplicit() throws IOException { Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "foo").build(); FileBasedDiscoveryPlugin plugin = new FileBasedDiscoveryPlugin(settings); assertEquals(Settings.EMPTY, plugin.additionalSettings()); + assertWarnings("Using discovery.type setting to set hosts provider is deprecated. " + + "Set \"discovery.zen.hosts_provider: file\" instead"); } } \ No newline at end of file diff --git a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java index 0ba79ecc9fb..f7f474711be 100644 --- a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java +++ b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java @@ -38,6 +38,7 @@ import java.util.Map; import java.util.Set; import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; +import static org.elasticsearch.ingest.ConfigurationUtils.readBooleanProperty; import static org.elasticsearch.ingest.ConfigurationUtils.readIntProperty; import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalList; import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; @@ -52,23 +53,36 @@ public final class AttachmentProcessor extends AbstractProcessor { private final String targetField; private final Set properties; private final int indexedChars; + private final boolean ignoreMissing; AttachmentProcessor(String tag, String field, String targetField, Set properties, - int indexedChars) throws IOException { + int indexedChars, boolean ignoreMissing) throws IOException { super(tag); this.field = field; this.targetField = targetField; 
this.properties = properties; this.indexedChars = indexedChars; + this.ignoreMissing = ignoreMissing; + } + + boolean isIgnoreMissing() { + return ignoreMissing; } @Override public void execute(IngestDocument ingestDocument) { Map additionalFields = new HashMap<>(); + byte[] input = ingestDocument.getFieldValueAsBytes(field, ignoreMissing); + + if (input == null && ignoreMissing) { + return; + } else if (input == null) { + throw new IllegalArgumentException("field [" + field + "] is null, cannot parse."); + } + try { Metadata metadata = new Metadata(); - byte[] input = ingestDocument.getFieldValueAsBytes(field); String parsedContent = TikaImpl.parse(input, metadata, indexedChars); if (properties.contains(Property.CONTENT) && Strings.hasLength(parsedContent)) { @@ -166,6 +180,7 @@ public final class AttachmentProcessor extends AbstractProcessor { String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "attachment"); List properyNames = readOptionalList(TYPE, processorTag, config, "properties"); int indexedChars = readIntProperty(TYPE, processorTag, config, "indexed_chars", NUMBER_OF_CHARS_INDEXED); + boolean ignoreMissing = readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); final Set properties; if (properyNames != null) { @@ -182,7 +197,7 @@ public final class AttachmentProcessor extends AbstractProcessor { properties = DEFAULT_PROPERTIES; } - return new AttachmentProcessor(processorTag, field, targetField, properties, indexedChars); + return new AttachmentProcessor(processorTag, field, targetField, properties, indexedChars, ignoreMissing); } } diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java index 2848d6c3c64..376214eb173 100644 --- 
a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java @@ -52,6 +52,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("attachment")); assertThat(processor.getProperties(), sameInstance(AttachmentProcessor.Factory.DEFAULT_PROPERTIES)); + assertFalse(processor.isIgnoreMissing()); } public void testConfigureIndexedChars() throws Exception { @@ -64,6 +65,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { AttachmentProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getIndexedChars(), is(indexedChars)); + assertFalse(processor.isIgnoreMissing()); } public void testBuildTargetField() throws Exception { @@ -73,6 +75,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { AttachmentProcessor processor = factory.create(null, null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("_field")); + assertFalse(processor.isIgnoreMissing()); } public void testBuildFields() throws Exception { @@ -90,6 +93,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { AttachmentProcessor processor = factory.create(null, null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getProperties(), equalTo(properties)); + assertFalse(processor.isIgnoreMissing()); } public void testBuildIllegalFieldOption() throws Exception { @@ -117,4 +121,19 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]")); } } + + public void testIgnoreMissing() throws Exception { + 
Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("ignore_missing", true); + + String processorTag = randomAsciiOfLength(10); + + AttachmentProcessor processor = factory.create(null, processorTag, config); + assertThat(processor.getTag(), equalTo(processorTag)); + assertThat(processor.getField(), equalTo("_field")); + assertThat(processor.getTargetField(), equalTo("attachment")); + assertThat(processor.getProperties(), sameInstance(AttachmentProcessor.Factory.DEFAULT_PROPERTIES)); + assertTrue(processor.isIgnoreMissing()); + } } diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index 3708a290dec..b59457b5b01 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.ingest.attachment; import org.apache.commons.io.IOUtils; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -30,14 +31,17 @@ import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Base64; +import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; +import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static 
org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -52,7 +56,7 @@ public class AttachmentProcessorTests extends ESTestCase { @Before public void createStandardProcessor() throws IOException { processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", - "target_field", EnumSet.allOf(AttachmentProcessor.Property.class), 10000); + "target_field", EnumSet.allOf(AttachmentProcessor.Property.class), 10000, false); } public void testEnglishTextDocument() throws Exception { @@ -85,7 +89,7 @@ public class AttachmentProcessorTests extends ESTestCase { selectedProperties.add(AttachmentProcessor.Property.DATE); } processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", - "target_field", selectedProperties, 10000); + "target_field", selectedProperties, 10000, false); Map attachmentData = parseDocument("htmlWithEmptyDateMeta.html", processor); assertThat(attachmentData.keySet(), hasSize(selectedFieldNames.length)); @@ -199,6 +203,40 @@ public class AttachmentProcessorTests extends ESTestCase { assertThat(attachmentData.get("content_length"), is(notNullValue())); } + public void testNullValueWithIgnoreMissing() throws Exception { + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), + Collections.singletonMap("source_field", null)); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + Processor processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", "randomTarget", null, 10, true); + processor.execute(ingestDocument); + assertIngestDocument(originalIngestDocument, ingestDocument); + } + + public void testNonExistentWithIgnoreMissing() throws Exception { + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + Processor processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", 
"randomTarget", null, 10, true); + processor.execute(ingestDocument); + assertIngestDocument(originalIngestDocument, ingestDocument); + } + + public void testNullWithoutIgnoreMissing() throws Exception { + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), + Collections.singletonMap("source_field", null)); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + Processor processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", "randomTarget", null, 10, false); + Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); + assertThat(exception.getMessage(), equalTo("field [source_field] is null, cannot parse.")); + } + + public void testNonExistentWithoutIgnoreMissing() throws Exception { + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + Processor processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", "randomTarget", null, 10, false); + Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); + assertThat(exception.getMessage(), equalTo("field [source_field] not present as part of path [source_field]")); + } + private Map parseDocument(String file, AttachmentProcessor processor) throws Exception { Map document = new HashMap<>(); document.put("source_field", getAsBase64(file)); diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 95a0b85dba3..542aa5e6b3f 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -50,6 +50,7 @@ import org.elasticsearch.ingest.IngestDocument; 
import org.elasticsearch.ingest.Processor; import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; +import static org.elasticsearch.ingest.ConfigurationUtils.readBooleanProperty; import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalList; import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; @@ -63,18 +64,32 @@ public final class GeoIpProcessor extends AbstractProcessor { private final String targetField; private final DatabaseReader dbReader; private final Set properties; + private final boolean ignoreMissing; - GeoIpProcessor(String tag, String field, DatabaseReader dbReader, String targetField, Set properties) throws IOException { + GeoIpProcessor(String tag, String field, DatabaseReader dbReader, String targetField, Set properties, + boolean ignoreMissing) throws IOException { super(tag); this.field = field; this.targetField = targetField; this.dbReader = dbReader; this.properties = properties; + this.ignoreMissing = ignoreMissing; + } + + boolean isIgnoreMissing() { + return ignoreMissing; } @Override public void execute(IngestDocument ingestDocument) { - String ip = ingestDocument.getFieldValue(field, String.class); + String ip = ingestDocument.getFieldValue(field, String.class, ignoreMissing); + + if (ip == null && ignoreMissing) { + return; + } else if (ip == null) { + throw new IllegalArgumentException("field [" + field + "] is null, cannot extract geoip information."); + } + final InetAddress ipAddress = InetAddresses.forString(ip); Map geoData; @@ -268,6 +283,7 @@ public final class GeoIpProcessor extends AbstractProcessor { String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "geoip"); String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb.gz"); List propertyNames = readOptionalList(TYPE, processorTag, config, "properties"); + boolean ignoreMissing = readBooleanProperty(TYPE, processorTag, config, 
"ignore_missing", false); DatabaseReader databaseReader = databaseReaders.get(databaseFile); if (databaseReader == null) { @@ -298,7 +314,7 @@ public final class GeoIpProcessor extends AbstractProcessor { } } - return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, properties); + return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, properties, ignoreMissing); } } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 162137b5f3c..ca0b5964dc2 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -85,6 +85,24 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City")); assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_CITY_PROPERTIES)); + assertFalse(processor.isIgnoreMissing()); + } + + public void testSetIgnoreMissing() throws Exception { + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("ignore_missing", true); + String processorTag = randomAsciiOfLength(10); + + GeoIpProcessor processor = factory.create(null, processorTag, config); + assertThat(processor.getTag(), equalTo(processorTag)); + assertThat(processor.getField(), equalTo("_field")); + assertThat(processor.getTargetField(), equalTo("geoip")); + assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City")); + assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_CITY_PROPERTIES)); + 
assertTrue(processor.isIgnoreMissing()); } public void testCountryBuildDefaults() throws Exception { @@ -102,6 +120,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES)); + assertFalse(processor.isIgnoreMissing()); } public void testBuildTargetField() throws Exception { @@ -112,6 +131,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { GeoIpProcessor processor = factory.create(null, null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("_field")); + assertFalse(processor.isIgnoreMissing()); } public void testBuildDbFile() throws Exception { @@ -124,6 +144,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES)); + assertFalse(processor.isIgnoreMissing()); } public void testBuildWithCountryDbAndCityFields() throws Exception { @@ -174,6 +195,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { GeoIpProcessor processor = factory.create(null, null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getProperties(), equalTo(properties)); + assertFalse(processor.isIgnoreMissing()); } public void testBuildIllegalFieldOption() throws Exception { diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 3b2f65e2814..221c06d3b68 100644 --- 
a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -20,17 +20,20 @@ package org.elasticsearch.ingest.geoip; import com.maxmind.geoip2.DatabaseReader; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.io.InputStream; +import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.Map; import java.util.zip.GZIPInputStream; +import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -40,7 +43,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCity() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class)); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map document = new HashMap<>(); document.put("source_field", "8.8.8.8"); @@ -64,10 +67,52 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(geoData.get("location"), equalTo(location)); } + public void testNullValueWithIgnoreMissing() throws Exception { + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true); + IngestDocument 
originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), + Collections.singletonMap("source_field", null)); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + processor.execute(ingestDocument); + assertIngestDocument(originalIngestDocument, ingestDocument); + } + + public void testNonExistentWithIgnoreMissing() throws Exception { + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + processor.execute(ingestDocument); + assertIngestDocument(originalIngestDocument, ingestDocument); + } + + public void testNullWithoutIgnoreMissing() throws Exception { + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), + Collections.singletonMap("source_field", null)); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); + assertThat(exception.getMessage(), equalTo("field [source_field] is null, cannot extract geoip information.")); + } + + public void testNonExistentWithoutIgnoreMissing() throws Exception { + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + 
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); + assertThat(exception.getMessage(), equalTo("field [source_field] not present as part of path [source_field]")); + } + public void testCity_withIpV6() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class)); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); String address = "2602:306:33d3:8000::3257:9652"; Map document = new HashMap<>(); @@ -95,7 +140,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCityWithMissingLocation() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class)); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map document = new HashMap<>(); document.put("source_field", "93.114.45.13"); @@ -112,7 +157,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCountry() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb.gz"); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), 
"target_field", EnumSet.allOf(GeoIpProcessor.Property.class)); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map document = new HashMap<>(); document.put("source_field", "82.170.213.79"); @@ -132,7 +177,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCountryWithMissingLocation() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb.gz"); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class)); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map document = new HashMap<>(); document.put("source_field", "93.114.45.13"); @@ -149,7 +194,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testAddressIsNotInTheDatabase() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class)); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map document = new HashMap<>(); document.put("source_field", "127.0.0.1"); @@ -162,7 +207,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testInvalid() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class)); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map 
document = new HashMap<>(); document.put("source_field", "www.google.com"); diff --git a/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java index af764d5baf2..55c79adb7e0 100644 --- a/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java +++ b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.useragent; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -52,7 +53,8 @@ final class UserAgentParser { } private void init(InputStream regexStream) throws IOException { - XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML).createParser(regexStream); + // EMPTY is safe here because we don't use namedObject + XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML).createParser(NamedXContentRegistry.EMPTY, regexStream); XContentParser.Token token = yamlParser.nextToken(); diff --git a/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java index ec18126457b..93f210c427b 100644 --- a/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java +++ b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java @@ -34,6 +34,7 @@ import java.util.Map; import java.util.Set; import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; +import static 
org.elasticsearch.ingest.ConfigurationUtils.readBooleanProperty; import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalList; import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; @@ -44,20 +45,32 @@ public class UserAgentProcessor extends AbstractProcessor { private final String field; private final String targetField; private final Set properties; - private final UserAgentParser parser; + private final boolean ignoreMissing; - public UserAgentProcessor(String tag, String field, String targetField, UserAgentParser parser, Set properties) { + public UserAgentProcessor(String tag, String field, String targetField, UserAgentParser parser, Set properties, + boolean ignoreMissing) { super(tag); this.field = field; this.targetField = targetField; this.parser = parser; this.properties = properties; + this.ignoreMissing = ignoreMissing; + } + + boolean isIgnoreMissing() { + return ignoreMissing; } @Override public void execute(IngestDocument ingestDocument) throws Exception { - String userAgent = ingestDocument.getFieldValue(field, String.class); + String userAgent = ingestDocument.getFieldValue(field, String.class, ignoreMissing); + + if (userAgent == null && ignoreMissing) { + return; + } else if (userAgent == null) { + throw new IllegalArgumentException("field [" + field + "] is null, cannot parse user-agent."); + } Details uaClient = parser.parse(userAgent); @@ -99,7 +112,7 @@ public class UserAgentProcessor extends AbstractProcessor { else { uaDetails.put("os", "Other"); } - + break; case OS_NAME: if (uaClient.operatingSystem != null && uaClient.operatingSystem.name != null) { @@ -168,7 +181,7 @@ public class UserAgentProcessor extends AbstractProcessor { public String getType() { return TYPE; } - + String getField() { return field; } @@ -180,7 +193,7 @@ public class UserAgentProcessor extends AbstractProcessor { Set getProperties() { return properties; } - + UserAgentParser getUaParser() { return parser; } @@ -188,7 +201,7 @@ 
public class UserAgentProcessor extends AbstractProcessor { public static final class Factory implements Processor.Factory { private final Map userAgentParsers; - + public Factory(Map userAgentParsers) { this.userAgentParsers = userAgentParsers; } @@ -200,13 +213,14 @@ public class UserAgentProcessor extends AbstractProcessor { String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "user_agent"); String regexFilename = readStringProperty(TYPE, processorTag, config, "regex_file", IngestUserAgentPlugin.DEFAULT_PARSER_NAME); List propertyNames = readOptionalList(TYPE, processorTag, config, "properties"); + boolean ignoreMissing = readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); UserAgentParser parser = userAgentParsers.get(regexFilename); if (parser == null) { throw newConfigurationException(TYPE, processorTag, "regex_file", "regex file [" + regexFilename + "] doesn't exist (has to exist at node startup)"); } - + final Set properties; if (propertyNames != null) { properties = EnumSet.noneOf(Property.class); @@ -221,7 +235,7 @@ public class UserAgentProcessor extends AbstractProcessor { properties = EnumSet.allOf(Property.class); } - return new UserAgentProcessor(processorTag, field, targetField, parser, properties); + return new UserAgentProcessor(processorTag, field, targetField, parser, properties, ignoreMissing); } } diff --git a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java index 10c6ccd7941..c1c46283076 100644 --- a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java +++ b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java @@ -89,6 +89,27 @@ public class UserAgentProcessorFactoryTests extends ESTestCase { 
assertThat(processor.getUaParser().getOsPatterns().size(), greaterThan(0)); assertThat(processor.getUaParser().getDevicePatterns().size(), greaterThan(0)); assertThat(processor.getProperties(), equalTo(EnumSet.allOf(UserAgentProcessor.Property.class))); + assertFalse(processor.isIgnoreMissing()); + } + + public void testBuildWithIgnoreMissing() throws Exception { + UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("ignore_missing", true); + + String processorTag = randomAsciiOfLength(10); + + UserAgentProcessor processor = factory.create(null, processorTag, config); + assertThat(processor.getTag(), equalTo(processorTag)); + assertThat(processor.getField(), equalTo("_field")); + assertThat(processor.getTargetField(), equalTo("user_agent")); + assertThat(processor.getUaParser().getUaPatterns().size(), greaterThan(0)); + assertThat(processor.getUaParser().getOsPatterns().size(), greaterThan(0)); + assertThat(processor.getUaParser().getDevicePatterns().size(), greaterThan(0)); + assertThat(processor.getProperties(), equalTo(EnumSet.allOf(UserAgentProcessor.Property.class))); + assertTrue(processor.isIgnoreMissing()); } public void testBuildTargetField() throws Exception { diff --git a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java index d9b5eed059a..2c9e72ae9d9 100644 --- a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java +++ b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java @@ -27,55 +27,96 @@ import org.junit.BeforeClass; import java.io.IOException; import java.io.InputStream; +import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.Map; 
+import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.is; public class UserAgentProcessorTests extends ESTestCase { private static UserAgentProcessor processor; - + @BeforeClass public static void setupProcessor() throws IOException { InputStream regexStream = UserAgentProcessor.class.getResourceAsStream("/regexes.yaml"); assertNotNull(regexStream); - + UserAgentParser parser = new UserAgentParser(randomAsciiOfLength(10), regexStream, new UserAgentCache(1000)); - + processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", parser, - EnumSet.allOf(UserAgentProcessor.Property.class)); + EnumSet.allOf(UserAgentProcessor.Property.class), false); } - + + public void testNullValueWithIgnoreMissing() throws Exception { + UserAgentProcessor processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", null, + EnumSet.allOf(UserAgentProcessor.Property.class), true); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), + Collections.singletonMap("source_field", null)); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + processor.execute(ingestDocument); + assertIngestDocument(originalIngestDocument, ingestDocument); + } + + public void testNonExistentWithIgnoreMissing() throws Exception { + UserAgentProcessor processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", null, + EnumSet.allOf(UserAgentProcessor.Property.class), true); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + processor.execute(ingestDocument); + assertIngestDocument(originalIngestDocument, ingestDocument); + } + + public void 
testNullWithoutIgnoreMissing() throws Exception { + UserAgentProcessor processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", null, + EnumSet.allOf(UserAgentProcessor.Property.class), false); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), + Collections.singletonMap("source_field", null)); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); + assertThat(exception.getMessage(), equalTo("field [source_field] is null, cannot parse user-agent.")); + } + + public void testNonExistentWithoutIgnoreMissing() throws Exception { + UserAgentProcessor processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", null, + EnumSet.allOf(UserAgentProcessor.Property.class), false); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); + assertThat(exception.getMessage(), equalTo("field [source_field] not present as part of path [source_field]")); + } + @SuppressWarnings("unchecked") public void testCommonBrowser() throws Exception { Map document = new HashMap<>(); document.put("source_field", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - + processor.execute(ingestDocument); Map data = ingestDocument.getSourceAndMetadata(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); - + assertThat(target.get("name"), is("Chrome")); assertThat(target.get("major"), is("33")); assertThat(target.get("minor"), is("0")); 
assertThat(target.get("patch"), is("1750")); assertNull(target.get("build")); - + assertThat(target.get("os"), is("Mac OS X 10.9.2")); assertThat(target.get("os_name"), is("Mac OS X")); assertThat(target.get("os_major"), is("10")); assertThat(target.get("os_minor"), is("9")); - + assertThat(target.get("device"), is("Other")); } - + @SuppressWarnings("unchecked") public void testUncommonDevice() throws Exception { Map document = new HashMap<>(); @@ -83,78 +124,78 @@ public class UserAgentProcessorTests extends ESTestCase { "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/525.10+ " + "(KHTML, like Gecko) Version/3.0.4 Mobile Safari/523.12.2"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - + processor.execute(ingestDocument); Map data = ingestDocument.getSourceAndMetadata(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); - + assertThat(target.get("name"), is("Android")); assertThat(target.get("major"), is("3")); assertThat(target.get("minor"), is("0")); assertNull(target.get("patch")); assertNull(target.get("build")); - + assertThat(target.get("os"), is("Android 3.0")); assertThat(target.get("os_name"), is("Android")); assertThat(target.get("os_major"), is("3")); assertThat(target.get("os_minor"), is("0")); - + assertThat(target.get("device"), is("Motorola Xoom")); } - + @SuppressWarnings("unchecked") public void testSpider() throws Exception { Map document = new HashMap<>(); document.put("source_field", "Mozilla/5.0 (compatible; EasouSpider; +http://www.easou.com/search/spider.html)"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - + processor.execute(ingestDocument); Map data = ingestDocument.getSourceAndMetadata(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); - + assertThat(target.get("name"), is("EasouSpider")); assertNull(target.get("major")); 
assertNull(target.get("minor")); assertNull(target.get("patch")); assertNull(target.get("build")); - + assertThat(target.get("os"), is("Other")); assertThat(target.get("os_name"), is("Other")); assertNull(target.get("os_major")); assertNull(target.get("os_minor")); - + assertThat(target.get("device"), is("Spider")); } - + @SuppressWarnings("unchecked") public void testUnknown() throws Exception { Map document = new HashMap<>(); document.put("source_field", "Something I made up v42.0.1"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - + processor.execute(ingestDocument); Map data = ingestDocument.getSourceAndMetadata(); assertThat(data, hasKey("target_field")); Map target = (Map) data.get("target_field"); - + assertThat(target.get("name"), is("Other")); assertNull(target.get("major")); assertNull(target.get("minor")); assertNull(target.get("patch")); assertNull(target.get("build")); - + assertThat(target.get("os"), is("Other")); assertThat(target.get("os_name"), is("Other")); assertNull(target.get("os_major")); assertNull(target.get("os_minor")); - + assertThat(target.get("device"), is("Other")); } } diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java index af5886bb6f9..325ac726713 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java @@ -63,8 +63,8 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { Supplier queryShardContext = () -> { return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); }); }; - parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), - 
indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext); + parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), indexService.getIndexAnalyzers(), + indexService.xContentRegistry(), indexService.similarityService(), mapperRegistry, queryShardContext); } @Override @@ -160,8 +160,9 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { Supplier queryShardContext = () -> { return indexService2x.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); }); }; - DocumentMapperParser parser = new DocumentMapperParser(indexService2x.getIndexSettings(), indexService2x.mapperService(), indexService2x.getIndexAnalyzers(), - indexService2x.similarityService(), mapperRegistry, queryShardContext); + DocumentMapperParser parser = new DocumentMapperParser(indexService2x.getIndexSettings(), indexService2x.mapperService(), + indexService2x.getIndexAnalyzers(), indexService2x.xContentRegistry(), indexService2x.similarityService(), mapperRegistry, + queryShardContext); DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, defaultMapper.mappingSource().string()); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index 14595d13448..7e37cc1069d 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.repositories.s3; -import java.io.IOException; - import com.amazonaws.Protocol; import com.amazonaws.services.s3.AbstractAmazonS3; import com.amazonaws.services.s3.AmazonS3; @@ -34,6 +32,8 @@ import org.elasticsearch.repositories.RepositoryException; import 
org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; +import java.io.IOException; + import static org.elasticsearch.repositories.s3.S3Repository.Repositories; import static org.elasticsearch.repositories.s3.S3Repository.Repository; import static org.elasticsearch.repositories.s3.S3Repository.getValue; @@ -111,11 +111,14 @@ public class S3RepositoryTests extends ESTestCase { .put(Repository.BASE_PATH_SETTING.getKey(), "/foo/bar").build()); S3Repository s3repo = new S3Repository(metadata, Settings.EMPTY, new DummyS3Service()); assertEquals("foo/bar/", s3repo.basePath().buildAsString()); // make sure leading `/` is removed and trailing is added - + assertWarnings("S3 repository base_path" + + " trimming the leading `/`, and leading `/` will not be supported for the S3 repository in future releases"); metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.EMPTY); Settings settings = Settings.builder().put(Repositories.BASE_PATH_SETTING.getKey(), "/foo/bar").build(); s3repo = new S3Repository(metadata, settings, new DummyS3Service()); assertEquals("foo/bar/", s3repo.basePath().buildAsString()); // make sure leading `/` is removed and trailing is added + assertWarnings("S3 repository base_path" + + " trimming the leading `/`, and leading `/` will not be supported for the S3 repository in future releases"); } public void testDefaultBufferSize() { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java index 23da3a99df5..fa04b51ff59 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java @@ -19,12 +19,14 @@ package org.elasticsearch.bootstrap; +import java.util.Map; + import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.common.SuppressForbidden; -import 
org.elasticsearch.test.ESTestCase; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; public class EvilElasticsearchCliTests extends ESElasticsearchCliTestCase { @@ -39,8 +41,11 @@ public class EvilElasticsearchCliTests extends ESElasticsearchCliTestCase { true, output -> {}, (foreground, pidFile, quiet, esSettings) -> { - assertThat(esSettings.size(), equalTo(1)); - assertThat(esSettings, hasEntry("path.home", value)); + Map settings = esSettings.getAsMap(); + settings.keySet().forEach(System.out::println); + assertThat(settings.size(), equalTo(2)); + assertThat(settings, hasEntry("path.home", value)); + assertThat(settings, hasKey("path.logs")); // added by env initialization }); System.clearProperty("es.path.home"); @@ -50,8 +55,10 @@ public class EvilElasticsearchCliTests extends ESElasticsearchCliTestCase { true, output -> {}, (foreground, pidFile, quiet, esSettings) -> { - assertThat(esSettings.size(), equalTo(1)); - assertThat(esSettings, hasEntry("path.home", commandLineValue)); + Map settings = esSettings.getAsMap(); + assertThat(settings.size(), equalTo(2)); + assertThat(settings, hasEntry("path.home", commandLineValue)); + assertThat(settings, hasKey("path.logs")); // added by env initialization }, "-Epath.home=" + commandLineValue); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index dd04d2de50e..1c706cb7b4f 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -96,6 +96,7 @@ public class EvilLoggerTests extends ESTestCase { Level.WARN, "org.elasticsearch.common.logging.DeprecationLogger.deprecated", "This is a deprecation message"); + assertWarnings("This is a deprecation message"); } public void 
testFindAppender() throws IOException, UserException { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 70c5f633da1..96e64e5c888 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -207,8 +207,7 @@ public class InstallPluginCommandTests extends ESTestCase { } static MockTerminal installPlugin(String pluginUrl, Path home, boolean jarHellCheck) throws Exception { - Map settings = new HashMap<>(); - settings.put("path.home", home.toString()); + Environment env = new Environment(Settings.builder().put("path.home", home).build()); MockTerminal terminal = new MockTerminal(); new InstallPluginCommand() { @Override @@ -217,7 +216,7 @@ public class InstallPluginCommandTests extends ESTestCase { super.jarHellCheck(candidate, pluginsDir); } } - }.execute(terminal, pluginUrl, true, settings); + }.execute(terminal, pluginUrl, true, env); return terminal; } @@ -680,13 +679,11 @@ public class InstallPluginCommandTests extends ESTestCase { // if batch is enabled, we also want to add a security policy String pluginZip = createPlugin("fake", pluginDir, isBatch); - Map settings = new HashMap<>(); - settings.put("path.home", env.v1().toString()); new InstallPluginCommand() { @Override void jarHellCheck(Path candidate, Path pluginsDir) throws Exception { } - }.execute(terminal, pluginZip, isBatch, settings); + }.execute(terminal, pluginZip, isBatch, env.v2()); } // TODO: test checksum (need maven/official below) diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index d7d3efb9742..6306564e6a3 100644 --- 
a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -61,10 +61,9 @@ public class RemovePluginCommandTests extends ESTestCase { } static MockTerminal removePlugin(String name, Path home) throws Exception { - Map settings = new HashMap<>(); - settings.put("path.home", home.toString()); + Environment env = new Environment(Settings.builder().put("path.home", home).build()); MockTerminal terminal = new MockTerminal(); - new RemovePluginCommand().execute(terminal, name, settings); + new RemovePluginCommand().execute(terminal, name, env); return terminal; } diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java index dd2320a6855..f01011b5f61 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java @@ -20,6 +20,9 @@ package org.elasticsearch.bootstrap; import org.elasticsearch.cli.MockTerminal; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import java.nio.file.Path; @@ -32,7 +35,7 @@ import static org.hamcrest.CoreMatchers.equalTo; abstract class ESElasticsearchCliTestCase extends ESTestCase { interface InitConsumer { - void accept(final boolean foreground, final Path pidFile, final boolean quiet, final Map esSettings); + void accept(final boolean foreground, final Path pidFile, final boolean quiet, final Settings initialSettings); } void runTest( @@ -42,13 +45,21 @@ abstract class ESElasticsearchCliTestCase extends ESTestCase { final InitConsumer initConsumer, String... 
args) throws Exception { final MockTerminal terminal = new MockTerminal(); + Path home = createTempDir(); try { final AtomicBoolean init = new AtomicBoolean(); final int status = Elasticsearch.main(args, new Elasticsearch() { @Override - void init(final boolean daemonize, final Path pidFile, final boolean quiet, final Map esSettings) { + protected Environment createEnv(Terminal terminal, Map settings) { + Settings realSettings = Settings.builder() + .put("path.home", home) + .put(settings).build(); + return new Environment(realSettings); + } + @Override + void init(final boolean daemonize, final Path pidFile, final boolean quiet, Settings initialSettings) { init.set(true); - initConsumer.accept(!daemonize, pidFile, quiet, esSettings); + initConsumer.accept(!daemonize, pidFile, quiet, initialSettings); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java index 854260f02a4..b01ee7cc5c8 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java @@ -22,6 +22,7 @@ package org.elasticsearch.index; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.mapper.MapperService; @@ -39,12 +40,14 @@ import static org.elasticsearch.test.ESTestCase.createTestAnalysis; public class MapperTestUtils { - public static MapperService newMapperService(Path tempDir, Settings indexSettings) throws IOException { + public static MapperService newMapperService(NamedXContentRegistry xContentRegistry, Path tempDir, Settings indexSettings) + throws IOException { IndicesModule indicesModule = 
new IndicesModule(Collections.emptyList()); - return newMapperService(tempDir, indexSettings, indicesModule); + return newMapperService(xContentRegistry, tempDir, indexSettings, indicesModule); } - public static MapperService newMapperService(Path tempDir, Settings settings, IndicesModule indicesModule) throws IOException { + public static MapperService newMapperService(NamedXContentRegistry xContentRegistry, Path tempDir, Settings settings, + IndicesModule indicesModule) throws IOException { Settings.Builder settingsBuilder = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(settings); @@ -58,6 +61,7 @@ public class MapperTestUtils { SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); return new MapperService(indexSettings, indexAnalyzers, + xContentRegistry, similarityService, mapperRegistry, () -> null); diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 93021be95fe..605b9026c26 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.index.shard; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexNotFoundException; import org.apache.lucene.index.LeafReader; @@ -30,7 +29,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource; import 
org.elasticsearch.cluster.routing.ShardRouting; @@ -262,7 +260,8 @@ public abstract class IndexShardTestCase extends ESTestCase { boolean success = false; try { IndexCache indexCache = new IndexCache(indexSettings, new DisabledQueryCache(indexSettings), null); - MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), indexSettings.getSettings()); + MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), + indexSettings.getSettings()); mapperService.merge(indexMetaData, MapperService.MergeReason.MAPPING_RECOVERY, true); SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); final IndexEventListener indexEventListener = new IndexEventListener() { diff --git a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java index 03f36bc1db2..db00d480a8a 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -296,7 +297,8 @@ public class RandomSearchRequestGenerator { } jsonBuilder.endArray(); jsonBuilder.endObject(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(jsonBuilder.bytes()); + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, + jsonBuilder.bytes()); 
parser.nextToken(); parser.nextToken(); parser.nextToken(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 2055cde567e..2263825b624 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -20,6 +20,7 @@ package org.elasticsearch.test; import com.fasterxml.jackson.core.io.JsonStringEncoder; + import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -47,13 +48,12 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -117,15 +117,16 @@ import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; +import java.util.function.Function; +import java.util.stream.Stream; import static java.util.Collections.emptyList; +import static java.util.stream.Collectors.toList; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static 
org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; public abstract class AbstractQueryTestCase> extends ESTestCase { @@ -156,11 +157,6 @@ public abstract class AbstractQueryTestCase> private static String[] currentTypes; private static String[] randomTypes; - /** - * used to check warning headers of the deprecation logger - */ - private ThreadContext threadContext; - protected static Index getIndex() { return index; } @@ -187,7 +183,6 @@ public abstract class AbstractQueryTestCase> .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false) .build(); indexSettings = Settings.builder() - .put(ParseFieldMatcher.PARSE_STRICT, true) .put(IndexMetaData.SETTING_VERSION_CREATED, indexVersionCreated).build(); index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); @@ -214,20 +209,6 @@ public abstract class AbstractQueryTestCase> serviceHolder = new ServiceHolder(nodeSettings, indexSettings, getPlugins(), this); } serviceHolder.clientInvocationHandler.delegate = this; - this.threadContext = new ThreadContext(Settings.EMPTY); - DeprecationLogger.setThreadContext(threadContext); - } - - /** - * Check that there are no unaccounted warning headers. 
These should be checked with {@link #checkWarningHeaders(String...)} in the - * appropriate test - */ - @After - public void teardown() throws IOException { - final List warnings = threadContext.getResponseHeaders().get(DeprecationLogger.DEPRECATION_HEADER); - assertNull("unexpected warning headers", warnings); - DeprecationLogger.removeThreadContext(this.threadContext); - this.threadContext.close(); } private static SearchContext getSearchContext(String[] types, QueryShardContext context) { @@ -247,7 +228,7 @@ public abstract class AbstractQueryTestCase> } @After - public void afterTest() { + public void afterTest() throws IOException { serviceHolder.clientInvocationHandler.delegate = null; } @@ -421,7 +402,7 @@ public abstract class AbstractQueryTestCase> BytesStreamOutput out = new BytesStreamOutput(); try ( XContentGenerator generator = XContentType.JSON.xContent().createGenerator(out); - XContentParser parser = JsonXContent.jsonXContent.createParser(query); + XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, query); ) { int objectIndex = -1; Deque levels = new LinkedList<>(); @@ -1026,21 +1007,9 @@ public abstract class AbstractQueryTestCase> return query; } - protected void checkWarningHeaders(String... 
messages) { - final List warnings = threadContext.getResponseHeaders().get(DeprecationLogger.DEPRECATION_HEADER); - assertThat(warnings, hasSize(messages.length)); - for (String msg : messages) { - assertThat(warnings, hasItem(equalTo(msg))); - } - // "clear" current warning headers by setting a new ThreadContext - DeprecationLogger.removeThreadContext(this.threadContext); - try { - this.threadContext.close(); - } catch (IOException e) { - throw new RuntimeException(e); - } - this.threadContext = new ThreadContext(Settings.EMPTY); - DeprecationLogger.setThreadContext(this.threadContext); + @Override + protected NamedXContentRegistry xContentRegistry() { + return serviceHolder.xContentRegistry; } private static class ServiceHolder implements Closeable { @@ -1049,6 +1018,7 @@ public abstract class AbstractQueryTestCase> private final IndexFieldDataService indexFieldDataService; private final SearchModule searchModule; private final NamedWriteableRegistry namedWriteableRegistry; + private final NamedXContentRegistry xContentRegistry; private final ClientInvocationHandler clientInvocationHandler = new ClientInvocationHandler(); private final IndexSettings idxSettings; private final SimilarityService similarityService; @@ -1077,7 +1047,10 @@ public abstract class AbstractQueryTestCase> List entries = new ArrayList<>(); entries.addAll(indicesModule.getNamedWriteables()); entries.addAll(searchModule.getNamedWriteables()); - NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(entries); + namedWriteableRegistry = new NamedWriteableRegistry(entries); + xContentRegistry = new NamedXContentRegistry(Stream.of( + searchModule.getNamedXContents().stream() + ).flatMap(Function.identity()).collect(toList())); IndexScopedSettings indexScopedSettings = settingsModule.getIndexScopedSettings(); idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings, indexScopedSettings); AnalysisModule analysisModule = new AnalysisModule(new 
Environment(nodeSettings), emptyList()); @@ -1085,7 +1058,8 @@ public abstract class AbstractQueryTestCase> scriptService = scriptModule.getScriptService(); similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); - mapperService = new MapperService(idxSettings, indexAnalyzers, similarityService, mapperRegistry, this::createShardContext); + mapperService = new MapperService(idxSettings, indexAnalyzers, xContentRegistry, similarityService, mapperRegistry, + this::createShardContext); IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(nodeSettings, new IndexFieldDataCache.Listener() { }); indexFieldDataService = new IndexFieldDataService(idxSettings, indicesFieldDataCache, @@ -1124,7 +1098,6 @@ public abstract class AbstractQueryTestCase> MapperService.MergeReason.MAPPING_UPDATE, false); } testCase.initializeAdditionalMappings(mapperService); - this.namedWriteableRegistry = namedWriteableRegistry; } @Override @@ -1133,7 +1106,7 @@ public abstract class AbstractQueryTestCase> QueryShardContext createShardContext() { return new QueryShardContext(0, idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, - scriptService, indicesQueriesRegistry, this.client, null, () -> nowInMillis); + scriptService, xContentRegistry, indicesQueriesRegistry, this.client, null, () -> nowInMillis); } ScriptModule createScriptModule(List scriptPlugins) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index cb31ac6028b..fd3af224a1f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -23,6 +23,7 @@ import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.annotations.TestGroup; 
import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + import org.apache.http.HttpHost; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; @@ -88,6 +89,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -339,6 +341,13 @@ public abstract class ESIntegTestCase extends ESTestCase { initializeSuiteScope(); } + @Override + protected final boolean enableWarningsCheck() { + //In an integ test it doesn't make sense to keep track of warnings: if the cluster is external the warnings are in another jvm, + //if the cluster is internal the deprecation logger is shared across all nodes + return false; + } + protected final void beforeInternal() throws Exception { final Scope currentClusterScope = getCurrentClusterScope(); switch (currentClusterScope) { @@ -2106,6 +2115,11 @@ public abstract class ESIntegTestCase extends ESTestCase { return builder.build(); } + @Override + protected NamedXContentRegistry xContentRegistry() { + return internalCluster().getInstance(NamedXContentRegistry.class); + } + /** * Returns an instance of {@link RestClient} pointing to the current test cluster. * Creates a new client if the method is invoked for the first time in the context of the current test scope. 
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 0b2adfa52e1..32dac0fd837 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.env.Environment; import org.elasticsearch.index.Index; @@ -293,7 +294,6 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { protected SearchContext createSearchContext(IndexService indexService) { BigArrays bigArrays = indexService.getBigArrays(); ThreadPool threadPool = indexService.getThreadPool(); - ScriptService scriptService = node().injector().getInstance(ScriptService.class); return new TestSearchContext(threadPool, bigArrays, indexService); } @@ -327,5 +327,8 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { return actionGet.getStatus(); } - + @Override + protected NamedXContentRegistry xContentRegistry() { + return getInstanceFromNode(NamedXContentRegistry.class); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 9a4085ff2e4..a169274c172 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -57,12 +57,15 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -129,6 +132,8 @@ import static java.util.Collections.singletonList; import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; /** * Base testcase for randomized unit testing with Elasticsearch @@ -173,6 +178,7 @@ public abstract class ESTestCase extends LuceneTestCase { } protected final Logger logger = Loggers.getLogger(getClass()); + private ThreadContext threadContext; // ----------------------------------------------------------------- // Suite and test case setup/cleanup. @@ -251,16 +257,62 @@ public abstract class ESTestCase extends LuceneTestCase { @Before public final void before() { logger.info("[{}]: before test", getTestName()); + if (enableWarningsCheck()) { + this.threadContext = new ThreadContext(Settings.EMPTY); + DeprecationLogger.setThreadContext(threadContext); + } + } + + /** + * Whether or not we check after each test whether it has left warnings behind. 
That happens if any deprecated feature or syntax + * was used by the test and the test didn't assert on it using {@link #assertWarnings(String...)}. + */ + protected boolean enableWarningsCheck() { + return true; } @After public final void after() throws Exception { checkStaticState(); + if (enableWarningsCheck()) { + ensureNoWarnings(); + } ensureAllSearchContextsReleased(); ensureCheckIndexPassed(); logger.info("[{}]: after test", getTestName()); } + private void ensureNoWarnings() throws IOException { + //Check that there are no unaccounted warning headers. These should be checked with {@link #assertWarnings(String...)} in the + //appropriate test + try { + final List warnings = threadContext.getResponseHeaders().get(DeprecationLogger.WARNING_HEADER); + assertNull("unexpected warning headers", warnings); + } finally { + DeprecationLogger.removeThreadContext(this.threadContext); + this.threadContext.close(); + } + } + + protected final void assertWarnings(String... expectedWarnings) throws IOException { + if (enableWarningsCheck() == false) { + throw new IllegalStateException("unable to check warning headers if the test is not set to do so"); + } + try { + final List actualWarnings = threadContext.getResponseHeaders().get(DeprecationLogger.WARNING_HEADER); + assertThat(actualWarnings, hasSize(expectedWarnings.length)); + for (String msg : expectedWarnings) { + assertThat(actualWarnings, hasItem(equalTo(msg))); + } + } finally { + // "clear" current warning headers by setting a new ThreadContext + DeprecationLogger.removeThreadContext(this.threadContext); + this.threadContext.close(); + this.threadContext = new ThreadContext(Settings.EMPTY); + DeprecationLogger.setThreadContext(this.threadContext); + } + } + private static final List statusData = new ArrayList<>(); static { // ensure that the status logger is set to the warn level so we do not miss any warnings with our Log4j usage
* recursive shuffling behavior can be made by passing in the names of fields which * internally should stay untouched. */ - public static XContentBuilder shuffleXContent(XContentBuilder builder, String... exceptFieldNames) throws IOException { - BytesReference bytes = builder.bytes(); - XContentParser parser = XContentFactory.xContent(bytes).createParser(bytes); + public XContentBuilder shuffleXContent(XContentBuilder builder, String... exceptFieldNames) throws IOException { + XContentParser parser = createParser(builder); // use ordered maps for reproducibility Map shuffledMap = shuffleMap(parser.mapOrdered(), new HashSet<>(Arrays.asList(exceptFieldNames))); XContentBuilder xContentBuilder = XContentFactory.contentBuilder(builder.contentType()); @@ -885,35 +936,42 @@ public abstract class ESTestCase extends LuceneTestCase { * Create a new {@link XContentParser}. */ protected final XContentParser createParser(XContentBuilder builder) throws IOException { - return builder.generator().contentType().xContent().createParser(builder.bytes()); + return builder.generator().contentType().xContent().createParser(xContentRegistry(), builder.bytes()); } /** * Create a new {@link XContentParser}. */ protected final XContentParser createParser(XContent xContent, String data) throws IOException { - return xContent.createParser(data); + return xContent.createParser(xContentRegistry(), data); } /** * Create a new {@link XContentParser}. */ protected final XContentParser createParser(XContent xContent, InputStream data) throws IOException { - return xContent.createParser(data); + return xContent.createParser(xContentRegistry(), data); } /** * Create a new {@link XContentParser}. */ protected final XContentParser createParser(XContent xContent, byte[] data) throws IOException { - return xContent.createParser(data); + return xContent.createParser(xContentRegistry(), data); } /** * Create a new {@link XContentParser}. 
*/ protected final XContentParser createParser(XContent xContent, BytesReference data) throws IOException { - return xContent.createParser(data); + return xContent.createParser(xContentRegistry(), data); + } + + /** + * The {@link NamedXContentRegistry} to use for this test. Subclasses should override and use liberally. + */ + protected NamedXContentRegistry xContentRegistry() { + return NamedXContentRegistry.EMPTY; } /** Returns the suite failure marker: internal use only! */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java new file mode 100644 index 00000000000..f23e243074d --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java @@ -0,0 +1,207 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.test; + +import com.carrotsearch.randomizedtesting.generators.RandomNumbers; +import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Base64; +import java.util.List; +import java.util.Random; + +import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomUnicodeOfLengthBetween; + +public final class RandomObjects { + + private RandomObjects() { + + } + + /** + * Returns a tuple containing random stored field values and their corresponding expected values once printed out + * via {@link org.elasticsearch.common.xcontent.ToXContent#toXContent(XContentBuilder, ToXContent.Params)} and parsed back via + * {@link org.elasticsearch.common.xcontent.XContentParser#objectText()}. + * Generates values based on what can get printed out. Stored fields values are retrieved from lucene and converted via + * {@link org.elasticsearch.index.mapper.MappedFieldType#valueForDisplay(Object)} to either strings, numbers or booleans. + * + * @param random Random generator + * @param xContentType the content type, used to determine what the expected values are for float numbers. 
+ */ + public static Tuple, List> randomStoredFieldValues(Random random, XContentType xContentType) { + int numValues = RandomNumbers.randomIntBetween(random, 1, 5); + List originalValues = new ArrayList<>(); + List expectedParsedValues = new ArrayList<>(); + int dataType = RandomNumbers.randomIntBetween(random, 0, 8); + for (int i = 0; i < numValues; i++) { + switch(dataType) { + case 0: + long randomLong = random.nextLong(); + originalValues.add(randomLong); + expectedParsedValues.add(randomLong); + break; + case 1: + int randomInt = random.nextInt(); + originalValues.add(randomInt); + expectedParsedValues.add(randomInt); + break; + case 2: + Short randomShort = (short) random.nextInt(); + originalValues.add(randomShort); + expectedParsedValues.add(randomShort.intValue()); + break; + case 3: + Byte randomByte = (byte)random.nextInt(); + originalValues.add(randomByte); + expectedParsedValues.add(randomByte.intValue()); + break; + case 4: + double randomDouble = random.nextDouble(); + originalValues.add(randomDouble); + expectedParsedValues.add(randomDouble); + break; + case 5: + Float randomFloat = random.nextFloat(); + originalValues.add(randomFloat); + if (xContentType == XContentType.CBOR) { + //with CBOR we get back a float + expectedParsedValues.add(randomFloat); + } else if (xContentType == XContentType.SMILE) { + //with SMILE we get back a double + expectedParsedValues.add(randomFloat.doubleValue()); + } else { + //with JSON AND YAML we get back a double, but with float precision. + expectedParsedValues.add(Double.parseDouble(randomFloat.toString())); + } + break; + case 6: + boolean randomBoolean = random.nextBoolean(); + originalValues.add(randomBoolean); + expectedParsedValues.add(randomBoolean); + break; + case 7: + String randomString = random.nextBoolean() ? 
RandomStrings.randomAsciiOfLengthBetween(random, 3, 10 ) : + randomUnicodeOfLengthBetween(random, 3, 10); + originalValues.add(randomString); + expectedParsedValues.add(randomString); + break; + case 8: + byte[] randomBytes = RandomStrings.randomUnicodeOfLengthBetween(random, 10, 50).getBytes(StandardCharsets.UTF_8); + BytesArray randomBytesArray = new BytesArray(randomBytes); + originalValues.add(randomBytesArray); + if (xContentType == XContentType.JSON || xContentType == XContentType.YAML) { + //JSON and YAML write the base64 format + expectedParsedValues.add(Base64.getEncoder().encodeToString(randomBytes)); + } else { + //SMILE and CBOR write the original bytes as they support binary format + expectedParsedValues.add(randomBytesArray); + } + break; + default: + throw new UnsupportedOperationException(); + } + } + return Tuple.tuple(originalValues, expectedParsedValues); + } + + /** + * Returns a random source containing a random number of fields, objects and array, with maximum depth 5. + * + * @param random Random generator + */ + public static BytesReference randomSource(Random random) { + //the source can be stored in any format and eventually converted when retrieved depending on the format of the response + XContentType xContentType = RandomPicks.randomFrom(random, XContentType.values()); + try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType)) { + builder.startObject(); + addFields(random, builder, 0); + builder.endObject(); + return builder.bytes(); + } catch(IOException e) { + throw new RuntimeException(e); + } + } + + /** + * Randomly adds fields, objects, or arrays to the provided builder. The maximum depth is 5. 
+ */ + private static void addFields(Random random, XContentBuilder builder, int currentDepth) throws IOException { + int numFields = RandomNumbers.randomIntBetween(random, 1, 5); + for (int i = 0; i < numFields; i++) { + if (currentDepth < 5 && random.nextBoolean()) { + if (random.nextBoolean()) { + builder.startObject(RandomStrings.randomAsciiOfLengthBetween(random, 3, 10)); + addFields(random, builder, currentDepth + 1); + builder.endObject(); + } else { + builder.startArray(RandomStrings.randomAsciiOfLengthBetween(random, 3, 10)); + int numElements = RandomNumbers.randomIntBetween(random, 1, 5); + boolean object = random.nextBoolean(); + int dataType = -1; + if (object == false) { + dataType = randomDataType(random); + } + for (int j = 0; j < numElements; j++) { + if (object) { + builder.startObject(); + addFields(random, builder, 5); + builder.endObject(); + } else { + builder.value(randomFieldValue(random, dataType)); + } + } + builder.endArray(); + } + } else { + builder.field(RandomStrings.randomAsciiOfLengthBetween(random, 3, 10), + randomFieldValue(random, randomDataType(random))); + } + } + } + + private static int randomDataType(Random random) { + return RandomNumbers.randomIntBetween(random, 0, 3); + } + + private static Object randomFieldValue(Random random, int dataType) { + switch(dataType) { + case 0: + return RandomStrings.randomAsciiOfLengthBetween(random, 3, 10); + case 1: + return RandomStrings.randomAsciiOfLengthBetween(random, 3, 10); + case 2: + return random.nextLong(); + case 3: + return random.nextDouble(); + default: + throw new UnsupportedOperationException(); + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index bc560c9b0f0..6280de426ce 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -55,6 +55,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchModule; @@ -73,8 +78,10 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Set; import static java.util.Collections.emptyList; @@ -766,4 +773,76 @@ public class ElasticsearchAssertions { assertFileExists(dir); assertThat("file [" + dir + "] should be a directory.", Files.isDirectory(dir), is(true)); } + + /** + * Asserts that the provided {@link BytesReference}s created through + * {@link org.elasticsearch.common.xcontent.ToXContent#toXContent(XContentBuilder, ToXContent.Params)} hold the same content. + * The comparison is done by parsing both into a map and comparing those two, so that keys ordering doesn't matter. + * Also binary values (byte[]) are properly compared through arrays comparisons. 
+ */ + public static void assertToXContentEquivalent(BytesReference expected, BytesReference actual, XContentType xContentType) + throws IOException { + //we tried comparing byte per byte, but that didn't fly for a couple of reasons: + //1) whenever anything goes through a map while parsing, ordering is not preserved, which is perfectly ok + //2) Jackson SMILE parser parses floats as double, which then get printed out as double (with double precision) + //Note that byte[] holding binary values need special treatment as they need to be properly compared item per item. + try (XContentParser actualParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, actual)) { + Map actualMap = actualParser.map(); + try (XContentParser expectedParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, expected)) { + Map expectedMap = expectedParser.map(); + assertMapEquals(expectedMap, actualMap); + } + } + } + + /** + * Compares two maps recursively, using arrays comparisons for byte[] through Arrays.equals(byte[], byte[]) + */ + @SuppressWarnings("unchecked") + private static void assertMapEquals(Map expected, Map actual) { + assertEquals(expected.size(), actual.size()); + for (Map.Entry expectedEntry : expected.entrySet()) { + String expectedKey = expectedEntry.getKey(); + Object expectedValue = expectedEntry.getValue(); + if (expectedValue == null) { + assertTrue(actual.get(expectedKey) == null && actual.containsKey(expectedKey)); + } else { + Object actualValue = actual.get(expectedKey); + assertObjectEquals(expectedValue, actualValue); + } + } + } + + /** + * Compares two lists recursively, but using arrays comparisons for byte[] through Arrays.equals(byte[], byte[]) + */ + @SuppressWarnings("unchecked") + private static void assertListEquals(List expected, List actual) { + assertEquals(expected.size(), actual.size()); + Iterator actualIterator = actual.iterator(); + for (Object expectedValue : expected) { + Object actualValue = 
actualIterator.next(); + assertObjectEquals(expectedValue, actualValue); + } + } + + /** + * Compares two objects, recursively walking eventual maps and lists encountered, and using arrays comparisons + * for byte[] through Arrays.equals(byte[], byte[]) + */ + @SuppressWarnings("unchecked") + private static void assertObjectEquals(Object expected, Object actual) { + if (expected instanceof Map) { + assertThat(actual, instanceOf(Map.class)); + assertMapEquals((Map) expected, (Map) actual); + } else if (expected instanceof List) { + assertListEquals((List) expected, (List) actual); + } else if (expected instanceof byte[]) { + //byte[] is really a special case for binary values when comparing SMILE and CBOR, arrays of other types + //don't need to be handled. Ordinary arrays get parsed as lists. + assertArrayEquals((byte[]) expected, (byte[]) actual); + } else { + assertEquals(expected, actual); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java index f291e0bbeec..ae8c4c82c46 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java @@ -20,6 +20,7 @@ package org.elasticsearch.test.rest; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.rest.RestRequest; import java.util.HashMap; @@ -33,11 +34,12 @@ public class FakeRestRequest extends RestRequest { public FakeRestRequest() { - this(new HashMap<>(), new HashMap<>(), null, Method.GET, "/"); + this(NamedXContentRegistry.EMPTY, new HashMap<>(), new HashMap<>(), null, Method.GET, "/"); } - private FakeRestRequest(Map headers, Map params, BytesReference content, Method method, String path) { - super(params, path); + private FakeRestRequest(NamedXContentRegistry xContentRegistry, Map 
headers, Map params, + BytesReference content, Method method, String path) { + super(xContentRegistry, params, path); this.headers = headers; this.content = content; this.method = method; @@ -74,6 +76,7 @@ public class FakeRestRequest extends RestRequest { } public static class Builder { + private final NamedXContentRegistry xContentRegistry; private Map headers = new HashMap<>(); @@ -85,6 +88,10 @@ public class FakeRestRequest extends RestRequest { private Method method = Method.GET; + public Builder(NamedXContentRegistry xContentRegistry) { + this.xContentRegistry = xContentRegistry; + } + public Builder withHeaders(Map headers) { this.headers = headers; return this; @@ -111,7 +118,7 @@ public class FakeRestRequest extends RestRequest { } public FakeRestRequest build() { - return new FakeRestRequest(headers, params, content, method, path); + return new FakeRestRequest(xContentRegistry, headers, params, content, method, path); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 24f977c2740..e4434d1365b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -182,7 +182,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { protected void afterIfFailed(List errors) { // Dump the stash on failure. 
Instead of dumping it in true json we escape `\n`s so stack traces are easier to read logger.info("Stash dump on failure [{}]", - XContentHelper.toString(restTestExecutionContext.stash()).replace("\\n", "\n").replace("\\t", "\t")); + XContentHelper.toString(restTestExecutionContext.stash()).replace("\\n", "\n").replace("\\r", "\r").replace("\\t", "\t")); super.afterIfFailed(errors); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ObjectPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ObjectPath.java index 265fd7b3e85..e2a41365032 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ObjectPath.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ObjectPath.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.test.rest.yaml; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentParser; @@ -34,7 +35,7 @@ public class ObjectPath { private final Object object; public static ObjectPath createFromXContent(XContent xContent, String input) throws IOException { - try (XContentParser parser = xContent.createParser(input)) { + try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, input)) { if (parser.nextToken() == XContentParser.Token.START_ARRAY) { return new ObjectPath(parser.listOrderedMap()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestSuiteParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestSuiteParser.java index 65277888471..3c09452fc8f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestSuiteParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestSuiteParser.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.test.rest.yaml.parser; +import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.yaml.YamlXContent; import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSuite; @@ -57,7 +58,7 @@ public class ClientYamlTestSuiteParser implements ClientYamlTestFragmentParser if ("body".equals(paramName)) { String body = parser.text(); XContentType bodyContentType = XContentFactory.xContentType(body); - XContentParser bodyParser = XContentFactory.xContent(bodyContentType).createParser(body); + XContentParser bodyParser = XContentFactory.xContent(bodyContentType).createParser( + NamedXContentRegistry.EMPTY, body); //multiple bodies are supported e.g. in case of bulk provided as a whole string while(bodyParser.nextToken() != null) { apiCallSection.addBody(bodyParser.mapOrdered()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java index 9839bb089ae..4efed709e22 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.test.rest.yaml.restspec; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.rest.yaml.FileUtils; @@ -65,7 +66,7 @@ public class ClientYamlSuiteRestSpec { for (String path : paths) { for (Path jsonFile : FileUtils.findJsonSpec(fileSystem, optionalPathPrefix, path)) { try (InputStream stream = Files.newInputStream(jsonFile)) { - try (XContentParser parser = JsonXContent.jsonXContent.createParser(stream)) { + try (XContentParser parser = 
JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, stream)) { ClientYamlSuiteRestApi restApi = restApiParser.parse(jsonFile.toString(), parser); String filename = jsonFile.getFileName().toString(); String expectedApiName = filename.substring(0, filename.lastIndexOf('.')); diff --git a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java index 0776fa1edb2..b000c42d41a 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java @@ -35,7 +35,7 @@ public class MockSearchServiceTests extends ESTestCase { public void testAssertNoInFlightContext() { final long nowInMillis = randomPositiveLong(); SearchContext s = new TestSearchContext(new QueryShardContext(0, new IndexSettings(IndexMetaData.PROTO, Settings.EMPTY), null, null, - null, null, null, null, null, null, () -> nowInMillis)) { + null, null, null, xContentRegistry(), null, null, null, () -> nowInMillis)) { @Override public SearchShardTarget shardTarget() { return new SearchShardTarget("node", new Index("idx", "ignored"), 0);