diff --git a/TESTING.asciidoc b/TESTING.asciidoc
index 5cdc99c9be3..dd6c093047a 100644
--- a/TESTING.asciidoc
+++ b/TESTING.asciidoc
@@ -368,7 +368,8 @@ These are the linux flavors the Vagrantfile currently supports:
 * debian-8 aka jessie, the current debian stable distribution
 * centos-6
 * centos-7
-* fedora-22
+* fedora-24
+* oel-6 aka Oracle Enterprise Linux 6
 * oel-7 aka Oracle Enterprise Linux 7
 * sles-12
 * opensuse-13
@@ -377,7 +378,6 @@ We're missing the following from the support matrix because there aren't
 high quality boxes available in vagrant atlas:
 
 * sles-11
-* oel-6
 
 We're missing the follow because our tests are very linux/bash centric:
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
index 44eb050dfb5..ad3a8e98ca6 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
@@ -148,6 +148,9 @@ class PrecommitTasks {
                 checkstyleTask.dependsOn(task)
                 task.dependsOn(copyCheckstyleConf)
                 task.inputs.file(checkstyleSuppressions)
+                task.reports {
+                    html.enabled false
+                }
             }
         }
         return checkstyleTask
diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java
index 814c05889b2..2c6b88954a3 100644
--- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java
+++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java
@@ -72,7 +72,7 @@ public class RestNoopBulkAction extends BaseRestHandler {
         }
         bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
         bulkRequest.setRefreshPolicy(request.param("refresh"));
-        bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, defaultPipeline, null, true);
+        bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, null, defaultPipeline, null, true);
 
         // short circuit the call to the transport layer
         BulkRestBuilderListener listener = new BulkRestBuilderListener(channel, request);
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java
index dc77a1a6e84..670812a0b48 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java
@@ -216,7 +216,7 @@ public class TransportGetTaskAction extends HandledTransportAction implements CompositeIndicesRequest, WriteRequest {
+    private static final DeprecationLogger DEPRECATION_LOGGER =
+        new DeprecationLogger(Loggers.getLogger(BulkRequest.class));
 
     private static final int REQUEST_OVERHEAD = 50;
 
@@ -257,17 +262,17 @@ public class BulkRequest extends ActionRequest implements Composite
      * Adds a framed data in binary format
      */
     public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType) throws Exception {
-        return add(data, defaultIndex, defaultType, null, null, null, null, true);
+        return add(data, defaultIndex, defaultType, null, null, null, null, null, true);
     }
 
     /**
      * Adds a framed data in binary format
      */
     public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex) throws Exception {
-        return add(data, defaultIndex, defaultType, null, null, null, null, allowExplicitIndex);
+        return add(data, defaultIndex, defaultType, null, null, null, null, null, allowExplicitIndex);
     }
 
-    public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable String[] defaultFields, @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex) throws Exception {
+    public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSourceContext, @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex) throws Exception {
         XContent xContent = XContentFactory.xContent(data);
         int line = 0;
         int from = 0;
@@ -301,6 +306,7 @@ public class BulkRequest extends ActionRequest implements Composite
                 String id = null;
                 String routing = defaultRouting;
                 String parent = null;
+                FetchSourceContext fetchSourceContext = defaultFetchSourceContext;
                 String[] fields = defaultFields;
                 String timestamp = null;
                 TimeValue ttl = null;
@@ -353,16 +359,21 @@ public class BulkRequest extends ActionRequest implements Composite
                             pipeline = parser.text();
                         } else if ("fields".equals(currentFieldName)) {
                             throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected");
+                        } else if ("_source".equals(currentFieldName)) {
+                            fetchSourceContext = FetchSourceContext.parse(parser);
                         } else {
                             throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]");
                         }
                     } else if (token == XContentParser.Token.START_ARRAY) {
                         if ("fields".equals(currentFieldName)) {
+                            DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
                             List values = parser.list();
                             fields = values.toArray(new String[values.size()]);
                         } else {
                             throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
                         }
+                    } else if (token == XContentParser.Token.START_OBJECT && "_source".equals(currentFieldName)) {
+                        fetchSourceContext = FetchSourceContext.parse(parser);
                     } else if (token != XContentParser.Token.VALUE_NULL) {
                         throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
                     }
@@ -402,7 +413,10 @@ public class BulkRequest extends ActionRequest implements Composite
                             .version(version).versionType(versionType)
                             .routing(routing)
                             .parent(parent)
-                            .source(data.slice(from, nextMarker - from));
+                            .fromXContent(data.slice(from, nextMarker - from));
+                    if (fetchSourceContext != null) {
+                        updateRequest.fetchSource(fetchSourceContext);
+                    }
                     if (fields != null) {
                         updateRequest.fields(fields);
                     }
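With the extra defaultFetchSourceContext parameter threaded through BulkRequest.add, an update action inside a bulk body can now carry a _source option on its metadata line. A minimal sketch of such a payload; the index, type, id, and field names are illustrative, and the object form with "includes" is an assumption based on what FetchSourceContext.parse handles:

    // Hypothetical NDJSON bulk body: the update action asks for the updated
    // document back, filtered to the "counter" field via the _source option.
    String bulkBody =
        "{\"update\": {\"_index\": \"test\", \"_type\": \"doc\", \"_id\": \"1\", \"_source\": {\"includes\": [\"counter\"]}}}\n"
        + "{\"doc\": {\"counter\": 1}}\n";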
diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
index 754316f3de0..adb6ea58ea0 100644
--- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
+++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
@@ -251,7 +251,8 @@ public class TransportShardBulkAction extends TransportWriteAction 0) {
+                if ((updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) ||
+                    (updateRequest.fields() != null && updateRequest.fields().length > 0)) {
                     Tuple> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true);
                     updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes));
                 }
diff --git a/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java b/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java
index 851d9e6573d..fef1b307e99 100644
--- a/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java
@@ -40,7 +40,7 @@ public class ExplainRequest extends SingleShardRequest {
     private String routing;
     private String preference;
     private QueryBuilder query;
-    private String[] fields;
+    private String[] storedFields;
     private FetchSourceContext fetchSourceContext;
 
     private String[] filteringAlias = Strings.EMPTY_ARRAY;
@@ -122,12 +122,12 @@ public class ExplainRequest extends SingleShardRequest {
     }
 
-    public String[] fields() {
-        return fields;
+    public String[] storedFields() {
+        return storedFields;
     }
 
-    public ExplainRequest fields(String[] fields) {
-        this.fields = fields;
+    public ExplainRequest storedFields(String[] fields) {
+        this.storedFields = fields;
         return this;
     }
 
@@ -167,8 +167,8 @@ public class ExplainRequest extends SingleShardRequest {
         preference = in.readOptionalString();
         query = in.readNamedWriteable(QueryBuilder.class);
         filteringAlias = in.readStringArray();
-        fields = in.readOptionalStringArray();
-        fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new);
+        storedFields = in.readOptionalStringArray();
+        fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
         nowInMillis = in.readVLong();
     }
 
@@ -181,8 +181,8 @@ public class ExplainRequest extends SingleShardRequest {
         out.writeOptionalString(preference);
         out.writeNamedWriteable(query);
         out.writeStringArray(filteringAlias);
-        out.writeOptionalStringArray(fields);
-        out.writeOptionalStreamable(fetchSourceContext);
+        out.writeOptionalStringArray(storedFields);
+        out.writeOptionalWriteable(fetchSourceContext);
         out.writeVLong(nowInMillis);
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java
index c201315cbd8..cf7b4821816 100644
--- a/core/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java
@@ -88,10 +88,10 @@ public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder implements Realti
     private String parent;
     private String preference;
-    private String[] fields;
+    private String[] storedFields;
     private FetchSourceContext fetchSourceContext;
 
@@ -186,20 +186,20 @@ public class GetRequest extends SingleShardRequest implements Realti
     }
 
     /**
-     * Explicitly specify the fields that will be returned. By default, the _source
+     * Explicitly specify the stored fields that will be returned. By default, the _source
      * field will be returned.
      */
-    public GetRequest fields(String... fields) {
-        this.fields = fields;
+    public GetRequest storedFields(String... fields) {
+        this.storedFields = fields;
         return this;
     }
 
     /**
-     * Explicitly specify the fields that will be returned. By default, the _source
+     * Explicitly specify the stored fields that will be returned. By default, the _source
      * field will be returned.
      */
-    public String[] fields() {
-        return this.fields;
+    public String[] storedFields() {
+        return this.storedFields;
     }
 
     /**
@@ -260,18 +260,12 @@ public class GetRequest extends SingleShardRequest implements Realti
         parent = in.readOptionalString();
         preference = in.readOptionalString();
         refresh = in.readBoolean();
-        int size = in.readInt();
-        if (size >= 0) {
-            fields = new String[size];
-            for (int i = 0; i < size; i++) {
-                fields[i] = in.readString();
-            }
-        }
+        storedFields = in.readOptionalStringArray();
         realtime = in.readBoolean();
 
         this.versionType = VersionType.fromValue(in.readByte());
         this.version = in.readLong();
-        fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new);
+        fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
     }
 
     @Override
@@ -284,18 +278,11 @@ public class GetRequest extends SingleShardRequest implements Realti
         out.writeOptionalString(preference);
         out.writeBoolean(refresh);
 
-        if (fields == null) {
-            out.writeInt(-1);
-        } else {
-            out.writeInt(fields.length);
-            for (String field : fields) {
-                out.writeString(field);
-            }
-        }
+        out.writeOptionalStringArray(storedFields);
         out.writeBoolean(realtime);
 
         out.writeByte(versionType.getValue());
         out.writeLong(version);
-        out.writeOptionalStreamable(fetchSourceContext);
+        out.writeOptionalWriteable(fetchSourceContext);
     }
 
     @Override
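For callers migrating off the removed wire format, the rename is mechanical. A small client-side sketch (index, type, id, and field names are illustrative, not part of this change):

    // storedFields() replaces fields(); _source filtering stays a separate option.
    GetRequest get = new GetRequest("test", "doc", "1")
        .storedFields("rating", "tags");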
diff --git a/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java
index a3f070fd2ef..f56905d86ee 100644
--- a/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java
@@ -88,8 +88,8 @@ public class GetRequestBuilder extends SingleShardOperationRequestBuilder_source
      * field will be returned.
      */
-    public GetRequestBuilder setFields(String... fields) {
-        request.fields(fields);
+    public GetRequestBuilder setStoredFields(String... fields) {
+        request.storedFields(fields);
         return this;
     }
 
diff --git a/core/src/main/java/org/elasticsearch/action/get/GetResponse.java b/core/src/main/java/org/elasticsearch/action/get/GetResponse.java
index 5741984d35f..87cc42f9d27 100644
--- a/core/src/main/java/org/elasticsearch/action/get/GetResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/get/GetResponse.java
@@ -134,14 +134,26 @@ public class GetResponse extends ActionResponse implements Iterable, T
         return getResult.getSource();
     }
 
+    /**
+     * @deprecated Use {@link GetResponse#getSource()} instead
+     */
+    @Deprecated
     public Map getFields() {
         return getResult.getFields();
     }
 
+    /**
+     * @deprecated Use {@link GetResponse#getSource()} instead
+     */
+    @Deprecated
     public GetField getField(String name) {
         return getResult.field(name);
     }
 
+    /**
+     * @deprecated Use {@link GetResponse#getSource()} instead
+     */
+    @Deprecated
     @Override
     public Iterator iterator() {
         return getResult.iterator();
     }
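Code that relied on the now-deprecated accessors can usually switch to reading _source directly. A migration sketch, with an illustrative field name:

    // Before (deprecated): response.getField("rating").getValue()
    Map<String, Object> source = response.getSource();
    Object rating = source.get("rating");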
diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
index ee72a83a5fe..1decd8ce946 100644
--- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
@@ -28,6 +28,7 @@ import org.elasticsearch.action.RealtimeRequest;
 import org.elasticsearch.action.ValidateActions;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -58,7 +59,7 @@ public class MultiGetRequest extends ActionRequest implements I
         private String id;
         private String routing;
         private String parent;
-        private String[] fields;
+        private String[] storedFields;
         private long version = Versions.MATCH_ANY;
         private VersionType versionType = VersionType.INTERNAL;
         private FetchSourceContext fetchSourceContext;
@@ -136,13 +137,13 @@ public class MultiGetRequest extends ActionRequest implements I
             return parent;
         }
 
-        public Item fields(String... fields) {
-            this.fields = fields;
+        public Item storedFields(String... fields) {
+            this.storedFields = fields;
             return this;
         }
 
-        public String[] fields() {
-            return this.fields;
+        public String[] storedFields() {
+            return this.storedFields;
         }
 
         public long version() {
@@ -188,17 +189,11 @@
             id = in.readString();
             routing = in.readOptionalString();
             parent = in.readOptionalString();
-            int size = in.readVInt();
-            if (size > 0) {
-                fields = new String[size];
-                for (int i = 0; i < size; i++) {
-                    fields[i] = in.readString();
-                }
-            }
+            storedFields = in.readOptionalStringArray();
             version = in.readLong();
             versionType = VersionType.fromValue(in.readByte());
 
-            fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new);
+            fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
         }
 
         @Override
@@ -208,19 +203,11 @@
             out.writeString(id);
             out.writeOptionalString(routing);
             out.writeOptionalString(parent);
-            if (fields == null) {
-                out.writeVInt(0);
-            } else {
-                out.writeVInt(fields.length);
-                for (String field : fields) {
-                    out.writeString(field);
-                }
-            }
-
+            out.writeOptionalStringArray(storedFields);
             out.writeLong(version);
             out.writeByte(versionType.getValue());
 
-            out.writeOptionalStreamable(fetchSourceContext);
+            out.writeOptionalWriteable(fetchSourceContext);
         }
 
         @Override
@@ -233,7 +220,7 @@ public class MultiGetRequest extends ActionRequest implements I
             if (version != item.version) return false;
             if (fetchSourceContext != null ? !fetchSourceContext.equals(item.fetchSourceContext) : item.fetchSourceContext != null)
                 return false;
-            if (!Arrays.equals(fields, item.fields)) return false;
+            if (!Arrays.equals(storedFields, item.storedFields)) return false;
             if (!id.equals(item.id)) return false;
             if (!index.equals(item.index)) return false;
             if (routing != null ? !routing.equals(item.routing) : item.routing != null) return false;
@@ -251,7 +238,7 @@ public class MultiGetRequest extends ActionRequest implements I
             result = 31 * result + id.hashCode();
             result = 31 * result + (routing != null ? routing.hashCode() : 0);
             result = 31 * result + (parent != null ? parent.hashCode() : 0);
-            result = 31 * result + (fields != null ? Arrays.hashCode(fields) : 0);
+            result = 31 * result + (storedFields != null ? Arrays.hashCode(storedFields) : 0);
             result = 31 * result + Long.hashCode(version);
             result = 31 * result + versionType.hashCode();
             result = 31 * result + (fetchSourceContext != null ? fetchSourceContext.hashCode() : 0);
@@ -379,7 +366,7 @@ public class MultiGetRequest extends ActionRequest implements I
                     String id = null;
                     String routing = defaultRouting;
                     String parent = null;
-                    List fields = null;
+                    List storedFields = null;
                     long version = Versions.MATCH_ANY;
                     VersionType versionType = VersionType.INTERNAL;
 
@@ -403,8 +390,11 @@ public class MultiGetRequest extends ActionRequest implements I
                             } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
                                 parent = parser.text();
                             } else if ("fields".equals(currentFieldName)) {
-                                fields = new ArrayList<>();
-                                fields.add(parser.text());
+                                throw new ParsingException(parser.getTokenLocation(),
+                                    "Unsupported field [fields] used, expected [stored_fields] instead");
+                            } else if ("stored_fields".equals(currentFieldName)) {
+                                storedFields = new ArrayList<>();
+                                storedFields.add(parser.text());
                             } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
                                 version = parser.longValue();
                             } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
@@ -420,9 +410,12 @@ public class MultiGetRequest extends ActionRequest implements I
                             }
                         } else if (token == XContentParser.Token.START_ARRAY) {
                             if ("fields".equals(currentFieldName)) {
-                                fields = new ArrayList<>();
+                                throw new ParsingException(parser.getTokenLocation(),
+                                    "Unsupported field [fields] used, expected [stored_fields] instead");
+                            } else if ("stored_fields".equals(currentFieldName)) {
+                                storedFields = new ArrayList<>();
                                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-                                    fields.add(parser.text());
+                                    storedFields.add(parser.text());
                                 }
                             } else if ("_source".equals(currentFieldName)) {
                                 ArrayList includes = new ArrayList<>();
@@ -464,12 +457,12 @@ public class MultiGetRequest extends ActionRequest implements I
                         }
                     }
                     String[] aFields;
-                    if (fields != null) {
-                        aFields = fields.toArray(new String[fields.size()]);
+                    if (storedFields != null) {
+                        aFields = storedFields.toArray(new String[storedFields.size()]);
                     } else {
                         aFields = defaultFields;
                     }
-                    items.add(new Item(index, type, id).routing(routing).fields(aFields).parent(parent).version(version).versionType(versionType)
+                    items.add(new Item(index, type, id).routing(routing).storedFields(aFields).parent(parent).version(version).versionType(versionType)
                             .fetchSourceContext(fetchSourceContext == null ? defaultFetchSource : fetchSourceContext));
                 }
             }
@@ -484,7 +477,7 @@ public class MultiGetRequest extends ActionRequest implements I
                 if (!token.isValue()) {
                     throw new IllegalArgumentException("ids array element should only contain ids");
                 }
-                items.add(new Item(defaultIndex, defaultType, parser.text()).fields(defaultFields).fetchSourceContext(defaultFetchSource).routing(defaultRouting));
+                items.add(new Item(defaultIndex, defaultType, parser.text()).storedFields(defaultFields).fetchSourceContext(defaultFetchSource).routing(defaultRouting));
             }
         }
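The same rename applies per item in a multi-get. A hedged client-side sketch, assuming MultiGetRequest.add(Item) as the entry point (identifiers illustrative):

    MultiGetRequest multiGet = new MultiGetRequest();
    multiGet.add(new MultiGetRequest.Item("test", "doc", "1").storedFields("rating"));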
diff --git a/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
index a1c34f33fa5..6b9de7ecf64 100644
--- a/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
+++ b/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
@@ -92,7 +92,7 @@ public class TransportGetAction extends TransportSingleShardAction());
-        transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, SearchFreeContextRequest::new, ThreadPool.Names.SAME,
-            new FreeContextTransportHandler<>());
-        transportService.registerRequestHandler(CLEAR_SCROLL_CONTEXTS_ACTION_NAME, ClearScrollContextsRequest::new, ThreadPool.Names.SAME,
-            new ClearScrollContextsTransportHandler());
-        transportService.registerRequestHandler(DFS_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
-            new SearchDfsTransportHandler());
-        transportService.registerRequestHandler(QUERY_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
-            new SearchQueryTransportHandler());
-        transportService.registerRequestHandler(QUERY_ID_ACTION_NAME, QuerySearchRequest::new, ThreadPool.Names.SEARCH,
-            new SearchQueryByIdTransportHandler());
-        transportService.registerRequestHandler(QUERY_SCROLL_ACTION_NAME, InternalScrollSearchRequest::new, ThreadPool.Names.SEARCH,
-            new SearchQueryScrollTransportHandler());
-        transportService.registerRequestHandler(QUERY_FETCH_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
-            new SearchQueryFetchTransportHandler());
-        transportService.registerRequestHandler(QUERY_QUERY_FETCH_ACTION_NAME, QuerySearchRequest::new, ThreadPool.Names.SEARCH,
-            new SearchQueryQueryFetchTransportHandler());
-        transportService.registerRequestHandler(QUERY_FETCH_SCROLL_ACTION_NAME, InternalScrollSearchRequest::new, ThreadPool.Names.SEARCH,
-            new SearchQueryFetchScrollTransportHandler());
-        transportService.registerRequestHandler(FETCH_ID_SCROLL_ACTION_NAME, ShardFetchRequest::new, ThreadPool.Names.SEARCH,
-            new FetchByIdTransportHandler<>());
-        transportService.registerRequestHandler(FETCH_ID_ACTION_NAME, ShardFetchSearchRequest::new, ThreadPool.Names.SEARCH,
-            new FetchByIdTransportHandler<>());
     }
 
     public void sendFreeContext(DiscoveryNode node, final long contextId, SearchRequest request) {
@@ -124,8 +96,8 @@ public class SearchTransportService extends AbstractComponent {
     }
 
     public void sendClearAllScrollContexts(DiscoveryNode node, final ActionListener listener) {
-        transportService.sendRequest(node, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, new ClearScrollContextsRequest(),
-            new ActionListenerResponseHandler<>(listener, () -> TransportResponse.Empty.INSTANCE));
+        transportService.sendRequest(node, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, TransportRequest.Empty.INSTANCE,
+            new ActionListenerResponseHandler<>(listener, () -> TransportResponse.Empty.INSTANCE));
     }
 
     public void sendExecuteDfs(DiscoveryNode node, final ShardSearchTransportRequest request,
@@ -278,87 +250,66 @@ public class SearchTransportService extends AbstractComponent {
         }
     }
 
-    class FreeContextTransportHandler
-        implements TransportRequestHandler {
-        @Override
-        public void messageReceived(FreeContextRequest request, TransportChannel channel) throws Exception {
-            boolean freed = searchService.freeContext(request.id());
-            channel.sendResponse(new SearchFreeContextResponse(freed));
-        }
-    }
-
-    static class ClearScrollContextsRequest extends TransportRequest {
-    }
-
-    class ClearScrollContextsTransportHandler implements TransportRequestHandler {
-        @Override
-        public void messageReceived(ClearScrollContextsRequest request, TransportChannel channel) throws Exception {
-            searchService.freeAllScrollContexts();
-            channel.sendResponse(TransportResponse.Empty.INSTANCE);
-        }
-    }
-
-    class SearchDfsTransportHandler implements TransportRequestHandler {
-        @Override
-        public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel) throws Exception {
-            DfsSearchResult result = searchService.executeDfsPhase(request);
-            channel.sendResponse(result);
-        }
-    }
-
-    class SearchQueryTransportHandler implements TransportRequestHandler {
-        @Override
-        public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel) throws Exception {
-            QuerySearchResultProvider result = searchService.executeQueryPhase(request);
-            channel.sendResponse(result);
-        }
-    }
-
-    class SearchQueryByIdTransportHandler implements TransportRequestHandler {
-        @Override
-        public void messageReceived(QuerySearchRequest request, TransportChannel channel) throws Exception {
-            QuerySearchResult result = searchService.executeQueryPhase(request);
-            channel.sendResponse(result);
-        }
-    }
-
-    class SearchQueryScrollTransportHandler implements TransportRequestHandler {
-        @Override
-        public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel) throws Exception {
-            ScrollQuerySearchResult result = searchService.executeQueryPhase(request);
-            channel.sendResponse(result);
-        }
-    }
-
-    class SearchQueryFetchTransportHandler implements TransportRequestHandler {
-        @Override
-        public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel) throws Exception {
-            QueryFetchSearchResult result = searchService.executeFetchPhase(request);
-            channel.sendResponse(result);
-        }
-    }
-
-    class SearchQueryQueryFetchTransportHandler implements TransportRequestHandler {
-        @Override
-        public void messageReceived(QuerySearchRequest request, TransportChannel channel) throws Exception {
-            QueryFetchSearchResult result = searchService.executeFetchPhase(request);
-            channel.sendResponse(result);
-        }
-    }
-
-    class FetchByIdTransportHandler implements TransportRequestHandler {
-        @Override
-        public void messageReceived(Request request, TransportChannel channel) throws Exception {
-            FetchSearchResult result = searchService.executeFetchPhase(request);
-            channel.sendResponse(result);
-        }
-    }
-
-    class SearchQueryFetchScrollTransportHandler implements TransportRequestHandler {
-        @Override
-        public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel) throws Exception {
-            ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request);
-            channel.sendResponse(result);
-        }
+    public static void registerRequestHandler(TransportService transportService, SearchService searchService) {
+        transportService.registerRequestHandler(FREE_CONTEXT_SCROLL_ACTION_NAME, ScrollFreeContextRequest::new, ThreadPool.Names.SAME,
+            ((request, channel) -> {
+                boolean freed = searchService.freeContext(request.id());
+                channel.sendResponse(new SearchFreeContextResponse(freed));
+            }));
+        transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, SearchFreeContextRequest::new, ThreadPool.Names.SAME,
+            (request, channel) -> {
+                boolean freed = searchService.freeContext(request.id());
+                channel.sendResponse(new SearchFreeContextResponse(freed));
+            });
+        transportService.registerRequestHandler(CLEAR_SCROLL_CONTEXTS_ACTION_NAME, () -> TransportRequest.Empty.INSTANCE,
+            ThreadPool.Names.SAME, (request, channel) -> {
+                searchService.freeAllScrollContexts();
+                channel.sendResponse(TransportResponse.Empty.INSTANCE);
+            });
+        transportService.registerRequestHandler(DFS_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
+            (request, channel) -> {
+                DfsSearchResult result = searchService.executeDfsPhase(request);
+                channel.sendResponse(result);
+            });
+        transportService.registerRequestHandler(QUERY_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
+            (request, channel) -> {
+                QuerySearchResultProvider result = searchService.executeQueryPhase(request);
+                channel.sendResponse(result);
+            });
+        transportService.registerRequestHandler(QUERY_ID_ACTION_NAME, QuerySearchRequest::new, ThreadPool.Names.SEARCH,
+            (request, channel) -> {
+                QuerySearchResult result = searchService.executeQueryPhase(request);
+                channel.sendResponse(result);
+            });
+        transportService.registerRequestHandler(QUERY_SCROLL_ACTION_NAME, InternalScrollSearchRequest::new, ThreadPool.Names.SEARCH,
+            (request, channel) -> {
+                ScrollQuerySearchResult result = searchService.executeQueryPhase(request);
+                channel.sendResponse(result);
+            });
+        transportService.registerRequestHandler(QUERY_FETCH_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
+            (request, channel) -> {
+                QueryFetchSearchResult result = searchService.executeFetchPhase(request);
+                channel.sendResponse(result);
+            });
+        transportService.registerRequestHandler(QUERY_QUERY_FETCH_ACTION_NAME, QuerySearchRequest::new, ThreadPool.Names.SEARCH,
+            (request, channel) -> {
+                QueryFetchSearchResult result = searchService.executeFetchPhase(request);
+                channel.sendResponse(result);
+            });
+        transportService.registerRequestHandler(QUERY_FETCH_SCROLL_ACTION_NAME, InternalScrollSearchRequest::new, ThreadPool.Names.SEARCH,
+            (request, channel) -> {
+                ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request);
+                channel.sendResponse(result);
+            });
+        transportService.registerRequestHandler(FETCH_ID_SCROLL_ACTION_NAME, ShardFetchRequest::new, ThreadPool.Names.SEARCH,
+            (request, channel) -> {
+                FetchSearchResult result = searchService.executeFetchPhase(request);
+                channel.sendResponse(result);
+            });
+        transportService.registerRequestHandler(FETCH_ID_ACTION_NAME, ShardFetchSearchRequest::new, ThreadPool.Names.SEARCH,
+            (request, channel) -> {
+                FetchSearchResult result = searchService.executeFetchPhase(request);
+                channel.sendResponse(result);
+            });
     }
 }
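The refactor collapses the per-action handler classes into inline lambdas with identical bodies. The shape of the pattern, with a placeholder action name and request type that are not part of this change:

    // Placeholder sketch of the lambda registration style adopted above.
    transportService.registerRequestHandler("internal:example/noop", ExampleRequest::new,
        ThreadPool.Names.SAME, (request, channel) -> channel.sendResponse(TransportResponse.Empty.INSTANCE));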
diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java
index cb7e7531d2d..ef3815b1b32 100644
--- a/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java
@@ -32,7 +32,6 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.CountDown;
-import org.elasticsearch.search.SearchService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportResponse;
 import org.elasticsearch.transport.TransportService;
@@ -44,8 +43,6 @@ import java.util.concurrent.atomic.AtomicReference;
 
 import static org.elasticsearch.action.search.TransportSearchHelper.parseScrollId;
 
-/**
- */
 public class TransportClearScrollAction extends HandledTransportAction {
 
     private final ClusterService clusterService;
@@ -53,11 +50,11 @@ public class TransportClearScrollAction extends HandledTransportAction i
         super(item.index());
         this.id = item.id();
         this.type = item.type();
-        this.selectedFields(item.fields());
+        this.selectedFields(item.storedFields());
         this.routing(item.routing());
         this.parent(item.parent());
     }
diff --git a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
index d35c7bdb584..e5322f51d50 100644
--- a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
+++ b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
@@ -186,7 +186,8 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
             @Override
             public void onResponse(IndexResponse response) {
                 UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getResult());
-                if (request.fields() != null && request.fields().length > 0) {
+                if ((request.fetchSource() != null && request.fetchSource().fetchSource()) ||
+                    (request.fields() != null && request.fields().length > 0)) {
                     Tuple> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true);
                     update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
                 } else {
diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
index a893d0fa2d7..49206470532 100644
--- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
+++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.action.update;
 
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.index.IndexRequest;
@@ -28,9 +29,11 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.VersionType;
@@ -51,6 +54,7 @@ import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.lookup.SourceLookup;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -267,17 +271,19 @@ public class UpdateHelper extends AbstractComponent {
     }
 
     /**
-     * Extracts the fields from the updated document to be returned in a update response
+     * Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in an update response.
+     * For BWC this function also extracts the {@link UpdateRequest#fields()} from the updated document to be returned in an update response
      */
     public GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version, final Map source, XContentType sourceContentType, @Nullable final BytesReference sourceAsBytes) {
-        if (request.fields() == null || request.fields().length == 0) {
+        if ((request.fields() == null || request.fields().length == 0) &&
+            (request.fetchSource() == null || request.fetchSource().fetchSource() == false)) {
             return null;
         }
+        SourceLookup sourceLookup = new SourceLookup();
+        sourceLookup.setSource(source);
         boolean sourceRequested = false;
         Map fields = null;
         if (request.fields() != null && request.fields().length > 0) {
-            SourceLookup sourceLookup = new SourceLookup();
-            sourceLookup.setSource(source);
             for (String field : request.fields()) {
                 if (field.equals("_source")) {
                     sourceRequested = true;
@@ -298,8 +304,26 @@ public class UpdateHelper extends AbstractComponent {
             }
         }
 
+        BytesReference sourceFilteredAsBytes = sourceAsBytes;
+        if (request.fetchSource() != null && request.fetchSource().fetchSource()) {
+            sourceRequested = true;
+            if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) {
+                Object value = sourceLookup.filter(request.fetchSource().includes(), request.fetchSource().excludes());
+                try {
+                    final int initialCapacity = Math.min(1024, sourceAsBytes.length());
+                    BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity);
+                    try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) {
+                        builder.value(value);
+                        sourceFilteredAsBytes = builder.bytes();
+                    }
+                } catch (IOException e) {
+                    throw new ElasticsearchException("Error filtering source", e);
+                }
+            }
+        }
+
         // TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType)
-        return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceRequested ? sourceAsBytes : null, fields);
+        return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceRequested ? sourceFilteredAsBytes : null, fields);
     }
 
     public static class Result {
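The filtering step above leans on SourceLookup, whose filter(includes, excludes) signature appears verbatim in the diff. A minimal sketch of that call, assuming sourceMap is the already-parsed _source map:

    SourceLookup lookup = new SourceLookup();
    lookup.setSource(sourceMap);   // e.g. {"counter": 1, "meta": {...}}
    Object filtered = lookup.filter(new String[] {"counter"}, new String[0]);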
diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
index 3f2dde6784a..9fe5cd892dc 100644
--- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
@@ -32,6 +32,8 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -42,6 +44,7 @@ import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 
 import java.io.IOException;
 import java.util.Collections;
@@ -55,6 +58,8 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
  */
 public class UpdateRequest extends InstanceShardOperationRequest
         implements DocumentRequest, WriteRequest {
+    private static final DeprecationLogger DEPRECATION_LOGGER =
+        new DeprecationLogger(Loggers.getLogger(UpdateRequest.class));
 
     private String type;
     private String id;
@@ -68,6 +73,7 @@ public class UpdateRequest extends InstanceShardOperationRequest
     Script script;
 
     private String[] fields;
+    private FetchSourceContext fetchSourceContext;
 
     private long version = Versions.MATCH_ANY;
     private VersionType versionType = VersionType.INTERNAL;
@@ -373,17 +379,80 @@ public class UpdateRequest extends InstanceShardOperationRequest
 
     /**
      * Explicitly specify the fields that will be returned. By default, nothing is returned.
+     * @deprecated Use {@link UpdateRequest#fetchSource(String[], String[])} instead
      */
+    @Deprecated
     public UpdateRequest fields(String... fields) {
         this.fields = fields;
         return this;
     }
 
     /**
-     * Get the fields to be returned.
+     * Indicate that _source should be returned with every hit, with an
+     * "include" and/or "exclude" set which can include simple wildcard
+     * elements.
+     *
+     * @param include
+     *            An optional include (optionally wildcarded) pattern to filter
+     *            the returned _source
+     * @param exclude
+     *            An optional exclude (optionally wildcarded) pattern to filter
+     *            the returned _source
      */
+    public UpdateRequest fetchSource(@Nullable String include, @Nullable String exclude) {
+        this.fetchSourceContext = new FetchSourceContext(include, exclude);
+        return this;
+    }
+
+    /**
+     * Indicate that _source should be returned, with an
+     * "include" and/or "exclude" set which can include simple wildcard
+     * elements.
+     *
+     * @param includes
+     *            An optional list of include (optionally wildcarded) pattern to
+     *            filter the returned _source
+     * @param excludes
+     *            An optional list of exclude (optionally wildcarded) pattern to
+     *            filter the returned _source
+     */
+    public UpdateRequest fetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
+        this.fetchSourceContext = new FetchSourceContext(includes, excludes);
+        return this;
+    }
+
+    /**
+     * Indicates whether the response should contain the updated _source.
+     */
+    public UpdateRequest fetchSource(boolean fetchSource) {
+        this.fetchSourceContext = new FetchSourceContext(fetchSource);
+        return this;
+    }
+
+    /**
+     * Explicitly set the fetch source context for this request
+     */
+    public UpdateRequest fetchSource(FetchSourceContext context) {
+        this.fetchSourceContext = context;
+        return this;
+    }
+
+    /**
+     * Get the fields to be returned.
+     * @deprecated Use {@link UpdateRequest#fetchSource()} instead
+     */
+    @Deprecated
     public String[] fields() {
-        return this.fields;
+        return fields;
+    }
+
+    /**
+     * Gets the {@link FetchSourceContext} which defines how the _source should
+     * be fetched.
+     */
+    public FetchSourceContext fetchSource() {
+        return fetchSourceContext;
     }
 
     /**
@@ -618,16 +687,16 @@ public class UpdateRequest extends InstanceShardOperationRequest
         return upsertRequest;
     }
 
-    public UpdateRequest source(XContentBuilder source) throws Exception {
-        return source(source.bytes());
+    public UpdateRequest fromXContent(XContentBuilder source) throws Exception {
+        return fromXContent(source.bytes());
     }
 
-    public UpdateRequest source(byte[] source) throws Exception {
-        return source(source, 0, source.length);
+    public UpdateRequest fromXContent(byte[] source) throws Exception {
+        return fromXContent(source, 0, source.length);
    }
 
-    public UpdateRequest source(byte[] source, int offset, int length) throws Exception {
-        return source(new BytesArray(source, offset, length));
+    public UpdateRequest fromXContent(byte[] source, int offset, int length) throws Exception {
+        return fromXContent(new BytesArray(source, offset, length));
     }
 
     /**
@@ -646,7 +715,7 @@ public class UpdateRequest extends InstanceShardOperationRequest
         return detectNoop;
     }
 
-    public UpdateRequest source(BytesReference source) throws Exception {
+    public UpdateRequest fromXContent(BytesReference source) throws Exception {
         Script script = null;
         try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
             XContentParser.Token token = parser.nextToken();
@@ -685,6 +754,8 @@ public class UpdateRequest extends InstanceShardOperationRequest
                     if (fields != null) {
                         fields(fields.toArray(new String[fields.size()]));
                     }
+                } else if ("_source".equals(currentFieldName)) {
+                    fetchSourceContext = FetchSourceContext.parse(parser);
                 }
             }
             if (script != null) {
@@ -729,13 +800,8 @@ public class UpdateRequest extends InstanceShardOperationRequest
             doc = new IndexRequest();
             doc.readFrom(in);
         }
-        int size = in.readInt();
-        if (size >= 0) {
-            fields = new String[size];
-            for (int i = 0; i < size; i++) {
-                fields[i] = in.readString();
-            }
-        }
+        fields = in.readOptionalStringArray();
+        fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
         if (in.readBoolean()) {
             upsertRequest = new IndexRequest();
             upsertRequest.readFrom(in);
@@ -772,14 +838,8 @@ public class UpdateRequest extends InstanceShardOperationRequest
             doc.id(id);
             doc.writeTo(out);
         }
-        if (fields == null) {
-            out.writeInt(-1);
-        } else {
-            out.writeInt(fields.length);
-            for (String field : fields) {
-                out.writeString(field);
-            }
-        }
+        out.writeOptionalStringArray(fields);
+        out.writeOptionalWriteable(fetchSourceContext);
         if (upsertRequest == null) {
             out.writeBoolean(false);
         } else {
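A hedged usage sketch of the new request-level API; the index, type, id, and patterns are illustrative:

    UpdateRequest update = new UpdateRequest("test", "doc", "1")
        .fetchSource(new String[] {"counter"}, new String[] {"meta.*"});  // replaces fields("counter")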
diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java
index f2d80bfe66e..bbbc9bafd8f 100644
--- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java
@@ -25,17 +25,22 @@ import org.elasticsearch.action.support.WriteRequestBuilder;
 import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
-import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.VersionType;
+import org.elasticsearch.rest.action.document.RestUpdateAction;
 import org.elasticsearch.script.Script;
 
 import java.util.Map;
 
 public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder
         implements WriteRequestBuilder {
+    private static final DeprecationLogger DEPRECATION_LOGGER =
+        new DeprecationLogger(Loggers.getLogger(RestUpdateAction.class));
 
     public UpdateRequestBuilder(ElasticsearchClient client, UpdateAction action) {
         super(client, action, new UpdateRequest());
@@ -90,12 +95,57 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder esSettings) throws BootstrapException, NodeValidationException {
     // Set the system property before anything has a chance to trigger its use
     initLoggerPrefix();
@@ -259,8 +259,9 @@ final class Bootstrap {
         }
     }
 
+    final boolean closeStandardStreams = (foreground == false) || quiet;
     try {
-        if (!foreground) {
+        if (closeStandardStreams) {
             final Logger rootLogger = ESLoggerFactory.getRootLogger();
             final Appender maybeConsoleAppender = Loggers.findAppender(rootLogger, ConsoleAppender.class);
             if (maybeConsoleAppender != null) {
@@ -285,7 +286,7 @@ final class Bootstrap {
 
         INSTANCE.start();
 
-        if (!foreground) {
+        if (closeStandardStreams) {
             closeSysError();
         }
     } catch (NodeValidationException | RuntimeException e) {
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapException.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapException.java
index d28abb9f6ac..540a732dfae 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapException.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapException.java
@@ -26,7 +26,7 @@ import java.util.Map;
  * Wrapper exception for checked exceptions thrown during the bootstrap process. Methods invoked
  * during bootstrap should explicitly declare the checked exceptions that they can throw, rather
  * than declaring the top-level checked exception {@link Exception}. This exception exists to wrap
- * these checked exceptions so that {@link Bootstrap#init(boolean, Path, Map)} does not have to
+ * these checked exceptions so that {@link Bootstrap#init(boolean, Path, boolean, Map)} does not have to
  * declare all of these checked exceptions.
  */
 class BootstrapException extends Exception {
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
index 64e7350b5fa..43160ee8c9b 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
@@ -44,6 +44,7 @@ class Elasticsearch extends SettingCommand {
     private final OptionSpecBuilder versionOption;
     private final OptionSpecBuilder daemonizeOption;
     private final OptionSpec pidfileOption;
+    private final OptionSpecBuilder quietOption;
 
     // visible for testing
     Elasticsearch() {
@@ -58,6 +59,10 @@ class Elasticsearch extends SettingCommand {
             .availableUnless(versionOption)
             .withRequiredArg()
             .withValuesConvertedBy(new PathConverter());
+        quietOption = parser.acceptsAll(Arrays.asList("q", "quiet"),
+            "Turns off standard output/error streams logging in console")
+            .availableUnless(versionOption)
+            .availableUnless(daemonizeOption);
     }
 
     /**
@@ -92,17 +97,19 @@ class Elasticsearch extends SettingCommand {
 
         final boolean daemonize = options.has(daemonizeOption);
         final Path pidFile = pidfileOption.value(options);
+        final boolean quiet = options.has(quietOption);
 
         try {
-            init(daemonize, pidFile, settings);
+            init(daemonize, pidFile, quiet, settings);
         } catch (NodeValidationException e) {
             throw new UserException(ExitCodes.CONFIG, e.getMessage());
         }
     }
 
-    void init(final boolean daemonize, final Path pidFile, final Map esSettings) throws NodeValidationException {
+    void init(final boolean daemonize, final Path pidFile, final boolean quiet, final Map esSettings)
+        throws NodeValidationException {
         try {
-            Bootstrap.init(!daemonize, pidFile, esSettings);
+            Bootstrap.init(!daemonize, pidFile, quiet, esSettings);
         } catch (BootstrapException | RuntimeException e) {
             // format exceptions to the console in a special way
             // to avoid 2MB stacktraces from guice, etc.
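The -q/--quiet flag is declared through jopt-simple, as shown by the acceptsAll(...) call above. An illustrative parse of the new flag (the description string here is a placeholder):

    OptionParser parser = new OptionParser();
    OptionSpecBuilder quiet = parser.acceptsAll(Arrays.asList("q", "quiet"), "quiet logging");
    OptionSet options = parser.parse("--quiet");
    boolean isQuiet = options.has(quiet);   // true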
diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
index f7ce9f929bd..c5dd158a3f1 100644
--- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
+++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
@@ -136,7 +136,6 @@ public abstract class TransportClient extends AbstractClient {
         }
         modules.add(networkModule);
         modules.add(b -> b.bind(ThreadPool.class).toInstance(threadPool));
-        modules.add(searchModule);
         ActionModule actionModule = new ActionModule(false, true, settings, null, settingsModule.getClusterSettings(),
             pluginsService.filterPlugins(ActionPlugin.class));
         modules.add(actionModule);
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
index 8b3cbec0ebd..f24c1bba3f8 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
@@ -500,15 +500,6 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                 validationErrors.add("custom path [" + customPath + "] is not a sub-path of path.shared_data [" + env.sharedDataFile() + "]");
             }
         }
-        //norelease - this can be removed?
-        Integer number_of_primaries = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null);
-        Integer number_of_replicas = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, null);
-        if (number_of_primaries != null && number_of_primaries <= 0) {
-            validationErrors.add("index must have 1 or more primary shards");
-        }
-        if (number_of_replicas != null && number_of_replicas < 0) {
-            validationErrors.add("index must have 0 or more replica shards");
-        }
         return validationErrors;
     }
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java
index b88aa7b86ac..4ffcf33097f 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java
@@ -32,6 +32,7 @@ import org.elasticsearch.common.ValidationException;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.Index;
@@ -63,15 +64,21 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
     private final IndicesService indicesService;
     private final MetaDataCreateIndexService metaDataCreateIndexService;
     private final NodeServicesProvider nodeServicesProvider;
+    private final IndexScopedSettings indexScopedSettings;
 
     @Inject
-    public MetaDataIndexTemplateService(Settings settings, ClusterService clusterService, MetaDataCreateIndexService metaDataCreateIndexService, AliasValidator aliasValidator, IndicesService indicesService, NodeServicesProvider nodeServicesProvider) {
+    public MetaDataIndexTemplateService(Settings settings, ClusterService clusterService,
+                                        MetaDataCreateIndexService metaDataCreateIndexService,
+                                        AliasValidator aliasValidator, IndicesService indicesService,
+                                        NodeServicesProvider nodeServicesProvider,
+                                        IndexScopedSettings indexScopedSettings) {
         super(settings);
         this.clusterService = clusterService;
         this.aliasValidator = aliasValidator;
         this.indicesService = indicesService;
         this.metaDataCreateIndexService = metaDataCreateIndexService;
         this.nodeServicesProvider = nodeServicesProvider;
+        this.indexScopedSettings = indexScopedSettings;
     }
 
     public void removeTemplates(final RemoveRequest request, final RemoveListener listener) {
@@ -260,6 +267,14 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
             validationErrors.add("template must not contain the following characters " + Strings.INVALID_FILENAME_CHARS);
         }
 
+        try {
+            indexScopedSettings.validate(request.settings);
+        } catch (IllegalArgumentException iae) {
+            validationErrors.add(iae.getMessage());
+            for (Throwable t : iae.getSuppressed()) {
+                validationErrors.add(t.getMessage());
+            }
+        }
         List indexSettingsValidation = metaDataCreateIndexService.getIndexSettingsValidationErrors(request.settings);
         validationErrors.addAll(indexSettingsValidation);
         if (!validationErrors.isEmpty()) {
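With IndexScopedSettings injected, template settings now pass through the same validation path as index settings. A sketch of the failure mode this enables (the setting name is invented for illustration):

    // A bogus index setting in a template now surfaces as a validation error
    // instead of being accepted silently.
    try {
        indexScopedSettings.validate(Settings.builder().put("index.no_such_setting", true).build());
    } catch (IllegalArgumentException e) {
        validationErrors.add(e.getMessage());
    }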
diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
index 0c39c43bc9f..55be77d201f 100644
--- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
+++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
@@ -96,7 +96,7 @@ public class DiscoveryNode implements Writeable, ToXContent {
      * @param version      the version of the node
      */
     public DiscoveryNode(final String id, TransportAddress address, Version version) {
-        this(id, address, Collections.emptyMap(), Collections.emptySet(), version);
+        this(id, address, Collections.emptyMap(), EnumSet.allOf(Role.class), version);
     }
 
     /**
diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java
index 504fc413137..15e2fb4fabb 100644
--- a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java
+++ b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java
@@ -19,8 +19,15 @@
 
 package org.elasticsearch.common.geo;
 
+import org.apache.lucene.document.LatLonDocValuesField;
+import org.apache.lucene.document.LatLonPoint;
+import org.apache.lucene.geo.GeoEncodingUtils;
+import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.spatial.geopoint.document.GeoPointField;
 import org.apache.lucene.util.BitUtil;
+import org.apache.lucene.util.BytesRef;
+
+import java.util.Arrays;
 
 import static org.elasticsearch.common.geo.GeoHashUtils.mortonEncode;
 import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
@@ -88,6 +95,24 @@ public final class GeoPoint {
         return this;
     }
 
+    // todo this is a crutch because LatLonPoint doesn't have a helper for returning .stringValue()
+    // todo remove with next release of lucene
+    public GeoPoint resetFromIndexableField(IndexableField field) {
+        if (field instanceof LatLonPoint) {
+            BytesRef br = field.binaryValue();
+            byte[] bytes = Arrays.copyOfRange(br.bytes, br.offset, br.length);
+            return this.reset(
+                GeoEncodingUtils.decodeLatitude(bytes, 0),
+                GeoEncodingUtils.decodeLongitude(bytes, Integer.BYTES));
+        } else if (field instanceof LatLonDocValuesField) {
+            long encoded = (long)(field.numericValue());
+            return this.reset(
+                GeoEncodingUtils.decodeLatitude((int)(encoded >>> 32)),
+                GeoEncodingUtils.decodeLongitude((int)encoded));
+        }
+        return resetFromIndexHash(Long.parseLong(field.stringValue()));
+    }
+
     public GeoPoint resetFromGeoHash(String geohash) {
         final long hash = mortonEncode(geohash);
         return this.reset(GeoPointField.decodeLatitude(hash), GeoPointField.decodeLongitude(hash));
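The doc-values branch of resetFromIndexableField unpacks a single long into two 32-bit encodings. A worked example using the same Lucene helpers, where dvField stands in for a LatLonDocValuesField:

    long encoded = dvField.numericValue().longValue();
    double lat = GeoEncodingUtils.decodeLatitude((int) (encoded >>> 32));   // latitude in the high bits
    double lon = GeoEncodingUtils.decodeLongitude((int) encoded);          // longitude in the low bits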
Property.Dynamic, Property.NodeScope); public static Logger getLogger(String prefix, String name) { - name = name.intern(); - final Logger logger = getLogger(new PrefixMessageFactory(), name); - final MessageFactory factory = logger.getMessageFactory(); - // in some cases, we initialize the logger before we are ready to set the prefix - // we can not re-initialize the logger, so the above getLogger might return an existing - // instance without the prefix set; thus, we hack around this by resetting the prefix - if (prefix != null && factory instanceof PrefixMessageFactory) { - ((PrefixMessageFactory) factory).setPrefix(prefix.intern()); - } - return logger; + return getLogger(prefix, LogManager.getLogger(name)); } - public static Logger getLogger(MessageFactory messageFactory, String name) { - return LogManager.getLogger(name, messageFactory); + public static Logger getLogger(String prefix, Class clazz) { + return getLogger(prefix, LogManager.getLogger(clazz)); + } + + public static Logger getLogger(String prefix, Logger logger) { + return new PrefixLogger((ExtendedLogger)logger, logger.getName(), prefix); + } + + public static Logger getLogger(Class clazz) { + return getLogger(null, clazz); } public static Logger getLogger(String name) { - return getLogger((String)null, name); - } - - public static DeprecationLogger getDeprecationLogger(String name) { - return new DeprecationLogger(getLogger(name)); - } - - public static DeprecationLogger getDeprecationLogger(String prefix, String name) { - return new DeprecationLogger(getLogger(prefix, name)); + return getLogger(null, name); } public static Logger getRootLogger() { return LogManager.getRootLogger(); } - private ESLoggerFactory() { - // Utility class can't be built. - } - } diff --git a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index ffb78b2a615..d990a28ea46 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -30,7 +30,6 @@ import org.apache.logging.log4j.core.config.builder.impl.BuiltConfiguration; import org.apache.logging.log4j.core.config.composite.CompositeConfiguration; import org.apache.logging.log4j.core.config.properties.PropertiesConfiguration; import org.apache.logging.log4j.core.config.properties.PropertiesConfigurationFactory; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; @@ -44,7 +43,6 @@ import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; -import java.util.Arrays; import java.util.EnumSet; import java.util.List; import java.util.Map; @@ -81,13 +79,14 @@ public class LogConfigurator { } if (ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.exists(settings)) { - Loggers.setLevel(ESLoggerFactory.getRootLogger(), ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.get(settings)); + final Level level = ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.get(settings); + Loggers.setLevel(ESLoggerFactory.getRootLogger(), level); } final Map levels = settings.filter(ESLoggerFactory.LOG_LEVEL_SETTING::match).getAsMap(); for (String key : levels.keySet()) { final Level level = ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings); - 
Loggers.setLevel(Loggers.getLogger(key.substring("logger.".length())), level); + Loggers.setLevel(ESLoggerFactory.getLogger(key.substring("logger.".length())), level); } } diff --git a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java index 31abfb2ec6e..812a0b70f28 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -35,10 +35,12 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.Node; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Map; import static java.util.Arrays.asList; +import static javax.security.auth.login.Configuration.getConfiguration; import static org.elasticsearch.common.util.CollectionUtils.asArrayList; /** @@ -46,24 +48,8 @@ import static org.elasticsearch.common.util.CollectionUtils.asArrayList; */ public class Loggers { - static final String commonPrefix = System.getProperty("es.logger.prefix", "org.elasticsearch."); - public static final String SPACE = " "; - private static boolean consoleLoggingEnabled = true; - - public static void disableConsoleLogging() { - consoleLoggingEnabled = false; - } - - public static void enableConsoleLogging() { - consoleLoggingEnabled = true; - } - - public static boolean consoleLoggingEnabled() { - return consoleLoggingEnabled; - } - public static Logger getLogger(Class clazz, Settings settings, ShardId shardId, String... prefixes) { return getLogger(clazz, settings, shardId.getIndex(), asArrayList(Integer.toString(shardId.id()), prefixes).toArray(new String[0])); } @@ -82,10 +68,16 @@ public class Loggers { } public static Logger getLogger(Class clazz, Settings settings, String... prefixes) { - return getLogger(buildClassLoggerName(clazz), settings, prefixes); + final List prefixesList = prefixesList(settings, prefixes); + return getLogger(clazz, prefixesList.toArray(new String[prefixesList.size()])); } public static Logger getLogger(String loggerName, Settings settings, String... prefixes) { + final List prefixesList = prefixesList(settings, prefixes); + return getLogger(loggerName, prefixesList.toArray(new String[prefixesList.size()])); + } + + private static List prefixesList(Settings settings, String... prefixes) { List prefixesList = new ArrayList<>(); if (Node.NODE_NAME_SETTING.exists(settings)) { prefixesList.add(Node.NODE_NAME_SETTING.get(settings)); @@ -93,26 +85,31 @@ public class Loggers { if (prefixes != null && prefixes.length > 0) { prefixesList.addAll(asList(prefixes)); } - return getLogger(getLoggerName(loggerName), prefixesList.toArray(new String[prefixesList.size()])); + return prefixesList; } public static Logger getLogger(Logger parentLogger, String s) { - return ESLoggerFactory.getLogger(parentLogger.getMessageFactory(), getLoggerName(parentLogger.getName() + s)); + assert parentLogger instanceof PrefixLogger; + return ESLoggerFactory.getLogger(((PrefixLogger)parentLogger).prefix(), parentLogger.getName() + s); } public static Logger getLogger(String s) { - return ESLoggerFactory.getLogger(getLoggerName(s)); + return ESLoggerFactory.getLogger(s); } public static Logger getLogger(Class clazz) { - return ESLoggerFactory.getLogger(getLoggerName(buildClassLoggerName(clazz))); + return ESLoggerFactory.getLogger(clazz); } public static Logger getLogger(Class clazz, String... 
prefixes) { - return getLogger(buildClassLoggerName(clazz), prefixes); + return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz); } public static Logger getLogger(String name, String... prefixes) { + return ESLoggerFactory.getLogger(formatPrefix(prefixes), name); + } + + private static String formatPrefix(String... prefixes) { String prefix = null; if (prefixes != null && prefixes.length > 0) { StringBuilder sb = new StringBuilder(); @@ -130,7 +127,7 @@ public class Loggers { prefix = sb.toString(); } } - return ESLoggerFactory.getLogger(prefix, getLoggerName(name)); + return prefix; } /** @@ -148,30 +145,23 @@ public class Loggers { } public static void setLevel(Logger logger, Level level) { - if (!"".equals(logger.getName())) { + if (!LogManager.ROOT_LOGGER_NAME.equals(logger.getName())) { Configurator.setLevel(logger.getName(), level); } else { - LoggerContext ctx = LoggerContext.getContext(false); - Configuration config = ctx.getConfiguration(); - LoggerConfig loggerConfig = config.getLoggerConfig(logger.getName()); + final LoggerContext ctx = LoggerContext.getContext(false); + final Configuration config = ctx.getConfiguration(); + final LoggerConfig loggerConfig = config.getLoggerConfig(logger.getName()); loggerConfig.setLevel(level); ctx.updateLoggers(); } - } - private static String buildClassLoggerName(Class clazz) { - String name = clazz.getName(); - if (name.startsWith("org.elasticsearch.")) { - name = Classes.getPackageName(clazz); + // we have to descend the hierarchy + final LoggerContext ctx = LoggerContext.getContext(false); + for (final LoggerConfig loggerConfig : ctx.getConfiguration().getLoggers().values()) { + if (LogManager.ROOT_LOGGER_NAME.equals(logger.getName()) || loggerConfig.getName().startsWith(logger.getName() + ".")) { + Configurator.setLevel(loggerConfig.getName(), level); + } } - return name; - } - - private static String getLoggerName(String name) { - if (name.startsWith("org.elasticsearch.")) { - name = name.substring("org.elasticsearch.".length()); - } - return commonPrefix + name; } public static void addAppender(final Logger logger, final Appender appender) { diff --git a/core/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java b/core/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java new file mode 100644 index 00000000000..32de2afde36 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.logging; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Marker; +import org.apache.logging.log4j.MarkerManager; +import org.apache.logging.log4j.message.Message; +import org.apache.logging.log4j.spi.ExtendedLogger; +import org.apache.logging.log4j.spi.ExtendedLoggerWrapper; + +import java.lang.ref.WeakReference; +import java.util.WeakHashMap; + +class PrefixLogger extends ExtendedLoggerWrapper { + + // we can not use the built-in Marker tracking (MarkerManager) because the MarkerManager holds + // a permanent reference to the marker; however, we have transient markers from index-level and + // shard-level components so this would effectively be a memory leak + private static final WeakHashMap<String, WeakReference<Marker>> markers = new WeakHashMap<>(); + + private final Marker marker; + + public String prefix() { + return marker.getName(); + } + + PrefixLogger(final ExtendedLogger logger, final String name, final String prefix) { + super(logger, name, null); + + final String actualPrefix = (prefix == null ? "" : prefix).intern(); + final Marker actualMarker; + // markers is not thread-safe, so we synchronize access + synchronized (markers) { + final WeakReference<Marker> marker = markers.get(actualPrefix); + final Marker maybeMarker = marker == null ? null : marker.get(); + if (maybeMarker == null) { + actualMarker = new MarkerManager.Log4jMarker(actualPrefix); + markers.put(actualPrefix, new WeakReference<>(actualMarker)); + } else { + actualMarker = maybeMarker; + } + } + this.marker = actualMarker; + } + + @Override + public void logMessage(final String fqcn, final Level level, final Marker marker, final Message message, final Throwable t) { + assert marker == null; + super.logMessage(fqcn, level, this.marker, message, t); + } + +} diff --git a/core/src/main/java/org/elasticsearch/common/logging/PrefixMessageFactory.java b/core/src/main/java/org/elasticsearch/common/logging/PrefixMessageFactory.java deleted file mode 100644 index a141ceb75aa..00000000000 --- a/core/src/main/java/org/elasticsearch/common/logging/PrefixMessageFactory.java +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.elasticsearch.common.logging; - -import org.apache.logging.log4j.message.Message; -import org.apache.logging.log4j.message.MessageFactory2; -import org.apache.logging.log4j.message.ObjectMessage; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.message.SimpleMessage; - -public class PrefixMessageFactory implements MessageFactory2 { - - private String prefix = ""; - - public String getPrefix() { - return prefix; - } - - public void setPrefix(String prefix) { - this.prefix = prefix; - } - - @Override - public Message newMessage(Object message) { - return new PrefixObjectMessage(prefix, message); - } - - private static class PrefixObjectMessage extends ObjectMessage { - - private final String prefix; - private final Object object; - private String prefixObjectString; - - private PrefixObjectMessage(String prefix, Object object) { - super(object); - this.prefix = prefix; - this.object = object; - } - - @Override - public String getFormattedMessage() { - if (prefixObjectString == null) { - prefixObjectString = prefix + super.getFormattedMessage(); - } - return prefixObjectString; - } - - @Override - public void formatTo(StringBuilder buffer) { - buffer.append(prefix); - super.formatTo(buffer); - } - - @Override - public Object[] getParameters() { - return new Object[]{prefix, object}; - } - - } - - @Override - public Message newMessage(String message) { - return new PrefixSimpleMessage(prefix, message); - } - - private static class PrefixSimpleMessage extends SimpleMessage { - - private final String prefix; - private String prefixMessage; - - PrefixSimpleMessage(String prefix, String message) { - super(message); - this.prefix = prefix; - } - - PrefixSimpleMessage(String prefix, CharSequence charSequence) { - super(charSequence); - this.prefix = prefix; - } - - @Override - public String getFormattedMessage() { - if (prefixMessage == null) { - prefixMessage = prefix + super.getFormattedMessage(); - } - return prefixMessage; - } - - @Override - public void formatTo(StringBuilder buffer) { - buffer.append(prefix); - super.formatTo(buffer); - } - - @Override - public int length() { - return prefixMessage.length(); - } - - @Override - public char charAt(int index) { - return prefixMessage.charAt(index); - } - - @Override - public CharSequence subSequence(int start, int end) { - return prefixMessage.subSequence(start, end); - } - - } - - @Override - public Message newMessage(String message, Object... params) { - return new PrefixParameterizedMessage(prefix, message, params); - } - - private static class PrefixParameterizedMessage extends ParameterizedMessage { - - private static ThreadLocal threadLocalStringBuilder = ThreadLocal.withInitial(StringBuilder::new); - - private final String prefix; - private String formattedMessage; - - private PrefixParameterizedMessage(String prefix, String messagePattern, Object... 
arguments) { - super(messagePattern, arguments); - this.prefix = prefix; - } - - @Override - public String getFormattedMessage() { - if (formattedMessage == null) { - final StringBuilder buffer = threadLocalStringBuilder.get(); - buffer.setLength(0); - formatTo(buffer); - formattedMessage = buffer.toString(); - } - return formattedMessage; - } - - @Override - public void formatTo(StringBuilder buffer) { - buffer.append(prefix); - super.formatTo(buffer); - } - - } - - @Override - public Message newMessage(CharSequence charSequence) { - return new PrefixSimpleMessage(prefix, charSequence); - } - - @Override - public Message newMessage(String message, Object p0) { - return new PrefixParameterizedMessage(prefix, message, p0); - } - - @Override - public Message newMessage(String message, Object p0, Object p1) { - return new PrefixParameterizedMessage(prefix, message, p0, p1); - } - - @Override - public Message newMessage(String message, Object p0, Object p1, Object p2) { - return new PrefixParameterizedMessage(prefix, message, p0, p1, p2); - } - - @Override - public Message newMessage(String message, Object p0, Object p1, Object p2, Object p3) { - return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3); - } - - @Override - public Message newMessage(String message, Object p0, Object p1, Object p2, Object p3, Object p4) { - return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4); - } - - @Override - public Message newMessage(String message, Object p0, Object p1, Object p2, Object p3, Object p4, Object p5) { - return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4, p5); - } - - @Override - public Message newMessage(String message, Object p0, Object p1, Object p2, Object p3, Object p4, Object p5, Object p6) { - return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4, p5, p6); - } - - @Override - public Message newMessage(String message, Object p0, Object p1, Object p2, Object p3, Object p4, Object p5, Object p6, Object p7) { - return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4, p5, p6, p7); - } - - @Override - public Message newMessage( - String message, Object p0, Object p1, Object p2, Object p3, Object p4, Object p5, Object p6, Object p7, Object p8) { - return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4, p5, p6, p7, p8); - } - - @Override - public Message newMessage( - String message, Object p0, Object p1, Object p2, Object p3, Object p4, Object p5, Object p6, Object p7, Object p8, Object p9) { - return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4, p5, p6, p7, p8, p9); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/lucene/LoggerInfoStream.java b/core/src/main/java/org/elasticsearch/common/lucene/LoggerInfoStream.java index c4ef2ef8c70..76b0b598178 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/LoggerInfoStream.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/LoggerInfoStream.java @@ -25,19 +25,12 @@ import org.elasticsearch.common.logging.Loggers; /** An InfoStream (for Lucene's IndexWriter) that redirects * messages to "lucene.iw.ifd" and "lucene.iw" Logger.trace. 
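 * For example, a message Lucene emits for its "IFD" component is handed to a trace logger derived from
 * the parent logger's name plus the component name (see getLogger below).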
*/ - public final class LoggerInfoStream extends InfoStream { - /** Used for component-specific logging: */ - /** Logger for everything */ - private final Logger logger; + private final Logger parentLogger; - /** Logger for IndexFileDeleter */ - private final Logger ifdLogger; - - public LoggerInfoStream(Logger parentLogger) { - logger = Loggers.getLogger(parentLogger, ".lucene.iw"); - ifdLogger = Loggers.getLogger(parentLogger, ".lucene.iw.ifd"); + public LoggerInfoStream(final Logger parentLogger) { + this.parentLogger = parentLogger; } @Override @@ -53,14 +46,11 @@ public final class LoggerInfoStream extends InfoStream { } private Logger getLogger(String component) { - if (component.equals("IFD")) { - return ifdLogger; - } else { - return logger; - } + return Loggers.getLogger(parentLogger, "." + component); } @Override public void close() { + } } diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java b/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java index 8652d4c5c05..9e06c39b83e 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java @@ -32,6 +32,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.Optional; /** * Utilities for network interfaces / addresses binding and publishing. @@ -227,14 +228,15 @@ public abstract class NetworkUtils { /** Returns addresses for the given interface (it must be marked up) */ static InetAddress[] getAddressesForInterface(String name) throws SocketException { - NetworkInterface intf = NetworkInterface.getByName(name); - if (intf == null) { + Optional networkInterface = getInterfaces().stream().filter((netIf) -> name.equals(netIf.getName())).findFirst(); + + if (networkInterface.isPresent() == false) { throw new IllegalArgumentException("No interface named '" + name + "' found, got " + getInterfaces()); } - if (!intf.isUp()) { + if (!networkInterface.get().isUp()) { throw new IllegalArgumentException("Interface '" + name + "' is not up and running"); } - List list = Collections.list(intf.getInetAddresses()); + List list = Collections.list(networkInterface.get().getInetAddresses()); if (list.isEmpty()) { throw new IllegalArgumentException("Interface '" + name + "' has no internet addresses"); } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index f419da06e68..132505fb403 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -284,8 +284,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover protected void doClose() { masterFD.close(); nodesFD.close(); - publishClusterState.close(); - membership.close(); pingService.close(); } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java index 6dc89998046..04aee9db3d8 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java @@ -168,7 +168,6 @@ public class MasterFaultDetection extends FaultDetection { super.close(); stop("closing"); this.listeners.clear(); - 
transportService.removeHandler(MASTER_PING_ACTION_NAME); } @Override diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java index 0ab5bde25cd..6361d3cde39 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java @@ -139,7 +139,6 @@ public class NodesFaultDetection extends FaultDetection { public void close() { super.close(); stop(); - transportService.removeHandler(PING_ACTION_NAME); } @Override diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java index 961b8d79728..8740d12c5f7 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java @@ -76,12 +76,6 @@ public class MembershipAction extends AbstractComponent { transportService.registerRequestHandler(DISCOVERY_LEAVE_ACTION_NAME, LeaveRequest::new, ThreadPool.Names.GENERIC, new LeaveRequestRequestHandler()); } - public void close() { - transportService.removeHandler(DISCOVERY_JOIN_ACTION_NAME); - transportService.removeHandler(DISCOVERY_JOIN_VALIDATE_ACTION_NAME); - transportService.removeHandler(DISCOVERY_LEAVE_ACTION_NAME); - } - public void sendLeaveRequest(DiscoveryNode masterNode, DiscoveryNode node) { transportService.sendRequest(node, DISCOVERY_LEAVE_ACTION_NAME, new LeaveRequest(masterNode), EmptyTransportResponseHandler.INSTANCE_SAME); } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java index 176ac5763e3..afe4902f887 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java @@ -160,18 +160,10 @@ public class UnicastZenPing extends AbstractLifecycleComponent implements ZenPin } logger.debug("using initial hosts {}, with concurrent_connects [{}]", hosts, concurrentConnects); - List configuredTargetNodes = new ArrayList<>(); - for (String host : hosts) { - try { - TransportAddress[] addresses = transportService.addressesFromString(host, limitPortCounts); - for (TransportAddress address : addresses) { - configuredTargetNodes.add(new DiscoveryNode(UNICAST_NODE_PREFIX + unicastNodeIdGenerator.incrementAndGet() + "#", - address, emptyMap(), emptySet(), getVersion().minimumCompatibilityVersion())); - } - } catch (Exception e) { - throw new IllegalArgumentException("Failed to resolve address for [" + host + "]", e); - } + for (final String host : hosts) { + configuredTargetNodes.addAll(resolveDiscoveryNodes(host, limitPortCounts, transportService, + () -> UNICAST_NODE_PREFIX + unicastNodeIdGenerator.incrementAndGet() + "#")); } this.configuredTargetNodes = configuredTargetNodes.toArray(new DiscoveryNode[configuredTargetNodes.size()]); @@ -183,6 +175,32 @@ public class UnicastZenPing extends AbstractLifecycleComponent implements ZenPin threadFactory, threadPool.getThreadContext()); } + /** + * Resolves a host to a list of discovery nodes. 
The host is resolved into a transport + * address (or a collection of addresses if the number of ports is greater than one) and + * the transport addresses are used to create discovery nodes. + * + * @param host the host to resolve + * @param limitPortCounts the number of ports to resolve (should be 1 for non-local transport) + * @param transportService the transport service + * @param idGenerator the generator to supply unique ids for each discovery node + * @return a list of discovery nodes with resolved transport addresses + */ + public static List<DiscoveryNode> resolveDiscoveryNodes(final String host, final int limitPortCounts, + final TransportService transportService, final Supplier<String> idGenerator) { + List<DiscoveryNode> discoveryNodes = new ArrayList<>(); + try { + TransportAddress[] addresses = transportService.addressesFromString(host, limitPortCounts); + for (TransportAddress address : addresses) { + discoveryNodes.add(new DiscoveryNode(idGenerator.get(), address, emptyMap(), emptySet(), + Version.CURRENT.minimumCompatibilityVersion())); + } + } catch (Exception e) { + throw new IllegalArgumentException("Failed to resolve address for [" + host + "]", e); + } + return discoveryNodes; + } + @Override protected void doStart() { } @@ -193,7 +211,6 @@ public class UnicastZenPing extends AbstractLifecycleComponent implements ZenPin @Override protected void doClose() { - transportService.removeHandler(ACTION_NAME); ThreadPool.terminate(unicastConnectExecutor, 0, TimeUnit.SECONDS); try { IOUtils.close(receivedResponses.values()); diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java index 06c25ebf81a..870e34cc1f3 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java @@ -107,11 +107,6 @@ public class PublishClusterStateAction extends AbstractComponent { transportService.registerRequestHandler(COMMIT_ACTION_NAME, CommitClusterStateRequest::new, ThreadPool.Names.SAME, new CommitClusterStateRequestHandler()); } - public void close() { - transportService.removeHandler(SEND_ACTION_NAME); - transportService.removeHandler(COMMIT_ACTION_NAME); - } - public PendingClusterStatesQueue pendingStatesQueue() { return pendingStatesQueue; } diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 774465fb71a..a9136fb0228 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -789,30 +789,30 @@ public class InternalEngine extends Engine { } catch (Exception e) { throw new FlushFailedEngineException(shardId, e); } - } - /* - * we have to inc-ref the store here since if the engine is closed by a tragic event - * we don't acquire the write lock and wait until we have exclusive access. This might also - * dec the store reference which can essentially close the store and unless we can inc the reference - * we can't use it.
- */ - store.incRef(); - try { - // reread the last committed segment infos - lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo(); - } catch (Exception e) { - if (isClosed.get() == false) { - try { - logger.warn("failed to read latest segment infos on flush", e); - } catch (Exception inner) { - e.addSuppressed(inner); - } - if (Lucene.isCorruptionException(e)) { - throw new FlushFailedEngineException(shardId, e); + /* + * we have to inc-ref the store here since if the engine is closed by a tragic event + * we don't acquire the write lock and wait until we have exclusive access. This might also + * dec the store reference which can essentially close the store and unless we can inc the reference + * we can't use it. + */ + store.incRef(); + try { + // reread the last committed segment infos + lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo(); + } catch (Exception e) { + if (isClosed.get() == false) { + try { + logger.warn("failed to read latest segment infos on flush", e); + } catch (Exception inner) { + e.addSuppressed(inner); + } + if (Lucene.isCorruptionException(e)) { + throw new FlushFailedEngineException(shardId, e); + } } + } finally { + store.decRef(); } - } finally { - store.decRef(); } newCommitId = lastCommittedSegmentInfos.getId(); } catch (FlushFailedEngineException ex) { diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractLatLonPointDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractLatLonPointDVIndexFieldData.java new file mode 100644 index 00000000000..7ce6eb95887 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractLatLonPointDVIndexFieldData.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.fielddata.plain; + +import org.apache.lucene.document.LatLonDocValuesField; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.MultiValueMode; + +import java.io.IOException; + +public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndexFieldData + implements IndexGeoPointFieldData { + AbstractLatLonPointDVIndexFieldData(Index index, String fieldName) { + super(index, fieldName); + } + + @Override + public final XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, + XFieldComparatorSource.Nested nested) { + throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance"); + } + + public static class LatLonPointDVIndexFieldData extends AbstractLatLonPointDVIndexFieldData { + public LatLonPointDVIndexFieldData(Index index, String fieldName) { + super(index, fieldName); + } + + @Override + public AtomicGeoPointFieldData load(LeafReaderContext context) { + try { + LeafReader reader = context.reader(); + FieldInfo info = reader.getFieldInfos().fieldInfo(fieldName); + if (info != null) { + checkCompatible(info); + } + return new LatLonPointDVAtomicFieldData(DocValues.getSortedNumeric(reader, fieldName)); + } catch (IOException e) { + throw new IllegalStateException("Cannot load doc values", e); + } + } + + @Override + public AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception { + return load(context); + } + + /** helper: checks a fieldinfo and throws exception if it's definitely not a LatLonDocValuesField */ + static void checkCompatible(FieldInfo fieldInfo) { + // dv properties could be "unset", if you e.g. used only StoredField with this same name in the segment.
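+ // for example, a segment in which this field name was only ever written as a StoredField reports
+ // DocValuesType.NONE, which is accepted here; only a conflicting doc-values type throws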
+ if (fieldInfo.getDocValuesType() != DocValuesType.NONE + && fieldInfo.getDocValuesType() != LatLonDocValuesField.TYPE.docValuesType()) { + throw new IllegalArgumentException("field=\"" + fieldInfo.name + "\" was indexed with docValuesType=" + + fieldInfo.getDocValuesType() + " but this type has docValuesType=" + + LatLonDocValuesField.TYPE.docValuesType() + ", is the field really a LatLonDocValuesField?"); + } + } + } + + public static class Builder implements IndexFieldData.Builder { + @Override + public IndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, + CircuitBreakerService breakerService, MapperService mapperService) { + // ignore breaker + return new LatLonPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name()); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/LatLonPointDVAtomicFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/LatLonPointDVAtomicFieldData.java new file mode 100644 index 00000000000..d11a79c2556 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/LatLonPointDVAtomicFieldData.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.fielddata.plain; + +import org.apache.lucene.geo.GeoEncodingUtils; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.index.fielddata.MultiGeoPointValues; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +final class LatLonPointDVAtomicFieldData extends AbstractAtomicGeoPointFieldData { + private final SortedNumericDocValues values; + + LatLonPointDVAtomicFieldData(SortedNumericDocValues values) { + super(); + this.values = values; + } + + @Override + public long ramBytesUsed() { + return 0; // not exposed by lucene + } + + @Override + public Collection<Accountable> getChildResources() { + return Collections.emptyList(); + } + + @Override + public void close() { + // noop + } + + @Override + public MultiGeoPointValues getGeoPointValues() { + return new MultiGeoPointValues() { + GeoPoint[] points = new GeoPoint[0]; + private int count = 0; + + @Override + public void setDocument(int docId) { + values.setDocument(docId); + count = values.count(); + if (count > points.length) { + final int previousLength = points.length; + points = Arrays.copyOf(points, ArrayUtil.oversize(count, RamUsageEstimator.NUM_BYTES_OBJECT_REF)); + for (int i = previousLength; i < points.length; ++i) { + points[i] = new GeoPoint(Double.NaN, Double.NaN); + } + } + long encoded; + for (int i=0; i<count; i++) { + encoded = values.valueAt(i); + points[i].reset(GeoEncodingUtils.decodeLatitude((int)(encoded >>> 32)), GeoEncodingUtils.decodeLongitude((int)encoded)); + } + } + + @Override + public int count() { + return count; + } + + @Override + public GeoPoint valueAt(int index) { + return points[index]; + } + }; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/get/GetResult.java b/core/src/main/java/org/elasticsearch/index/get/GetResult.java index 0fa843adc47..b688ed44234 100644 --- a/core/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/core/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -229,7 +230,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContent { builder.field(Fields.FOUND, exists); if (source != null) { - XContentHelper.writeRawField("_source", source, builder, params); + XContentHelper.writeRawField(SourceFieldMapper.NAME, source, builder, params); } if (!otherFields.isEmpty()) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java index 2a24b6a94c6..fa82176c6ab 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java @@ -89,7 +89,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr protected Boolean ignoreMalformed; - public Builder(String name, GeoPointFieldType fieldType) { + public Builder(String name, MappedFieldType fieldType) { super(name, fieldType, fieldType); } @@ -143,7 +143,16 @@ public abstract class BaseGeoPointFieldMapper extends
FieldMapper implements Arr FieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo); public Y build(Mapper.BuilderContext context) { - GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType; + // version 5.0 cuts over to LatLonPoint and no longer indexes geohash, or lat/lon separately + if (context.indexCreatedVersion().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + return buildLegacy(context); + } + return build(context, name, fieldType, defaultFieldType, context.indexSettings(), + null, null, null, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo); + } + + private Y buildLegacy(Mapper.BuilderContext context) { + LegacyGeoPointFieldType geoPointFieldType = (LegacyGeoPointFieldType)fieldType; FieldMapper latMapper = null; FieldMapper lonMapper = null; @@ -161,9 +170,9 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr lonMapper = (LegacyDoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); } else { latMapper = new NumberFieldMapper.Builder(Names.LAT, NumberFieldMapper.NumberType.DOUBLE) - .includeInAll(false).store(fieldType.stored()).docValues(false).build(context); + .includeInAll(false).store(fieldType.stored()).docValues(false).build(context); lonMapper = new NumberFieldMapper.Builder(Names.LON, NumberFieldMapper.NumberType.DOUBLE) - .includeInAll(false).store(fieldType.stored()).docValues(false).build(context); + .includeInAll(false).store(fieldType.stored()).docValues(false).build(context); } geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType()); } @@ -183,7 +192,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr context.path().remove(); return build(context, name, fieldType, defaultFieldType, context.indexSettings(), - latMapper, lonMapper, geoHashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo); + latMapper, lonMapper, geoHashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo); } } @@ -191,8 +200,11 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { Builder builder; - if (parserContext.indexVersionCreated().before(Version.V_2_2_0)) { + Version indexVersionCreated = parserContext.indexVersionCreated(); + if (indexVersionCreated.before(Version.V_2_2_0)) { builder = new LegacyGeoPointFieldMapper.Builder(name); + } else if (indexVersionCreated.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + builder = new LatLonPointFieldMapper.Builder(name); } else { builder = new GeoPointFieldMapper.Builder(name); } @@ -202,39 +214,43 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr Map.Entry entry = iterator.next(); String propName = entry.getKey(); Object propNode = entry.getValue(); - if (propName.equals("lat_lon")) { - deprecationLogger.deprecated(CONTENT_TYPE + " lat_lon parameter is deprecated and will be removed " - + "in the next major release"); - builder.enableLatLon(XContentMapValues.lenientNodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("precision_step")) { - deprecationLogger.deprecated(CONTENT_TYPE + " precision_step parameter is deprecated and will be removed " - + "in the next major release"); - 
builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode)); - iterator.remove(); - } else if (propName.equals("geohash")) { - deprecationLogger.deprecated(CONTENT_TYPE + " geohash parameter is deprecated and will be removed " - + "in the next major release"); - builder.enableGeoHash(XContentMapValues.lenientNodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("geohash_prefix")) { - deprecationLogger.deprecated(CONTENT_TYPE + " geohash_prefix parameter is deprecated and will be removed " - + "in the next major release"); - builder.geoHashPrefix(XContentMapValues.lenientNodeBooleanValue(propNode)); - if (XContentMapValues.lenientNodeBooleanValue(propNode)) { - builder.enableGeoHash(true); + if (indexVersionCreated.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + if (propName.equals("lat_lon")) { + deprecationLogger.deprecated(CONTENT_TYPE + " lat_lon parameter is deprecated and will be removed " + + "in the next major release"); + builder.enableLatLon(XContentMapValues.lenientNodeBooleanValue(propNode)); + iterator.remove(); + } else if (propName.equals("precision_step")) { + deprecationLogger.deprecated(CONTENT_TYPE + " precision_step parameter is deprecated and will be removed " + + "in the next major release"); + builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode)); + iterator.remove(); + } else if (propName.equals("geohash")) { + deprecationLogger.deprecated(CONTENT_TYPE + " geohash parameter is deprecated and will be removed " + + "in the next major release"); + builder.enableGeoHash(XContentMapValues.lenientNodeBooleanValue(propNode)); + iterator.remove(); + } else if (propName.equals("geohash_prefix")) { + deprecationLogger.deprecated(CONTENT_TYPE + " geohash_prefix parameter is deprecated and will be removed " + + "in the next major release"); + builder.geoHashPrefix(XContentMapValues.lenientNodeBooleanValue(propNode)); + if (XContentMapValues.lenientNodeBooleanValue(propNode)) { + builder.enableGeoHash(true); + } + iterator.remove(); + } else if (propName.equals("geohash_precision")) { + deprecationLogger.deprecated(CONTENT_TYPE + " geohash_precision parameter is deprecated and will be removed " + + "in the next major release"); + if (propNode instanceof Integer) { + builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(propNode)); + } else { + builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(propNode.toString())); + } + iterator.remove(); } - iterator.remove(); - } else if (propName.equals("geohash_precision")) { - deprecationLogger.deprecated(CONTENT_TYPE + " geohash_precision parameter is deprecated and will be removed " - + "in the next major release"); - if (propNode instanceof Integer) { - builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(propNode)); - } else { - builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(propNode.toString())); - } - iterator.remove(); - } else if (propName.equals(Names.IGNORE_MALFORMED)) { + } + + if (propName.equals(Names.IGNORE_MALFORMED)) { builder.ignoreMalformed(XContentMapValues.lenientNodeBooleanValue(propNode)); iterator.remove(); } @@ -242,13 +258,29 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr if (builder instanceof LegacyGeoPointFieldMapper.Builder) { return LegacyGeoPointFieldMapper.parse((LegacyGeoPointFieldMapper.Builder) builder, node, parserContext); + } else if (builder instanceof LatLonPointFieldMapper.Builder) { + return (LatLonPointFieldMapper.Builder) builder; } return (GeoPointFieldMapper.Builder) 
builder; } } - public static class GeoPointFieldType extends MappedFieldType { + public abstract static class GeoPointFieldType extends MappedFieldType { + GeoPointFieldType() { + } + + GeoPointFieldType(GeoPointFieldType ref) { + super(ref); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + } + + public static class LegacyGeoPointFieldType extends GeoPointFieldType { protected MappedFieldType geoHashFieldType; protected int geoHashPrecision; protected boolean geoHashPrefixEnabled; @@ -256,9 +288,9 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr protected MappedFieldType latFieldType; protected MappedFieldType lonFieldType; - GeoPointFieldType() {} + LegacyGeoPointFieldType() {} - GeoPointFieldType(GeoPointFieldType ref) { + LegacyGeoPointFieldType(LegacyGeoPointFieldType ref) { super(ref); this.geoHashFieldType = ref.geoHashFieldType; // copying ref is ok, this can never be modified this.geoHashPrecision = ref.geoHashPrecision; @@ -269,13 +301,13 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr @Override public MappedFieldType clone() { - return new GeoPointFieldType(this); + return new LegacyGeoPointFieldType(this); } @Override public boolean equals(Object o) { if (!super.equals(o)) return false; - GeoPointFieldType that = (GeoPointFieldType) o; + LegacyGeoPointFieldType that = (LegacyGeoPointFieldType) o; return geoHashPrecision == that.geoHashPrecision && geoHashPrefixEnabled == that.geoHashPrefixEnabled && java.util.Objects.equals(geoHashFieldType, that.geoHashFieldType) && @@ -289,15 +321,10 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr lonFieldType); } - @Override - public String typeName() { - return CONTENT_TYPE; - } - @Override public void checkCompatibility(MappedFieldType fieldType, List conflicts, boolean strict) { super.checkCompatibility(fieldType, conflicts, strict); - GeoPointFieldType other = (GeoPointFieldType)fieldType; + LegacyGeoPointFieldType other = (LegacyGeoPointFieldType)fieldType; if (isLatLonEnabled() != other.isLatLonEnabled()) { conflicts.add("mapper [" + name() + "] has different [lat_lon]"); } @@ -398,9 +425,10 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr this.ignoreMalformed = ignoreMalformed; } - @Override - public GeoPointFieldType fieldType() { - return (GeoPointFieldType) super.fieldType(); + + + public LegacyGeoPointFieldType legacyFieldType() { + return (LegacyGeoPointFieldType) super.fieldType(); } @Override @@ -414,15 +442,22 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr @Override public Iterator iterator() { + if (this instanceof LatLonPointFieldMapper == false) { + return Iterators.concat(super.iterator(), legacyIterator()); + } + return super.iterator(); + } + + public Iterator legacyIterator() { List extras = new ArrayList<>(); - if (fieldType().isGeoHashEnabled()) { + if (legacyFieldType().isGeoHashEnabled()) { extras.add(geoHashMapper); } - if (fieldType().isLatLonEnabled()) { + if (legacyFieldType().isLatLonEnabled()) { extras.add(latMapper); extras.add(lonMapper); } - return Iterators.concat(super.iterator(), extras.iterator()); + return extras.iterator(); } @Override @@ -436,13 +471,13 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException { - if (fieldType().isGeoHashEnabled()) { + if 
(legacyFieldType().isGeoHashEnabled()) { if (geoHash == null) { geoHash = GeoHashUtils.stringEncode(point.lon(), point.lat()); } addGeoHashField(context, geoHash); } - if (fieldType().isLatLonEnabled()) { + if (legacyFieldType().isLatLonEnabled()) { latMapper.parse(context.createExternalValueContext(point.lat())); lonMapper.parse(context.createExternalValueContext(point.lon())); } @@ -517,8 +552,9 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } private void addGeoHashField(ParseContext context, String geoHash) throws IOException { - int len = Math.min(fieldType().geoHashPrecision(), geoHash.length()); - int min = fieldType().isGeoHashPrefixEnabled() ? 1 : len; + LegacyGeoPointFieldType ft = (LegacyGeoPointFieldType)fieldType; + int len = Math.min(ft.geoHashPrecision(), geoHash.length()); + int min = ft.isGeoHashPrefixEnabled() ? 1 : len; for (int i = len; i >= min; i--) { // side effect of this call is adding the field @@ -537,26 +573,33 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || fieldType().isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) { - builder.field("lat_lon", fieldType().isLatLonEnabled()); - } - if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != LegacyNumericUtils.PRECISION_STEP_DEFAULT)) { - builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().isGeoHashEnabled() != Defaults.ENABLE_GEOHASH) { - builder.field("geohash", fieldType().isGeoHashEnabled()); - } - if (includeDefaults || fieldType().isGeoHashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) { - builder.field("geohash_prefix", fieldType().isGeoHashPrefixEnabled()); - } - if (fieldType().isGeoHashEnabled() && (includeDefaults || fieldType().geoHashPrecision() != Defaults.GEO_HASH_PRECISION)) { - builder.field("geohash_precision", fieldType().geoHashPrecision()); + if (this instanceof LatLonPointFieldMapper == false) { + legacyDoXContentBody(builder, includeDefaults, params); } if (includeDefaults || ignoreMalformed.explicit()) { builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value()); } } + protected void legacyDoXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + LegacyGeoPointFieldType ft = (LegacyGeoPointFieldType) fieldType; + if (includeDefaults || ft.isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) { + builder.field("lat_lon", ft.isLatLonEnabled()); + } + if (ft.isLatLonEnabled() && (includeDefaults || ft.latFieldType().numericPrecisionStep() != LegacyNumericUtils.PRECISION_STEP_DEFAULT)) { + builder.field("precision_step", ft.latFieldType().numericPrecisionStep()); + } + if (includeDefaults || ft.isGeoHashEnabled() != Defaults.ENABLE_GEOHASH) { + builder.field("geohash", ft.isGeoHashEnabled()); + } + if (includeDefaults || ft.isGeoHashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) { + builder.field("geohash_prefix", ft.isGeoHashPrefixEnabled()); + } + if (ft.isGeoHashEnabled() && (includeDefaults || ft.geoHashPrecision() != Defaults.GEO_HASH_PRECISION)) { + builder.field("geohash_precision", ft.geoHashPrecision()); + } + } + @Override public FieldMapper updateFieldType(Map fullNameToFieldType) { 
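// only the delegating call is visible in this hunk: super.updateFieldType resolves this mapper's
// updated MappedFieldType from fullNameToFieldType; the remainder of the method lies outside the context shown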
BaseGeoPointFieldMapper updated = (BaseGeoPointFieldMapper) super.updateFieldType(fullNameToFieldType); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index ee7a9fa07f9..c1ba15be780 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -48,7 +48,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { public static class Defaults extends BaseGeoPointFieldMapper.Defaults { - public static final GeoPointFieldType FIELD_TYPE = new GeoPointFieldType(); + public static final GeoPointFieldType FIELD_TYPE = new LegacyGeoPointFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); @@ -130,4 +130,9 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { } super.parse(context, point, geoHash); } + + @Override + public LegacyGeoPointFieldType fieldType() { + return (LegacyGeoPointFieldType) super.fieldType(); + } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LatLonPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LatLonPointFieldMapper.java new file mode 100644 index 00000000000..9ec5970237a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/mapper/LatLonPointFieldMapper.java @@ -0,0 +1,156 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.mapper; + +import org.apache.lucene.document.LatLonDocValuesField; +import org.apache.lucene.document.LatLonPoint; +import org.apache.lucene.document.StoredField; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.search.Query; +import org.elasticsearch.Version; +import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.plain.AbstractLatLonPointDVIndexFieldData; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; + +import java.io.IOException; +import java.util.Iterator; +import java.util.Map; + +/** + * Field Mapper for geo_point types. 
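+ * (Values are indexed as a two-dimension Lucene point and, when doc values are enabled, additionally as a
+ * LatLonDocValuesField that packs latitude and longitude into a single long; see parse below.)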
+ * + * Uses lucene 6 LatLonPoint encoding + */ +public class LatLonPointFieldMapper extends BaseGeoPointFieldMapper { + public static final String CONTENT_TYPE = "geo_point"; + public static final Version LAT_LON_FIELD_VERSION = Version.V_5_0_0_alpha6; + + public static class Defaults extends BaseGeoPointFieldMapper.Defaults { + public static final LatLonPointFieldType FIELD_TYPE = new LatLonPointFieldType(); + + static { + FIELD_TYPE.setTokenized(false); + FIELD_TYPE.setHasDocValues(true); + FIELD_TYPE.setDimensions(2, Integer.BYTES); + FIELD_TYPE.freeze(); + } + } + + public static class Builder extends BaseGeoPointFieldMapper.Builder { + public Builder(String name) { + super(name, Defaults.FIELD_TYPE); + } + + @Override + public LatLonPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType, + MappedFieldType defaultFieldType, Settings indexSettings, + FieldMapper latMapper, FieldMapper lonMapper, FieldMapper geoHashMapper, + MultiFields multiFields, Explicit ignoreMalformed, + CopyTo copyTo) { + setupFieldType(context); + return new LatLonPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, + ignoreMalformed, copyTo); + } + + @Override + public LatLonPointFieldMapper build(BuilderContext context) { + return super.build(context); + } + } + + public static class TypeParser extends BaseGeoPointFieldMapper.TypeParser { + @Override + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { + return super.parse(name, node, parserContext); + } + } + + public LatLonPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + Settings indexSettings, MultiFields multiFields, Explicit ignoreMalformed, + CopyTo copyTo) { + super(simpleName, fieldType, defaultFieldType, indexSettings, null, null, null, multiFields, ignoreMalformed, copyTo); + } + + public static class LatLonPointFieldType extends GeoPointFieldType { + LatLonPointFieldType() { + } + + LatLonPointFieldType(LatLonPointFieldType ref) { + super(ref); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + @Override + public MappedFieldType clone() { + return new LatLonPointFieldType(this); + } + + @Override + public IndexFieldData.Builder fielddataBuilder() { + failIfNoDocValues(); + return new AbstractLatLonPointDVIndexFieldData.Builder(); + } + + @Override + public Query termQuery(Object value, QueryShardContext context) { + throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead: [" + + name() + "]"); + } + } + + @Override + protected void parse(ParseContext originalContext, GeoPoint point, String geoHash) throws IOException { + // Geopoint fields, by default, will not be included in _all + final ParseContext context = originalContext.setIncludeInAllDefault(false); + + if (ignoreMalformed.value() == false) { + if (point.lat() > 90.0 || point.lat() < -90.0) { + throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); + } + if (point.lon() > 180.0 || point.lon() < -180) { + throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); + } + } else { + GeoUtils.normalizePoint(point); + } + if (fieldType().indexOptions() != IndexOptions.NONE) { + context.doc().add(new LatLonPoint(fieldType().name(), point.lat(), point.lon())); + } + if (fieldType().stored()) { + context.doc().add(new StoredField(fieldType().name(), 
point.toString())); + } + if (fieldType.hasDocValues()) { + context.doc().add(new LatLonDocValuesField(fieldType().name(), point.lat(), point.lon())); + } + // if the mapping contains multifields then use the geohash string + if (multiFields.iterator().hasNext()) { + multiFields.parse(this, context.createExternalValueContext(point.geohash())); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java index 3fe195c5d91..99ca07b06bf 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java @@ -60,7 +60,7 @@ public class LegacyGeoPointFieldMapper extends BaseGeoPointFieldMapper implement public static class Defaults extends BaseGeoPointFieldMapper.Defaults{ public static final Explicit COERCE = new Explicit<>(false, false); - public static final GeoPointFieldType FIELD_TYPE = new GeoPointFieldType(); + public static final GeoPointFieldType FIELD_TYPE = new LegacyGeoPointFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); @@ -331,6 +331,11 @@ public class LegacyGeoPointFieldMapper extends BaseGeoPointFieldMapper implement } } + @Override + public LegacyGeoPointFieldType fieldType() { + return (LegacyGeoPointFieldType) super.fieldType(); + } + public static class CustomGeoPointDocValuesField extends CustomDocValuesField { private final ObjectHashSet points; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 9a434cc8a36..8796f8539d9 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -133,7 +133,7 @@ public abstract class MappedFieldType extends FieldType { eagerGlobalOrdinals, similarity == null ? 
null : similarity.name(), nullValue, nullValueAsString); } - // norelease: we need to override freeze() and add safety checks that all settings are actually set + // TODO: we need to override freeze() and add safety checks that all settings are actually set /** Returns the name of this type, as would be specified in mapping properties */ public abstract String typeName(); diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index ee7220111f3..fe5d5664123 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query; +import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.geo.Rectangle; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -38,7 +39,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper; +import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper.LegacyGeoPointFieldType; +import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.search.geo.LegacyInMemoryGeoBoundingBoxQuery; import org.elasticsearch.index.search.geo.LegacyIndexedGeoBoundingBoxQuery; @@ -359,7 +361,10 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder use prefix encoded postings format final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ? @@ -371,7 +376,7 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder use prefix encoded postings format final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ? 
diff --git a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java index 57a189b72f4..ab3b23af0fc 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; +import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -83,7 +84,7 @@ public class GeohashCellQuery { * @param geohashes optional array of additional geohashes * @return a new geo bounding box filter */ - public static Query create(QueryShardContext context, BaseGeoPointFieldMapper.GeoPointFieldType fieldType, + public static Query create(QueryShardContext context, BaseGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, String geohash, @Nullable List geohashes) { MappedFieldType geoHashMapper = fieldType.geoHashFieldType(); if (geoHashMapper == null) { @@ -241,11 +242,14 @@ public class GeohashCellQuery { } } - if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) { + if (fieldType instanceof LatLonPointFieldMapper.LatLonPointFieldType) { + throw new QueryShardException(context, "failed to parse [{}] query. " + + "geo_point field no longer supports geohash_cell queries", NAME); + } else if (!(fieldType instanceof BaseGeoPointFieldMapper.LegacyGeoPointFieldType)) { throw new QueryShardException(context, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName); } - BaseGeoPointFieldMapper.GeoPointFieldType geoFieldType = ((BaseGeoPointFieldMapper.GeoPointFieldType) fieldType); + BaseGeoPointFieldMapper.LegacyGeoPointFieldType geoFieldType = ((BaseGeoPointFieldMapper.LegacyGeoPointFieldType) fieldType); if (!geoFieldType.isGeoHashPrefixEnabled()) { throw new QueryShardException(context, "failed to parse [{}] query.
[geohash_prefix] is not enabled for field [{}]", NAME, fieldName); diff --git a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 4516dfde698..454c808a5d5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -94,7 +94,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl ObjectParser.ValueType.OBJECT_ARRAY); PARSER.declareField((p, i, c) -> { try { - i.setFetchSourceContext(FetchSourceContext.parse(c)); + i.setFetchSourceContext(FetchSourceContext.parse(c.parser())); } catch (IOException e) { throw new ParsingException(p.getTokenLocation(), "Could not parse inner _source definition", e); } @@ -219,7 +219,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl scriptFields.add(new ScriptField(in)); } } - fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); + fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new); if (in.readBoolean()) { int size = in.readVInt(); sorts = new ArrayList<>(size); @@ -258,7 +258,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl scriptField.writeTo(out); } } - out.writeOptionalStreamable(fetchSourceContext); + out.writeOptionalWriteable(fetchSourceContext); boolean hasSorts = sorts != null; out.writeBoolean(hasSorts); if (hasSorts) { diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java index 53cdf5861a3..823f882f40a 100644 --- a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.fielddata.MultiGeoPointValues; +import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper; import java.io.IOException; @@ -60,7 +61,8 @@ public class GeoDistanceRangeQuery extends Query { private final IndexGeoPointFieldData indexFieldData; public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower, - boolean includeUpper, GeoDistance geoDistance, LegacyGeoPointFieldMapper.GeoPointFieldType fieldType, + boolean includeUpper, GeoDistance geoDistance, + LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, IndexGeoPointFieldData indexFieldData, String optimizeBbox) { this.lat = point.lat(); this.lon = point.lon(); diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/LegacyIndexedGeoBoundingBoxQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/LegacyIndexedGeoBoundingBoxQuery.java index f60a12f023d..6fdb2a906c5 100644 --- a/core/src/main/java/org/elasticsearch/index/search/geo/LegacyIndexedGeoBoundingBoxQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/geo/LegacyIndexedGeoBoundingBoxQuery.java @@ -33,7 +33,8 @@ import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper; @Deprecated public class LegacyIndexedGeoBoundingBoxQuery { - public static Query create(GeoPoint topLeft, GeoPoint bottomRight, LegacyGeoPointFieldMapper.GeoPointFieldType 
fieldType) { + public static Query create(GeoPoint topLeft, GeoPoint bottomRight, + LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType) { if (!fieldType.isLatLonEnabled()) { throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.name() + "], can't use indexed filter on it"); @@ -47,7 +48,7 @@ public class LegacyIndexedGeoBoundingBoxQuery { } private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, - LegacyGeoPointFieldMapper.GeoPointFieldType fieldType) { + LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType) { BooleanQuery.Builder filter = new BooleanQuery.Builder(); filter.setMinimumNumberShouldMatch(1); filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true), Occur.SHOULD); @@ -57,7 +58,7 @@ public class LegacyIndexedGeoBoundingBoxQuery { } private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, - LegacyGeoPointFieldMapper.GeoPointFieldType fieldType) { + LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType) { BooleanQuery.Builder filter = new BooleanQuery.Builder(); filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true), Occur.MUST); filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST); diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index a38585cf468..f87702771b2 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -48,7 +48,6 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; @@ -116,9 +115,9 @@ import org.elasticsearch.index.warmer.WarmerStats; import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.cluster.IndicesClusterStateService; +import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoveryFailedException; import org.elasticsearch.indices.recovery.RecoveryState; -import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.search.suggest.completion.CompletionFieldStats; @@ -135,7 +134,6 @@ import java.nio.file.NoSuchFileException; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -368,60 +366,46 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * @throws IOException if shard state could not be persisted */ public void updateRoutingEntry(final ShardRouting newRouting) throws IOException { - final ShardRouting currentRouting = this.shardRouting; - if (!newRouting.shardId().equals(shardId())) { - throw new IllegalArgumentException("Trying to set a routing entry with shardId " + newRouting.shardId() + " on a shard with shardId " 
+ shardId() + ""); - } - if ((currentRouting == null || newRouting.isSameAllocation(currentRouting)) == false) { - throw new IllegalArgumentException("Trying to set a routing entry with a different allocation. Current " + currentRouting + ", new " + newRouting); - } - if (currentRouting != null) { - if (!newRouting.primary() && currentRouting.primary()) { - logger.warn("suspect illegal state: trying to move shard from primary mode to replica mode"); - } - // if its the same routing, return - if (currentRouting.equals(newRouting)) { - return; - } - } + final ShardRouting currentRouting; + synchronized (mutex) { + currentRouting = this.shardRouting; - if (state == IndexShardState.POST_RECOVERY) { - // if the state is started or relocating (cause it might move right away from started to relocating) - // then move to STARTED - if (newRouting.state() == ShardRoutingState.STARTED || newRouting.state() == ShardRoutingState.RELOCATING) { + if (!newRouting.shardId().equals(shardId())) { + throw new IllegalArgumentException("Trying to set a routing entry with shardId " + newRouting.shardId() + " on a shard with shardId " + shardId() + ""); + } + if ((currentRouting == null || newRouting.isSameAllocation(currentRouting)) == false) { + throw new IllegalArgumentException("Trying to set a routing entry with a different allocation. Current " + currentRouting + ", new " + newRouting); + } + if (currentRouting != null && currentRouting.primary() && newRouting.primary() == false) { + throw new IllegalArgumentException("illegal state: trying to move shard from primary mode to replica mode. Current " + + currentRouting + ", new " + newRouting); + } + + if (state == IndexShardState.POST_RECOVERY && newRouting.active()) { + assert currentRouting.active() == false : "we are in POST_RECOVERY, but our shard routing is active " + currentRouting; // we want to refresh *before* we move to internal STARTED state try { getEngine().refresh("cluster_state_started"); } catch (Exception e) { logger.debug("failed to refresh due to move to cluster wide started", e); } - - boolean movedToStarted = false; - synchronized (mutex) { - // do the check under a mutex, so we make sure to only change to STARTED if in POST_RECOVERY - if (state == IndexShardState.POST_RECOVERY) { - changeState(IndexShardState.STARTED, "global state is [" + newRouting.state() + "]"); - movedToStarted = true; - } else { - logger.debug("state [{}] not changed, not in POST_RECOVERY, global state is [{}]", state, newRouting.state()); - } - } - if (movedToStarted) { - indexEventListener.afterIndexShardStarted(this); - } + changeState(IndexShardState.STARTED, "global state is [" + newRouting.state() + "]"); + } else if (state == IndexShardState.RELOCATED && + (newRouting.relocating() == false || newRouting.equalsIgnoringMetaData(currentRouting) == false)) { + // if the shard is marked as RELOCATED we have to fail when any changes in shard routing occur (e.g. due to recovery + // failure / cancellation). The reason is that at the moment we cannot safely move back to STARTED without risking two + // active primaries. 
+ throw new IndexShardRelocatedException(shardId(), "Shard is marked as relocated, cannot safely move to state " + newRouting.state()); } + this.shardRouting = newRouting; + persistMetadata(newRouting, currentRouting); } - - if (state == IndexShardState.RELOCATED && - (newRouting.relocating() == false || newRouting.equalsIgnoringMetaData(currentRouting) == false)) { - // if the shard is marked as RELOCATED we have to fail when any changes in shard routing occur (e.g. due to recovery - // failure / cancellation). The reason is that at the moment we cannot safely move back to STARTED without risking two - // active primaries. - throw new IndexShardRelocatedException(shardId(), "Shard is marked as relocated, cannot safely move to state " + newRouting.state()); + if (currentRouting != null && currentRouting.active() == false && newRouting.active()) { + indexEventListener.afterIndexShardStarted(this); + } + if (newRouting.equals(currentRouting) == false) { + indexEventListener.shardRoutingChanged(this, currentRouting, newRouting); } - this.shardRouting = newRouting; - indexEventListener.shardRoutingChanged(this, currentRouting, newRouting); - persistMetadata(newRouting, currentRouting); } /** @@ -451,6 +435,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } public void relocated(String reason) throws IllegalIndexShardStateException, InterruptedException { + assert shardRouting.primary() : "only primaries can be marked as relocated: " + shardRouting; try { indexShardOperationsLock.blockOperations(30, TimeUnit.MINUTES, () -> { // no shard operation locks are being held here, move state from started to relocated @@ -460,6 +445,15 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl if (state != IndexShardState.STARTED) { throw new IndexShardNotStartedException(shardId, state); } + // if the master cancelled the recovery, the target will be removed + // and the recovery will be stopped. + // However, it is still possible that we concurrently end up here + // and therefore have to ensure we don't mark the shard as relocated when + // its shard routing says otherwise.
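The guard on the next line is an instance of the re-validate-after-acquire pattern: the relocating flag observed before blockOperations() may have changed by the time the operation lock is actually held, so the condition is checked again under the lock. A minimal, generic sketch of the pattern (plain Java with hypothetical names, not the actual IndexShard code):

    import java.util.concurrent.locks.ReentrantLock;

    // Hedged illustration: re-check a condition after acquiring the lock that
    // blocks concurrent operations, because it may have changed while waiting.
    final class RevalidateAfterAcquire {
        private final ReentrantLock operationsLock = new ReentrantLock();
        private volatile boolean relocating = true; // flipped concurrently elsewhere

        void markRelocated() {
            operationsLock.lock();
            try {
                if (!relocating) { // the world may have moved on while we blocked
                    throw new IllegalStateException("no longer relocating");
                }
                // safe to perform the state transition here
            } finally {
                operationsLock.unlock();
            }
        }
    }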
+ if (shardRouting.relocating() == false) { + throw new IllegalIndexShardStateException(shardId, IndexShardState.STARTED, + ": shard is no longer relocating " + shardRouting); + } changeState(IndexShardState.RELOCATED, reason); } }); diff --git a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 50583a148d7..2a883f1e080 100644 --- a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -258,8 +258,12 @@ public class TermVectorsService { for (Map.Entry> entry : values.entrySet()) { String field = entry.getKey(); Analyzer analyzer = getAnalyzerAtField(indexShard, field, perFieldAnalyzer); - for (Object text : entry.getValue()) { - index.addField(field, text.toString(), analyzer); + if (entry.getValue() instanceof List) { + for (Object text : entry.getValue()) { + index.addField(field, text.toString(), analyzer); + } + } else { + index.addField(field, entry.getValue().toString(), analyzer); } } /* and read vectors from it */ diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index fed710244fb..eb1843dc7d9 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -36,23 +36,24 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; -import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ObjectMapper; +import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.index.mapper.TokenCountFieldMapper; +import org.elasticsearch.index.mapper.ScaledFloatFieldMapper; +import org.elasticsearch.index.mapper.StringFieldMapper; +import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; -import org.elasticsearch.index.mapper.ScaledFloatFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.StringFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TimestampFieldMapper; -import org.elasticsearch.index.mapper.TokenCountFieldMapper; +import org.elasticsearch.index.mapper.TTLFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.mapper.UidFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; @@ -119,6 +120,7 @@ public class IndicesModule extends AbstractModule { mappers.put(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser()); mappers.put(CompletionFieldMapper.CONTENT_TYPE, new 
CompletionFieldMapper.TypeParser()); mappers.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser()); + mappers.put(LatLonPointFieldMapper.CONTENT_TYPE, new LatLonPointFieldMapper.TypeParser()); if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser()); } diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index f1381950103..4368d51e346 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -88,6 +88,7 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.cluster.IndicesClusterStateService; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.ingest.IngestService; @@ -111,7 +112,9 @@ import org.elasticsearch.repositories.RepositoriesModule; import org.elasticsearch.rest.RestController; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.SearchExtRegistry; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.SearchRequestParsers; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.snapshots.SnapshotShardsService; @@ -327,7 +330,6 @@ public class Node implements Closeable { IndicesModule indicesModule = new IndicesModule(pluginsService.filterPlugins(MapperPlugin.class)); modules.add(indicesModule); SearchModule searchModule = new SearchModule(settings, false, pluginsService.filterPlugins(SearchPlugin.class)); - modules.add(searchModule); modules.add(new ActionModule(DiscoveryNode.isIngestNode(settings), false, settings, clusterModule.getIndexNameExpressionResolver(), settingsModule.getClusterSettings(), pluginsService.filterPlugins(ActionPlugin.class))); @@ -363,7 +365,11 @@ public class Node implements Closeable { .map(Plugin::getCustomMetaDataUpgrader) .collect(Collectors.toList()); final MetaDataUpgrader metaDataUpgrader = new MetaDataUpgrader(customMetaDataUpgraders); + modules.add(b -> { + b.bind(IndicesQueriesRegistry.class).toInstance(searchModule.getQueryParserRegistry()); + b.bind(SearchRequestParsers.class).toInstance(searchModule.getSearchRequestParsers()); + b.bind(SearchExtRegistry.class).toInstance(searchModule.getSearchExtRegistry()); b.bind(PluginsService.class).toInstance(pluginsService); b.bind(Client.class).toInstance(client); b.bind(NodeClient.class).toInstance(client); diff --git a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java index 49edae3ce22..cc1170a4841 100644 --- a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java @@ -64,10 +64,6 @@ public class VerifyNodeRepositoryAction extends AbstractComponent { transportService.registerRequestHandler(ACTION_NAME, VerifyNodeRepositoryRequest::new, ThreadPool.Names.SAME, new VerifyNodeRepositoryRequestHandler()); } - public void close() { - 
transportService.removeHandler(ACTION_NAME); - } - public void verify(String repository, String verificationToken, final ActionListener listener) { final DiscoveryNodes discoNodes = clusterService.state().nodes(); final DiscoveryNode localNode = discoNodes.getLocalNode(); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 3c65e32c746..782c0ea4441 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -27,9 +27,10 @@ import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.Requests; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -314,16 +315,32 @@ public class RestIndicesAction extends AbstractCatAction { } // package private for testing - Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse health, IndicesStatsResponse stats, MetaData indexMetaDatas) { + Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse response, IndicesStatsResponse stats, MetaData indexMetaDatas) { + final String healthParam = request.param("health"); + final ClusterHealthStatus status; + if (healthParam != null) { + status = ClusterHealthStatus.fromString(healthParam); + } else { + status = null; + } + Table table = getTableWithHeader(request); for (final Index index : indices) { final String indexName = index.getName(); - ClusterIndexHealth indexHealth = health.getIndices().get(indexName); + ClusterIndexHealth indexHealth = response.getIndices().get(indexName); IndexStats indexStats = stats.getIndices().get(indexName); IndexMetaData indexMetaData = indexMetaDatas.getIndices().get(indexName); IndexMetaData.State state = indexMetaData.getState(); + if (status != null) { + if (state == IndexMetaData.State.CLOSE || + (indexHealth == null && !ClusterHealthStatus.RED.equals(status)) || + !indexHealth.getStatus().equals(status)) { + continue; + } + } + table.startRow(); table.addCell(state == IndexMetaData.State.OPEN ? (indexHealth == null ? 
"red*" : indexHealth.getStatus().toString().toLowerCase(Locale.ROOT)) : null); table.addCell(state.toString().toLowerCase(Locale.ROOT)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index f5dca3f22c9..4d5d3543cb6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -24,10 +24,12 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.support.ActiveShardCount; -import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.Requests; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; @@ -37,6 +39,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; @@ -52,6 +55,8 @@ import static org.elasticsearch.rest.RestStatus.OK; * */ public class RestBulkAction extends BaseRestHandler { + private static final DeprecationLogger DEPRECATION_LOGGER = + new DeprecationLogger(Loggers.getLogger(RestBulkAction.class)); private final boolean allowExplicitIndex; @@ -75,18 +80,21 @@ public class RestBulkAction extends BaseRestHandler { String defaultIndex = request.param("index"); String defaultType = request.param("type"); String defaultRouting = request.param("routing"); + FetchSourceContext defaultFetchSourceContext = FetchSourceContext.parseFromRestRequest(request); String fieldsParam = request.param("fields"); - String defaultPipeline = request.param("pipeline"); + if (fieldsParam != null) { + DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead"); + } String[] defaultFields = fieldsParam != null ? 
Strings.commaDelimitedListToStringArray(fieldsParam) : null; - + String defaultPipeline = request.param("pipeline"); String waitForActiveShards = request.param("wait_for_active_shards"); if (waitForActiveShards != null) { bulkRequest.waitForActiveShards(ActiveShardCount.parseString(waitForActiveShards)); } bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); - bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, defaultPipeline, - null, allowExplicitIndex); + bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, + defaultFetchSourceContext, defaultPipeline, null, allowExplicitIndex); client.bulk(bulkRequest, new RestBuilderListener(channel) { @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java index f127a58579e..550abb3e3bb 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java @@ -58,12 +58,15 @@ public class RestGetAction extends BaseRestHandler { getRequest.parent(request.param("parent")); getRequest.preference(request.param("preference")); getRequest.realtime(request.paramAsBoolean("realtime", getRequest.realtime())); - - String sField = request.param("fields"); + if (request.param("fields") != null) { + throw new IllegalArgumentException("The parameter [fields] is no longer supported, " + + "please use [stored_fields] to retrieve stored fields or [_source] to load the field from _source"); + } + String sField = request.param("stored_fields"); if (sField != null) { String[] sFields = Strings.splitStringByCommaToArray(sField); if (sFields != null) { - getRequest.fields(sFields); + getRequest.storedFields(sFields); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestHeadAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestHeadAction.java index 9fb706bd8e6..ad2f826e584 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestHeadAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestHeadAction.java @@ -91,7 +91,7 @@ public abstract class RestHeadAction extends BaseRestHandler { getRequest.preference(request.param("preference")); getRequest.realtime(request.paramAsBoolean("realtime", getRequest.realtime())); // don't get any fields back...
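The [fields] parameter is migrated the same way across these handlers: pure read endpoints (get, multi-get, explain) fail hard and point callers at [stored_fields] or [_source], while bulk and update keep accepting it but log a deprecation warning. A minimal sketch of that split, using a hypothetical helper and a plain parameter map rather than the real RestRequest/DeprecationLogger classes:

    import java.util.Map;

    // Hedged sketch of the [fields] -> [stored_fields] migration pattern above.
    final class StoredFieldsParamMigration {
        static String[] resolveStoredFields(Map<String, String> params, boolean hardFail) {
            String legacy = params.get("fields");
            if (legacy != null) {
                if (hardFail) {
                    // read endpoints: reject outright
                    throw new IllegalArgumentException(
                        "The parameter [fields] is no longer supported, please use [stored_fields]");
                }
                // write endpoints: warn, but keep working for now
                System.err.println("Deprecated field [fields] used, expected [_source] instead");
                return legacy.split(",");
            }
            String replacement = params.get("stored_fields");
            return replacement == null ? null : replacement.split(",");
        }
    }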
- getRequest.fields(Strings.EMPTY_ARRAY); + getRequest.storedFields(Strings.EMPTY_ARRAY); // TODO we can also just return the document size as Content-Length client.get(getRequest, new RestResponseListener(channel) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java index 50bd4c37ac7..07d221fed8e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java @@ -59,9 +59,12 @@ public class RestMultiGetAction extends BaseRestHandler { multiGetRequest.refresh(request.paramAsBoolean("refresh", multiGetRequest.refresh())); multiGetRequest.preference(request.param("preference")); multiGetRequest.realtime(request.paramAsBoolean("realtime", multiGetRequest.realtime())); - + if (request.param("fields") != null) { + throw new IllegalArgumentException("The parameter [fields] is no longer supported, " + + "please use [stored_fields] to retrieve stored fields or _source filtering if the field is not stored"); + } String[] sFields = null; - String sField = request.param("fields"); + String sField = request.param("stored_fields"); if (sField != null) { sFields = Strings.splitStringByCommaToArray(sField); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index d0d7916adfe..91f71e72498 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -25,6 +25,8 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; @@ -33,12 +35,15 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import static org.elasticsearch.rest.RestRequest.Method.POST; /** */ public class RestUpdateAction extends BaseRestHandler { + private static final DeprecationLogger DEPRECATION_LOGGER = + new DeprecationLogger(Loggers.getLogger(RestUpdateAction.class)); @Inject public RestUpdateAction(Settings settings, RestController controller) { @@ -58,13 +63,19 @@ public class RestUpdateAction extends BaseRestHandler { updateRequest.waitForActiveShards(ActiveShardCount.parseString(waitForActiveShards)); } updateRequest.docAsUpsert(request.paramAsBoolean("doc_as_upsert", updateRequest.docAsUpsert())); + FetchSourceContext fetchSourceContext = FetchSourceContext.parseFromRestRequest(request); String sField = request.param("fields"); - if (sField != null) { - String[] sFields = Strings.splitStringByCommaToArray(sField); - if (sFields != null) { - updateRequest.fields(sFields); - } + if (sField != null && fetchSourceContext != null) { + throw new IllegalArgumentException("[fields] and [_source] cannot be used in the same request"); } + if (sField != null) 
{ + DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead"); + String[] sFields = Strings.splitStringByCommaToArray(sField); + updateRequest.fields(sFields); + } else if (fetchSourceContext != null) { + updateRequest.fetchSource(fetchSourceContext); + } + updateRequest.retryOnConflict(request.paramAsInt("retry_on_conflict", updateRequest.retryOnConflict())); updateRequest.version(RestActions.parseVersion(request)); updateRequest.versionType(VersionType.fromString(request.param("version_type"), updateRequest.versionType())); @@ -72,7 +83,7 @@ public class RestUpdateAction extends BaseRestHandler { // see if we have it in the body if (request.hasContent()) { - updateRequest.source(request.content()); + updateRequest.fromXContent(request.content()); IndexRequest upsertRequest = updateRequest.upsertRequest(); if (upsertRequest != null) { upsertRequest.routing(request.param("routing")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java index 7088b96c6de..597bf3db615 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java @@ -78,11 +78,15 @@ public class RestExplainAction extends BaseRestHandler { explainRequest.query(query); } - String sField = request.param("fields"); + if (request.param("fields") != null) { + throw new IllegalArgumentException("The parameter [fields] is no longer supported, " + + "please use [stored_fields] to retrieve stored fields"); + } + String sField = request.param("stored_fields"); if (sField != null) { String[] sFields = Strings.splitStringByCommaToArray(sField); if (sFields != null) { - explainRequest.fields(sFields); + explainRequest.storedFields(sFields); } } diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index ec9f33e6c19..99b3d4c8894 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.BooleanQuery; import org.elasticsearch.common.NamedRegistry; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.builders.ShapeBuilders; -import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; @@ -286,7 +285,7 @@ import static java.util.Objects.requireNonNull; /** * Sets up things that can be done at search time like queries, aggregations, and suggesters. 
*/ -public class SearchModule extends AbstractModule { +public class SearchModule { public static final Setting INDICES_MAX_CLAUSE_COUNT_SETTING = Setting.intSetting("indices.query.bool.max_clause_count", 1024, 1, Integer.MAX_VALUE, Setting.Property.NodeScope); @@ -375,16 +374,6 @@ public class SearchModule extends AbstractModule { return aggregatorParsers; } - - @Override - protected void configure() { - if (false == transportClient) { - bind(IndicesQueriesRegistry.class).toInstance(queryParserRegistry); - bind(SearchRequestParsers.class).toInstance(searchRequestParsers); - bind(SearchExtRegistry.class).toInstance(searchExtParserRegistry); - } - } - private void registerAggregations(List plugins) { registerAggregation(new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, new AvgParser()) .addResultReader(InternalAvg::new)); @@ -811,4 +800,8 @@ public class SearchModule extends AbstractModule { public FetchPhase getFetchPhase() { return new FetchPhase(fetchSubPhases); } + + public SearchExtRegistry getSearchExtRegistry() { + return searchExtParserRegistry; + } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java index 828d5679846..3547db7140c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java @@ -79,7 +79,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder(size); @@ -112,7 +112,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder scriptFields = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -680,7 +680,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder> sorts = SortBuilder.fromXContent(context); factory.sorts(sorts); } else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) { - factory.fetchSource(FetchSourceContext.parse(context)); + factory.fetchSource(FetchSourceContext.parse(context.parser())); } else { throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", parser.getTokenLocation()); diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 84fc26fdb2f..c64a5fd552e 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -187,7 +187,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ public SearchSourceBuilder(StreamInput in) throws IOException { aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new); explain = in.readOptionalBoolean(); - fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); + fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new); docValueFields = (List) in.readGenericValue(); storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new); from = in.readVInt(); @@ -234,7 +234,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ public void writeTo(StreamOutput out) throws IOException { 
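// fetchSourceContext is now a Writeable rather than a Streamable, so the call below switches from writeOptionalStreamable to writeOptionalWriteable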
out.writeOptionalWriteable(aggregations); out.writeOptionalBoolean(explain); - out.writeOptionalStreamable(fetchSourceContext); + out.writeOptionalWriteable(fetchSourceContext); out.writeGenericValue(docValueFields); out.writeOptionalWriteable(storedFieldsContext); out.writeVInt(from); @@ -961,7 +961,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } else if (context.getParseFieldMatcher().match(currentFieldName, TRACK_SCORES_FIELD)) { trackScores = parser.booleanValue(); } else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { - fetchSourceContext = FetchSourceContext.parse(context); + fetchSourceContext = FetchSourceContext.parse(context.parser()); } else if (context.getParseFieldMatcher().match(currentFieldName, STORED_FIELDS_FIELD)) { storedFieldsContext = StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context); @@ -983,7 +983,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } else if (context.getParseFieldMatcher().match(currentFieldName, POST_FILTER_FIELD)) { postQueryBuilder = context.parseInnerQueryBuilder().orElse(null); } else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { - fetchSourceContext = FetchSourceContext.parse(context); + fetchSourceContext = FetchSourceContext.parse(context.parser()); } else if (context.getParseFieldMatcher().match(currentFieldName, SCRIPT_FIELDS_FIELD)) { scriptFields = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -1068,7 +1068,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } } } else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { - fetchSourceContext = FetchSourceContext.parse(context); + fetchSourceContext = FetchSourceContext.parse(context.parser()); } else if (context.getParseFieldMatcher().match(currentFieldName, SEARCH_AFTER)) { searchAfterBuilder = SearchAfterBuilder.fromXContent(parser, context.getParseFieldMatcher()); } else if (context.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java index ed8c0358dbb..eac878569e1 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java @@ -21,14 +21,13 @@ package org.elasticsearch.search.fetch; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchHits; import org.elasticsearch.transport.TransportResponse; import java.io.IOException; -import static org.elasticsearch.search.internal.InternalSearchHits.StreamContext; - /** * */ @@ -70,9 +69,17 @@ public class FetchSearchResult extends TransportResponse implements FetchSearchR } public void hits(InternalSearchHits hits) { + assert assertNoSearchTarget(hits); this.hits = hits; } + private boolean assertNoSearchTarget(InternalSearchHits hits) { + for (SearchHit hit : hits.hits()) { + assert hit.getShard() == null : "expected null but got: " + hit.getShard(); + } + return true; + } + public InternalSearchHits hits() { return hits; } @@ -96,13 +103,13 @@ public class FetchSearchResult 
extends TransportResponse implements FetchSearchR public void readFrom(StreamInput in) throws IOException { super.readFrom(in); id = in.readLong(); - hits = InternalSearchHits.readSearchHits(in, InternalSearchHits.streamContext().streamShardTarget(StreamContext.ShardTargetType.NO_STREAM)); + hits = InternalSearchHits.readSearchHits(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeLong(id); - hits.writeTo(out, InternalSearchHits.streamContext().streamShardTarget(StreamContext.ShardTargetType.NO_STREAM)); + hits.writeTo(out); } } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java index 864de1628a7..212f8d724d8 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java @@ -21,15 +21,15 @@ package org.elasticsearch.search.fetch.subphase; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.rest.RestRequest; import java.io.IOException; @@ -40,7 +40,7 @@ import java.util.List; /** * Context used to fetch the {@code _source}. */ -public class FetchSourceContext implements Streamable, ToXContent { +public class FetchSourceContext implements Writeable, ToXContent { public static final ParseField INCLUDES_FIELD = new ParseField("includes", "include"); public static final ParseField EXCLUDES_FIELD = new ParseField("excludes", "exclude"); @@ -51,9 +51,9 @@ public class FetchSourceContext implements Streamable, ToXContent { private String[] includes; private String[] excludes; - public static FetchSourceContext parse(QueryParseContext context) throws IOException { + public static FetchSourceContext parse(XContentParser parser) throws IOException { FetchSourceContext fetchSourceContext = new FetchSourceContext(); - fetchSourceContext.fromXContent(context); + fetchSourceContext.fromXContent(parser, ParseFieldMatcher.STRICT); return fetchSourceContext; } @@ -88,6 +88,19 @@ public class FetchSourceContext implements Streamable, ToXContent { this.excludes = excludes == null ? 
Strings.EMPTY_ARRAY : excludes; } + public FetchSourceContext(StreamInput in) throws IOException { + fetchSource = in.readBoolean(); + includes = in.readStringArray(); + excludes = in.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(fetchSource); + out.writeStringArray(includes); + out.writeStringArray(excludes); + } + public boolean fetchSource() { return this.fetchSource; } @@ -148,8 +161,7 @@ public class FetchSourceContext implements Streamable, ToXContent { return null; } - public void fromXContent(QueryParseContext context) throws IOException { - XContentParser parser = context.parser(); + public void fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException { XContentParser.Token token = parser.currentToken(); boolean fetchSource = true; String[] includes = Strings.EMPTY_ARRAY; @@ -170,7 +182,7 @@ public class FetchSourceContext implements Streamable, ToXContent { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_ARRAY) { - if (context.getParseFieldMatcher().match(currentFieldName, INCLUDES_FIELD)) { + if (parseFieldMatcher.match(currentFieldName, INCLUDES_FIELD)) { List includesList = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_STRING) { @@ -181,7 +193,7 @@ public class FetchSourceContext implements Streamable, ToXContent { } } includes = includesList.toArray(new String[includesList.size()]); - } else if (context.getParseFieldMatcher().match(currentFieldName, EXCLUDES_FIELD)) { + } else if (parseFieldMatcher.match(currentFieldName, EXCLUDES_FIELD)) { List excludesList = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_STRING) { @@ -197,10 +209,13 @@ public class FetchSourceContext implements Streamable, ToXContent { + " in [" + currentFieldName + "].", parser.getTokenLocation()); } } else if (token == XContentParser.Token.VALUE_STRING) { - if (context.getParseFieldMatcher().match(currentFieldName, INCLUDES_FIELD)) { + if (parseFieldMatcher.match(currentFieldName, INCLUDES_FIELD)) { includes = new String[] {parser.text()}; - } else if (context.getParseFieldMatcher().match(currentFieldName, EXCLUDES_FIELD)) { + } else if (parseFieldMatcher.match(currentFieldName, EXCLUDES_FIELD)) { excludes = new String[] {parser.text()}; + } else { + throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + + " in [" + currentFieldName + "].", parser.getTokenLocation()); } } else { throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", @@ -229,22 +244,6 @@ public class FetchSourceContext implements Streamable, ToXContent { return builder; } - @Override - public void readFrom(StreamInput in) throws IOException { - fetchSource = in.readBoolean(); - includes = in.readStringArray(); - excludes = in.readStringArray(); - in.readBoolean(); // Used to be transformSource but that was dropped in 2.1 - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(fetchSource); - out.writeStringArray(includes); - out.writeStringArray(excludes); - out.writeBoolean(false); // Used to be transformSource but that was dropped in 2.1 - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git 
a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java index 4816cba56df..5ff1df9c664 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java @@ -87,11 +87,7 @@ public final class CustomQueryScorer extends QueryScorer { } protected void extract(Query query, float boost, Map terms) throws IOException { - if (query instanceof GeoPointInBBoxQuery) { - // skip all geo queries, see https://issues.apache.org/jira/browse/LUCENE-7293 and - // https://github.com/elastic/elasticsearch/issues/17537 - return; - } else if (query instanceof HasChildQueryBuilder.LateParsingQuery) { + if (query instanceof HasChildQueryBuilder.LateParsingQuery) { // skip has_child or has_parent queries, see: https://github.com/elastic/elasticsearch/issues/14999 return; } else if (query instanceof FunctionScoreQuery) { diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java index e8ba4d88aa7..227fe90ee63 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java @@ -39,7 +39,6 @@ import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; -import org.elasticsearch.search.internal.InternalSearchHits.StreamContext.ShardTargetType; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -554,18 +553,14 @@ public class InternalSearchHit implements SearchHit { return builder; } - public static InternalSearchHit readSearchHit(StreamInput in, InternalSearchHits.StreamContext context) throws IOException { + public static InternalSearchHit readSearchHit(StreamInput in) throws IOException { InternalSearchHit hit = new InternalSearchHit(); - hit.readFrom(in, context); + hit.readFrom(in); return hit; } @Override public void readFrom(StreamInput in) throws IOException { - readFrom(in, InternalSearchHits.streamContext().streamShardTarget(ShardTargetType.STREAM)); - } - - public void readFrom(StreamInput in, InternalSearchHits.StreamContext context) throws IOException { score = in.readFloat(); id = in.readOptionalText(); type = in.readOptionalText(); @@ -644,26 +639,13 @@ public class InternalSearchHit implements SearchHit { matchedQueries[i] = in.readString(); } } - - if (context.streamShardTarget() == ShardTargetType.STREAM) { - if (in.readBoolean()) { - shard = new SearchShardTarget(in); - } - } else if (context.streamShardTarget() == ShardTargetType.LOOKUP) { - int lookupId = in.readVInt(); - if (lookupId > 0) { - shard = context.handleShardLookup().get(lookupId); - } - } - + shard = in.readOptionalWriteable(SearchShardTarget::new); size = in.readVInt(); if (size > 0) { innerHits = new HashMap<>(size); for (int i = 0; i < size; i++) { String key = in.readString(); - ShardTargetType shardTarget = context.streamShardTarget(); - InternalSearchHits value = InternalSearchHits.readSearchHits(in, context.streamShardTarget(ShardTargetType.NO_STREAM)); - context.streamShardTarget(shardTarget); + InternalSearchHits value = InternalSearchHits.readSearchHits(in); 
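The recurring change across the serialization hunks above is the migration from the old Streamable idiom (a no-arg constructor followed by readFrom) to Writeable, where deserialization happens in a StreamInput-accepting constructor and optional fields go through readOptionalWriteable/writeOptionalWriteable instead of hand-rolled boolean framing. As a minimal sketch of that pattern, assuming the Elasticsearch 5.x stream classes and a hypothetical ExampleContext class:

```java
import java.io.IOException;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

public class ExampleContext implements Writeable {

    private final boolean enabled;
    private final String[] includes;

    public ExampleContext(boolean enabled, String[] includes) {
        this.enabled = enabled;
        this.includes = includes;
    }

    // Writeable replaces Streamable's no-arg constructor plus readFrom(StreamInput):
    // deserialization happens in a constructor, so the fields can be final.
    public ExampleContext(StreamInput in) throws IOException {
        enabled = in.readBoolean();
        includes = in.readStringArray();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeBoolean(enabled);
        out.writeStringArray(includes);
    }
}
```

Optional fields then serialize symmetrically, with out.writeOptionalWriteable(context) on the write side and in.readOptionalWriteable(ExampleContext::new) on the read side, which is the shape the SearchShardTarget hunk above adopts.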
innerHits.put(key, value); } } @@ -671,10 +653,6 @@ public class InternalSearchHit implements SearchHit { @Override public void writeTo(StreamOutput out) throws IOException { - writeTo(out, InternalSearchHits.streamContext().streamShardTarget(ShardTargetType.STREAM)); - } - - public void writeTo(StreamOutput out, InternalSearchHits.StreamContext context) throws IOException { out.writeFloat(score); out.writeOptionalText(id); out.writeOptionalText(type); @@ -752,31 +730,14 @@ public class InternalSearchHit implements SearchHit { out.writeString(matchedFilter); } } - - if (context.streamShardTarget() == ShardTargetType.STREAM) { - if (shard == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - shard.writeTo(out); - } - } else if (context.streamShardTarget() == ShardTargetType.LOOKUP) { - if (shard == null) { - out.writeVInt(0); - } else { - out.writeVInt(context.shardHandleLookup().get(shard)); - } - } - + out.writeOptionalWriteable(shard); if (innerHits == null) { out.writeVInt(0); } else { out.writeVInt(innerHits.size()); for (Map.Entry entry : innerHits.entrySet()) { out.writeString(entry.getKey()); - ShardTargetType shardTarget = context.streamShardTarget(); - entry.getValue().writeTo(out, context.streamShardTarget(ShardTargetType.NO_STREAM)); - context.streamShardTarget(shardTarget); + entry.getValue().writeTo(out); } } } diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java index 592d4b0751e..9b82c8783a1 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.internal; -import com.carrotsearch.hppc.IntObjectHashMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -29,65 +28,12 @@ import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; import java.util.Arrays; -import java.util.IdentityHashMap; import java.util.Iterator; -import java.util.Map; import static org.elasticsearch.search.internal.InternalSearchHit.readSearchHit; -/** - * - */ public class InternalSearchHits implements SearchHits { - public static class StreamContext { - - public static enum ShardTargetType { - STREAM, - LOOKUP, - NO_STREAM - } - - private IdentityHashMap shardHandleLookup = new IdentityHashMap<>(); - private IntObjectHashMap handleShardLookup = new IntObjectHashMap<>(); - private ShardTargetType streamShardTarget = ShardTargetType.STREAM; - - public StreamContext reset() { - shardHandleLookup.clear(); - handleShardLookup.clear(); - streamShardTarget = ShardTargetType.STREAM; - return this; - } - - public IdentityHashMap shardHandleLookup() { - return shardHandleLookup; - } - - public IntObjectHashMap handleShardLookup() { - return handleShardLookup; - } - - public ShardTargetType streamShardTarget() { - return streamShardTarget; - } - - public StreamContext streamShardTarget(ShardTargetType streamShardTarget) { - this.streamShardTarget = streamShardTarget; - return this; - } - } - - private static final ThreadLocal cache = new ThreadLocal() { - @Override - protected StreamContext initialValue() { - return new StreamContext(); - } - }; - - public static StreamContext streamContext() { - return cache.get().reset(); - } - public static InternalSearchHits empty() { // 
We shouldn't use static final instance, since that could directly be returned by native transport clients return new InternalSearchHits(EMPTY, 0, 0); @@ -186,11 +132,6 @@ public class InternalSearchHits implements SearchHits { return builder; } - public static InternalSearchHits readSearchHits(StreamInput in, StreamContext context) throws IOException { - InternalSearchHits hits = new InternalSearchHits(); - hits.readFrom(in, context); - return hits; - } public static InternalSearchHits readSearchHits(StreamInput in) throws IOException { InternalSearchHits hits = new InternalSearchHits(); @@ -200,63 +141,27 @@ public class InternalSearchHits implements SearchHits { @Override public void readFrom(StreamInput in) throws IOException { - readFrom(in, streamContext().streamShardTarget(StreamContext.ShardTargetType.LOOKUP)); - } - - public void readFrom(StreamInput in, StreamContext context) throws IOException { totalHits = in.readVLong(); maxScore = in.readFloat(); int size = in.readVInt(); if (size == 0) { hits = EMPTY; } else { - if (context.streamShardTarget() == StreamContext.ShardTargetType.LOOKUP) { - // read the lookup table first - int lookupSize = in.readVInt(); - for (int i = 0; i < lookupSize; i++) { - context.handleShardLookup().put(in.readVInt(), new SearchShardTarget(in)); - } - } - hits = new InternalSearchHit[size]; for (int i = 0; i < hits.length; i++) { - hits[i] = readSearchHit(in, context); + hits[i] = readSearchHit(in); } } } @Override public void writeTo(StreamOutput out) throws IOException { - writeTo(out, streamContext().streamShardTarget(StreamContext.ShardTargetType.LOOKUP)); - } - - public void writeTo(StreamOutput out, StreamContext context) throws IOException { out.writeVLong(totalHits); out.writeFloat(maxScore); out.writeVInt(hits.length); if (hits.length > 0) { - if (context.streamShardTarget() == StreamContext.ShardTargetType.LOOKUP) { - // start from 1, 0 is for null! 
- int counter = 1; - for (InternalSearchHit hit : hits) { - if (hit.shard() != null) { - Integer handle = context.shardHandleLookup().get(hit.shard()); - if (handle == null) { - context.shardHandleLookup().put(hit.shard(), counter++); - } - } - } - out.writeVInt(context.shardHandleLookup().size()); - if (!context.shardHandleLookup().isEmpty()) { - for (Map.Entry entry : context.shardHandleLookup().entrySet()) { - out.writeVInt(entry.getValue()); - entry.getKey().writeTo(out); - } - } - } - for (InternalSearchHit hit : hits) { - hit.writeTo(out, context); + hit.writeTo(out); } } } diff --git a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index e583cfbf13e..92afb067a52 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.query; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.TopDocs; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -42,9 +41,6 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.common.lucene.Lucene.readTopDocs; import static org.elasticsearch.common.lucene.Lucene.writeTopDocs; -/** - * - */ public class QuerySearchResult extends QuerySearchResultProvider { private long id; @@ -209,7 +205,6 @@ public class QuerySearchResult extends QuerySearchResultProvider { public void readFromWithId(long id, StreamInput in) throws IOException { this.id = id; -// shardTarget = readSearchShardTarget(in); from = in.readVInt(); size = in.readVInt(); int numSortFieldsPlus1 = in.readVInt(); @@ -232,10 +227,7 @@ public class QuerySearchResult extends QuerySearchResultProvider { } searchTimedOut = in.readBoolean(); terminatedEarly = in.readOptionalBoolean(); - - if (in.getVersion().onOrAfter(Version.V_2_2_0) && in.readBoolean()) { - profileShardResults = new ProfileShardResult(in); - } + profileShardResults = in.readOptionalWriteable(ProfileShardResult::new); } @Override @@ -246,7 +238,6 @@ public class QuerySearchResult extends QuerySearchResultProvider { } public void writeToNoId(StreamOutput out) throws IOException { -// shardTarget.writeTo(out); out.writeVInt(from); out.writeVInt(size); if (sortValueFormats == null) { @@ -273,14 +264,6 @@ public class QuerySearchResult extends QuerySearchResultProvider { } out.writeBoolean(searchTimedOut); out.writeOptionalBoolean(terminatedEarly); - - if (out.getVersion().onOrAfter(Version.V_2_2_0)) { - if (profileShardResults == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - profileShardResults.writeTo(out); - } - } + out.writeOptionalWriteable(profileShardResults); } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java index 84533710781..c86c0565225 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.internal.InternalSearchHit; import 
org.elasticsearch.search.internal.InternalSearchHits; -import org.elasticsearch.search.internal.InternalSearchHits.StreamContext.ShardTargetType; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; @@ -261,8 +260,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion(contextSize); @@ -283,7 +281,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion { if (field instanceof StringField) { spare.resetFromString(field.stringValue()); } else { - spare.resetFromIndexHash(Long.parseLong(field.stringValue())); + // todo return this to .stringValue() once LatLonPoint implements it + spare.resetFromIndexableField(field); } geohashes.add(spare.geohash()); } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 1c807553a24..20b8c77d44a 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -620,19 +620,12 @@ public class TransportService extends AbstractLifecycleComponent { registerRequestHandler(reg); } - protected void registerRequestHandler(RequestHandlerRegistry reg) { + private void registerRequestHandler(RequestHandlerRegistry reg) { synchronized (requestHandlerMutex) { - RequestHandlerRegistry replaced = requestHandlers.get(reg.getAction()); - requestHandlers = MapBuilder.newMapBuilder(requestHandlers).put(reg.getAction(), reg).immutableMap(); - if (replaced != null) { - logger.warn("registered two transport handlers for action {}, handlers: {}, {}", reg.getAction(), reg, replaced); + if (requestHandlers.containsKey(reg.getAction())) { + throw new IllegalArgumentException("transport handlers for action " + reg.getAction() + " is already registered"); } - } - } - - public void removeHandler(String action) { - synchronized (requestHandlerMutex) { - requestHandlers = MapBuilder.newMapBuilder(requestHandlers).remove(action).immutableMap(); + requestHandlers = MapBuilder.newMapBuilder(requestHandlers).put(reg.getAction(), reg).immutableMap(); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index a7234b20ab5..b2798b00fca 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -37,7 +37,10 @@ import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.fieldstats.FieldStatsAction; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.replication.ReplicationResponse; +import org.elasticsearch.action.support.replication.TransportReplicationActionTests; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -46,10 +49,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.tasks.TaskResult; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import 
org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.tasks.TaskResult; import org.elasticsearch.tasks.TaskResultsService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.tasks.MockTaskManager; @@ -71,7 +74,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.concurrent.locks.ReentrantLock; import java.util.function.Consumer; import java.util.function.Function; @@ -94,7 +96,7 @@ import static org.hamcrest.Matchers.not; *

* We need at least 2 nodes so we have a master node and a non-master node */ -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, minNumDataNodes = 2) +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, minNumDataNodes = 2, transportClientRatio = 0.0) public class TasksIT extends ESIntegTestCase { private Map, RecordingTaskManagerListener> listeners = new HashMap<>(); @@ -326,48 +328,35 @@ } /** - * Very basic "is it plugged in" style test that indexes a document and - * makes sure that you can fetch the status of the process. The goal here is - * to verify that the large moving parts that make fetching task status work - * fit together rather than to verify any particular status results from - * indexing. For that, look at - * {@link org.elasticsearch.action.support.replication.TransportReplicationActionTests} - * . We intentionally don't use the task recording mechanism used in other - * places in this test so we can make sure that the status fetching works - * properly over the wire. + * Very basic "is it plugged in" style test that indexes a document and makes sure that you can fetch the status of the process. The + * goal here is to verify that the large moving parts that make fetching task status work fit together rather than to verify any + * particular status results from indexing. For that, look at {@link TransportReplicationActionTests}. We intentionally don't use the + * task recording mechanism used in other places in this test so we can make sure that the status fetching works properly over the wire. */ public void testCanFetchIndexStatus() throws InterruptedException, ExecutionException, IOException { - /* - * We prevent any tasks from unregistering until the test is done so we - * can fetch them. This will gum up the server if we leave it enabled - * but we'll be quick so it'll be OK (TM). - */ - ReentrantLock taskFinishLock = new ReentrantLock(); - taskFinishLock.lock(); - ListenableActionFuture indexFuture = null; + /* We make sure all indexing tasks wait to start until the letTaskFinish latch is counted down so we can fetch their status with both the get and + * list APIs. */ + CountDownLatch taskRegistered = new CountDownLatch(1); + CountDownLatch letTaskFinish = new CountDownLatch(1); + ListenableActionFuture indexFuture = null; try { - CountDownLatch taskRegistered = new CountDownLatch(1); for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { ((MockTaskManager) transportService.getTaskManager()).addListener(new MockTaskManagerListener() { @Override public void onTaskRegistered(Task task) { if (task.getAction().startsWith(IndexAction.NAME)) { taskRegistered.countDown(); + logger.debug("Blocking [{}] starting", task); + try { + letTaskFinish.await(10, TimeUnit.SECONDS); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } } } @Override public void onTaskUnregistered(Task task) { - /* - * We can't block all tasks here or the task listing task - * would never return.
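The rewritten test above replaces a ReentrantLock held for the duration of the test with two CountDownLatches: the listener counts taskRegistered down once the indexing task exists, then parks on letTaskFinish, which the test counts down after it has fetched the task's status. A condensed sketch of that hand-off, using hypothetical names and only java.util.concurrent:

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class LatchHandoffSketch {
    public static void main(String[] args) throws InterruptedException {
        CountDownLatch taskRegistered = new CountDownLatch(1);
        CountDownLatch letTaskFinish = new CountDownLatch(1);

        // Stand-in for the indexing task that the MockTaskManager listener blocks.
        Thread task = new Thread(() -> {
            taskRegistered.countDown();                    // signal: the task now exists
            try {
                letTaskFinish.await(10, TimeUnit.SECONDS); // park until the test has inspected it
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
        });
        task.start();

        taskRegistered.await();     // the test waits until the task is registered...
        // ...fetches its status with the list and get APIs here...
        letTaskFinish.countDown();  // ...and only then releases it
        task.join();
    }
}
```

Unlike the old lock, this blocks only the one indexing task, and the bounded await keeps a failed hand-off from hanging the suite.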
- */ - if (false == task.getAction().startsWith(IndexAction.NAME)) { - return; - } - logger.debug("Blocking {} from being unregistered", task); - taskFinishLock.lock(); - taskFinishLock.unlock(); } @Override @@ -390,16 +379,17 @@ public class TasksIT extends ESIntegTestCase { assertEquals(task.getType(), fetchedWithGet.getType()); assertEquals(task.getAction(), fetchedWithGet.getAction()); assertEquals(task.getDescription(), fetchedWithGet.getDescription()); - // The status won't always be equal - it might change between the list and the get. + assertEquals(task.getStatus(), fetchedWithGet.getStatus()); assertEquals(task.getStartTime(), fetchedWithGet.getStartTime()); assertThat(fetchedWithGet.getRunningTimeNanos(), greaterThanOrEqualTo(task.getRunningTimeNanos())); assertEquals(task.isCancellable(), fetchedWithGet.isCancellable()); assertEquals(task.getParentTaskId(), fetchedWithGet.getParentTaskId()); } } finally { - taskFinishLock.unlock(); + letTaskFinish.countDown(); if (indexFuture != null) { - indexFuture.get(); + IndexResponse indexResponse = indexFuture.get(); + assertArrayEquals(ReplicationResponse.EMPTY, indexResponse.getShardInfo().getFailures()); } } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index a887b2f01eb..755bad4c5b9 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -60,7 +60,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) -@TestLogging("_root:DEBUG,action.admin.indices.shards:TRACE,cluster.service:TRACE") +@TestLogging("_root:DEBUG,org.elasticsearch.action.admin.indices.shards:TRACE,org.elasticsearch.cluster.service:TRACE") public class IndicesShardStoreRequestIT extends ESIntegTestCase { @Override diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index d895ef9cbf5..3c89a6ab744 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService; import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService.PutRequest; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.NodeServicesProvider; @@ -54,12 +55,17 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { Map map = new HashMap<>(); map.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "0"); + map.put("index.shard.check_on_startup", "blargh"); request.settings(Settings.builder().put(map).build()); List throwables = putTemplate(request); assertEquals(throwables.size(), 1); assertThat(throwables.get(0), 
instanceOf(InvalidIndexTemplateException.class)); - assertThat(throwables.get(0).getMessage(), containsString("index must have 1 or more primary shards")); + assertThat(throwables.get(0).getMessage(), + containsString("Failed to parse value [0] for setting [index.number_of_shards] must be >= 1")); + assertThat(throwables.get(0).getMessage(), + containsString("unknown value for [index.shard.check_on_startup] " + + "must be one of [true, false, fix, checksum] but was: blargh")); } public void testIndexTemplateValidationAccumulatesValidationErrors() { @@ -75,7 +81,8 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { assertThat(throwables.get(0), instanceOf(InvalidIndexTemplateException.class)); assertThat(throwables.get(0).getMessage(), containsString("name must not contain a space")); assertThat(throwables.get(0).getMessage(), containsString("template must not start with '_'")); - assertThat(throwables.get(0).getMessage(), containsString("index must have 1 or more primary shards")); + assertThat(throwables.get(0).getMessage(), + containsString("Failed to parse value [0] for setting [index.number_of_shards] must be >= 1")); } public void testIndexTemplateWithAliasNameEqualToTemplatePattern() { @@ -160,7 +167,9 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { null, null, null, null, null); - MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, new AliasValidator(Settings.EMPTY), null, null); + MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, + new AliasValidator(Settings.EMPTY), null, null, + new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS)); final List throwables = new ArrayList<>(); service.putTemplate(request, new MetaDataIndexTemplateService.PutListener() { @@ -192,7 +201,8 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { null, null); MetaDataIndexTemplateService service = new MetaDataIndexTemplateService( - Settings.EMPTY, clusterService, createIndexService, new AliasValidator(Settings.EMPTY), indicesService, nodeServicesProvider); + Settings.EMPTY, clusterService, createIndexService, new AliasValidator(Settings.EMPTY), indicesService, nodeServicesProvider, + new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS)); final List throwables = new ArrayList<>(); final CountDownLatch latch = new CountDownLatch(1); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 142fb282c20..a74a3879bef 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.Requests; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.script.Script; @@ -39,6 +40,7 @@ import java.util.Map; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -125,49 +127,34 @@ public class BulkRequestTests extends ESTestCase { public void testSimpleBulk6() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk6.json"); BulkRequest bulkRequest = new BulkRequest(); - try { - bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); - fail("should have thrown an exception about the wrong format of line 1"); - } catch (IllegalArgumentException e) { - assertThat("message contains error about the wrong format of line 1: " + e.getMessage(), - e.getMessage().contains("Malformed action/metadata line [1], expected a simple value for field [_source] but found [START_OBJECT]"), equalTo(true)); - } + ParsingException exc = expectThrows(ParsingException.class, + () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null)); + assertThat(exc.getMessage(), containsString("Unknown key for a VALUE_STRING in [hello]")); } public void testSimpleBulk7() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk7.json"); BulkRequest bulkRequest = new BulkRequest(); - try { - bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); - fail("should have thrown an exception about the wrong format of line 5"); - } catch (IllegalArgumentException e) { - assertThat("message contains error about the wrong format of line 5: " + e.getMessage(), - e.getMessage().contains("Malformed action/metadata line [5], expected a simple value for field [_unkown] but found [START_ARRAY]"), equalTo(true)); - } + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, + () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null)); + assertThat(exc.getMessage(), + containsString("Malformed action/metadata line [5], expected a simple value for field [_unkown] but found [START_ARRAY]")); } public void testSimpleBulk8() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk8.json"); BulkRequest bulkRequest = new BulkRequest(); - try { - bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); - fail("should have thrown an exception about the unknown parameter _foo"); - } catch (IllegalArgumentException e) { - assertThat("message contains error about the unknown parameter _foo: " + e.getMessage(), - e.getMessage().contains("Action/metadata line [3] contains an unknown parameter [_foo]"), equalTo(true)); - } + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, + () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null)); + assertThat(exc.getMessage(), containsString("Action/metadata line [3] contains an unknown parameter [_foo]")); } public void testSimpleBulk9() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk9.json"); BulkRequest bulkRequest = new BulkRequest(); - try { - bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); - fail("should have thrown an exception about the wrong format of line 3"); - } catch (IllegalArgumentException e) { - assertThat("message contains error about the wrong format of line 3: " + 
e.getMessage(), - e.getMessage().contains("Malformed action/metadata line [3], expected START_OBJECT or END_OBJECT but found [START_ARRAY]"), equalTo(true)); - } + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, + () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null)); + assertThat(exc.getMessage(), containsString("Malformed action/metadata line [3], expected START_OBJECT or END_OBJECT but found [START_ARRAY]")); } public void testSimpleBulk10() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java index 3b88efca202..c9fa93f76db 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java @@ -295,7 +295,8 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { for (int i = 0; i < numDocs; i++) { builder.add( client().prepareUpdate() - .setIndex("test").setType("type1").setId(Integer.toString(i)).setFields("counter") + .setIndex("test").setType("type1").setId(Integer.toString(i)) + .setFields("counter") .setScript(script) .setUpsert(jsonBuilder().startObject().field("counter", 1).endObject())); } @@ -405,8 +406,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { assertThat(response.getItems()[i].getType(), equalTo("type1")); assertThat(response.getItems()[i].getOpType(), equalTo("update")); for (int j = 0; j < 5; j++) { - GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).setFields("counter").execute() - .actionGet(); + GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).get(); assertThat(getResponse.isExists(), equalTo(false)); } } diff --git a/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java b/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java index eed5f85c4a4..1d3d2c1b553 100644 --- a/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java @@ -52,7 +52,7 @@ public class MultiGetShardRequestTests extends ESTestCase { for (int j = 0; j < fields.length; j++) { fields[j] = randomAsciiOfLength(randomIntBetween(1, 10)); } - item.fields(fields); + item.storedFields(fields); } if (randomBoolean()) { item.version(randomIntBetween(1, Integer.MAX_VALUE)); @@ -84,7 +84,7 @@ public class MultiGetShardRequestTests extends ESTestCase { assertThat(item2.index(), equalTo(item.index())); assertThat(item2.type(), equalTo(item.type())); assertThat(item2.id(), equalTo(item.id())); - assertThat(item2.fields(), equalTo(item.fields())); + assertThat(item2.storedFields(), equalTo(item.storedFields())); assertThat(item2.version(), equalTo(item.version())); assertThat(item2.versionType(), equalTo(item.versionType())); assertThat(item2.fetchSourceContext(), equalTo(item.fetchSourceContext())); diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index bda117642a0..f821f82c33a 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -131,6 +131,8 @@ 
public class TransportReplicationActionTests extends ESTestCase { private TransportService transportService; private CapturingTransport transport; private Action action; + private ShardStateAction shardStateAction; + /* * * TransportReplicationAction needs an instance of IndexShard to count operations. * indexShards is reset to null before each test and will be initialized upon request in the tests. @@ -150,7 +152,8 @@ public class TransportReplicationActionTests extends ESTestCase { transportService = new TransportService(clusterService.getSettings(), transport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); - action = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool); + shardStateAction = new ShardStateAction(Settings.EMPTY, clusterService, transportService, null, null, threadPool); + action = new Action(Settings.EMPTY, "testAction", transportService, clusterService, shardStateAction, threadPool); } @After @@ -707,7 +710,8 @@ public class TransportReplicationActionTests extends ESTestCase { final ShardRouting replicaRouting = state.getRoutingTable().shardRoutingTable(shardId).replicaShards().get(0); boolean throwException = randomBoolean(); final ReplicationTask task = maybeTask(); - Action action = new Action(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool) { + Action action = new Action(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, shardStateAction, + threadPool) { @Override protected ReplicaResult shardOperationOnReplica(Request request) { assertIndexShardCounter(1); @@ -826,7 +830,8 @@ public class TransportReplicationActionTests extends ESTestCase { setState(clusterService, state); AtomicBoolean throwException = new AtomicBoolean(true); final ReplicationTask task = maybeTask(); - Action action = new Action(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool) { + Action action = new Action(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, shardStateAction, + threadPool) { @Override protected ReplicaResult shardOperationOnReplica(Request request) { assertPhase(task, "replica"); @@ -940,9 +945,10 @@ public class TransportReplicationActionTests extends ESTestCase { Action(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, + ShardStateAction shardStateAction, ThreadPool threadPool) { super(settings, actionName, transportService, clusterService, mockIndicesService(clusterService), threadPool, - new ShardStateAction(settings, clusterService, transportService, null, null, threadPool), + shardStateAction, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new, Request::new, ThreadPool.Names.SAME); } diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 7b606ee4159..cb27a527f63 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -48,7 +48,7 @@ public class UpdateRequestTests extends ESTestCase { public void testUpdateRequest() throws Exception { UpdateRequest request = new UpdateRequest("test", "type", "1"); // simple script - request.source(XContentFactory.jsonBuilder().startObject() + request.fromXContent(XContentFactory.jsonBuilder().startObject() 
.field("script", "script1") .endObject()); Script script = request.script(); @@ -60,7 +60,7 @@ public class UpdateRequestTests extends ESTestCase { assertThat(params, nullValue()); // simple verbose script - request.source(XContentFactory.jsonBuilder().startObject() + request.fromXContent(XContentFactory.jsonBuilder().startObject() .startObject("script").field("inline", "script1").endObject() .endObject()); script = request.script(); @@ -73,8 +73,13 @@ public class UpdateRequestTests extends ESTestCase { // script with params request = new UpdateRequest("test", "type", "1"); - request.source(XContentFactory.jsonBuilder().startObject().startObject("script").field("inline", "script1").startObject("params") - .field("param1", "value1").endObject().endObject().endObject()); + request.fromXContent(XContentFactory.jsonBuilder().startObject() + .startObject("script") + .field("inline", "script1") + .startObject("params") + .field("param1", "value1") + .endObject() + .endObject().endObject()); script = request.script(); assertThat(script, notNullValue()); assertThat(script.getScript(), equalTo("script1")); @@ -86,8 +91,9 @@ public class UpdateRequestTests extends ESTestCase { assertThat(params.get("param1").toString(), equalTo("value1")); request = new UpdateRequest("test", "type", "1"); - request.source(XContentFactory.jsonBuilder().startObject().startObject("script").startObject("params").field("param1", "value1") - .endObject().field("inline", "script1").endObject().endObject()); + request.fromXContent(XContentFactory.jsonBuilder().startObject().startObject("script") + .startObject("params").field("param1", "value1").endObject() + .field("inline", "script1").endObject().endObject()); script = request.script(); assertThat(script, notNullValue()); assertThat(script.getScript(), equalTo("script1")); @@ -100,9 +106,19 @@ public class UpdateRequestTests extends ESTestCase { // script with params and upsert request = new UpdateRequest("test", "type", "1"); - request.source(XContentFactory.jsonBuilder().startObject().startObject("script").startObject("params").field("param1", "value1") - .endObject().field("inline", "script1").endObject().startObject("upsert").field("field1", "value1").startObject("compound") - .field("field2", "value2").endObject().endObject().endObject()); + request.fromXContent(XContentFactory.jsonBuilder().startObject() + .startObject("script") + .startObject("params") + .field("param1", "value1") + .endObject() + .field("inline", "script1") + .endObject() + .startObject("upsert") + .field("field1", "value1") + .startObject("compound") + .field("field2", "value2") + .endObject() + .endObject().endObject()); script = request.script(); assertThat(script, notNullValue()); assertThat(script.getScript(), equalTo("script1")); @@ -117,9 +133,19 @@ public class UpdateRequestTests extends ESTestCase { assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2")); request = new UpdateRequest("test", "type", "1"); - request.source(XContentFactory.jsonBuilder().startObject().startObject("upsert").field("field1", "value1").startObject("compound") - .field("field2", "value2").endObject().endObject().startObject("script").startObject("params").field("param1", "value1") - .endObject().field("inline", "script1").endObject().endObject()); + request.fromXContent(XContentFactory.jsonBuilder().startObject() + .startObject("upsert") + .field("field1", "value1") + .startObject("compound") + .field("field2", "value2") + .endObject() + .endObject() + .startObject("script") + 
.startObject("params") + .field("param1", "value1") + .endObject() + .field("inline", "script1") + .endObject().endObject()); script = request.script(); assertThat(script, notNullValue()); assertThat(script.getScript(), equalTo("script1")); @@ -135,8 +161,9 @@ public class UpdateRequestTests extends ESTestCase { // script with doc request = new UpdateRequest("test", "type", "1"); - request.source(XContentFactory.jsonBuilder().startObject().startObject("doc").field("field1", "value1").startObject("compound") - .field("field2", "value2").endObject().endObject().endObject()); + request.fromXContent(XContentFactory.jsonBuilder().startObject() + .startObject("doc").field("field1", "value1").startObject("compound") + .field("field2", "value2").endObject().endObject().endObject()); Map doc = request.doc().sourceAsMap(); assertThat(doc.get("field1").toString(), equalTo("value1")); assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2")); @@ -187,7 +214,7 @@ public class UpdateRequestTests extends ESTestCase { public void testInvalidBodyThrowsParseException() throws Exception { UpdateRequest request = new UpdateRequest("test", "type", "1"); try { - request.source(new byte[] { (byte) '"' }); + request.fromXContent(new byte[] { (byte) '"' }); fail("Should have thrown a ElasticsearchParseException"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("Failed to derive xcontent")); @@ -197,13 +224,56 @@ public class UpdateRequestTests extends ESTestCase { // Related to issue 15338 public void testFieldsParsing() throws Exception { UpdateRequest request = new UpdateRequest("test", "type1", "1") - .source(new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}")); + .fromXContent(new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}")); assertThat(request.doc().sourceAsMap().get("field1").toString(), equalTo("value1")); assertThat(request.fields(), arrayContaining("_source")); request = new UpdateRequest("test", "type2", "2") - .source(new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}")); + .fromXContent(new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}")); assertThat(request.doc().sourceAsMap().get("field2").toString(), equalTo("value2")); assertThat(request.fields(), arrayContaining("field1", "field2")); } + + public void testFetchSourceParsing() throws Exception { + UpdateRequest request = new UpdateRequest("test", "type1", "1"); + request.fromXContent( + XContentFactory.jsonBuilder().startObject().field("_source", true).endObject() + ); + assertThat(request.fetchSource(), notNullValue()); + assertThat(request.fetchSource().includes().length, equalTo(0)); + assertThat(request.fetchSource().excludes().length, equalTo(0)); + assertThat(request.fetchSource().fetchSource(), equalTo(true)); + + request.fromXContent( + XContentFactory.jsonBuilder().startObject().field("_source", false).endObject() + ); + assertThat(request.fetchSource(), notNullValue()); + assertThat(request.fetchSource().includes().length, equalTo(0)); + assertThat(request.fetchSource().excludes().length, equalTo(0)); + assertThat(request.fetchSource().fetchSource(), equalTo(false)); + + request.fromXContent( + XContentFactory.jsonBuilder().startObject().field("_source", "path.inner.*").endObject() + ); + assertThat(request.fetchSource(), notNullValue()); + assertThat(request.fetchSource().fetchSource(), equalTo(true)); + 
assertThat(request.fetchSource().includes().length, equalTo(1)); + assertThat(request.fetchSource().excludes().length, equalTo(0)); + assertThat(request.fetchSource().includes()[0], equalTo("path.inner.*")); + + request.fromXContent( + XContentFactory.jsonBuilder().startObject() + .startObject("_source") + .field("includes", "path.inner.*") + .field("excludes", "another.inner.*") + .endObject() + .endObject() + ); + assertThat(request.fetchSource(), notNullValue()); + assertThat(request.fetchSource().fetchSource(), equalTo(true)); + assertThat(request.fetchSource().includes().length, equalTo(1)); + assertThat(request.fetchSource().excludes().length, equalTo(1)); + assertThat(request.fetchSource().includes()[0], equalTo("path.inner.*")); + assertThat(request.fetchSource().excludes()[0], equalTo("another.inner.*")); + } } diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java index f8bdf244999..9a1417bdfa6 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java @@ -43,6 +43,9 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "--daemonize"); runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "-p", "/tmp/pid"); runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "--pidfile", "/tmp/pid"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "-q"); + runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "--quiet"); + runTestThatVersionIsReturned("-V"); runTestThatVersionIsReturned("--version"); } @@ -66,7 +69,7 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { } private void runTestVersion(int expectedStatus, Consumer outputConsumer, String... 
args) throws Exception { - runTest(expectedStatus, false, outputConsumer, (foreground, pidFile, esSettings) -> {}, args); + runTest(expectedStatus, false, outputConsumer, (foreground, pidFile, quiet, esSettings) -> {}, args); } public void testPositionalArgs() throws Exception { @@ -74,21 +77,21 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { ExitCodes.USAGE, false, output -> assertThat(output, containsString("Positional arguments not allowed, found [foo]")), - (foreground, pidFile, esSettings) -> {}, + (foreground, pidFile, quiet, esSettings) -> {}, "foo" ); runTest( ExitCodes.USAGE, false, output -> assertThat(output, containsString("Positional arguments not allowed, found [foo, bar]")), - (foreground, pidFile, esSettings) -> {}, + (foreground, pidFile, quiet, esSettings) -> {}, "foo", "bar" ); runTest( ExitCodes.USAGE, false, output -> assertThat(output, containsString("Positional arguments not allowed, found [foo]")), - (foreground, pidFile, esSettings) -> {}, + (foreground, pidFile, quiet, esSettings) -> {}, "-E", "foo=bar", "foo", "-E", "baz=qux" ); } @@ -109,7 +112,7 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { expectedStatus, expectedInit, outputConsumer, - (foreground, pidFile, esSettings) -> assertThat(pidFile.toString(), equalTo(expectedPidFile.toString())), + (foreground, pidFile, quiet, esSettings) -> assertThat(pidFile.toString(), equalTo(expectedPidFile.toString())), args); } @@ -124,7 +127,22 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { ExitCodes.OK, true, output -> {}, - (foreground, pidFile, esSettings) -> assertThat(foreground, equalTo(!expectedDaemonize)), + (foreground, pidFile, quiet, esSettings) -> assertThat(foreground, equalTo(!expectedDaemonize)), + args); + } + + public void testThatParsingQuietOptionWorks() throws Exception { + runQuietTest(true, "-q"); + runQuietTest(true, "--quiet"); + runQuietTest(false); + } + + private void runQuietTest(final boolean expectedQuiet, final String... 
args) throws Exception { + runTest( + ExitCodes.OK, + true, + output -> {}, + (foreground, pidFile, quiet, esSettings) -> assertThat(quiet, equalTo(expectedQuiet)), args); } @@ -133,7 +151,7 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { ExitCodes.OK, true, output -> {}, - (foreground, pidFile, esSettings) -> { + (foreground, pidFile, quiet, esSettings) -> { assertThat(esSettings.size(), equalTo(2)); assertThat(esSettings, hasEntry("foo", "bar")); assertThat(esSettings, hasEntry("baz", "qux")); @@ -147,7 +165,7 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { ExitCodes.USAGE, false, output -> assertThat(output, containsString("Setting [foo] must not be empty")), - (foreground, pidFile, esSettings) -> {}, + (foreground, pidFile, quiet, esSettings) -> {}, "-E", "foo=" ); } @@ -157,7 +175,7 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { ExitCodes.USAGE, false, output -> assertThat(output, containsString("network.host is not a recognized option")), - (foreground, pidFile, esSettings) -> {}, + (foreground, pidFile, quiet, esSettings) -> {}, "--network.host"); } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java index 9bfcc554998..d7ed0d8db5e 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java @@ -47,7 +47,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; */ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) // this test sometimes fails in recovery when the recovery is reset, increasing the logging level to help debug -@TestLogging("indices.recovery:DEBUG") +@TestLogging("org.elasticsearch.indices.recovery:DEBUG") public class RepositoryUpgradabilityIT extends AbstractSnapshotIntegTestCase { /** diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 33141107b2e..07c1e5dd8da 100644 --- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -31,7 +31,6 @@ import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; -import org.elasticsearch.discovery.zen.fd.FaultDetection; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -65,7 +64,7 @@ import static org.hamcrest.Matchers.nullValue; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) @ESIntegTestCase.SuppressLocalMode -@TestLogging("_root:DEBUG,cluster.service:TRACE,discovery.zen:TRACE") +@TestLogging("_root:DEBUG,org.elasticsearch.cluster.service:TRACE,org.elasticsearch.discovery.zen:TRACE") public class MinimumMasterNodesIT extends ESIntegTestCase { @Override @@ -364,7 +363,6 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { public void testCanNotPublishWithoutMinMastNodes() throws Exception { Settings settings = Settings.builder() .put("discovery.type", "zen") - .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1h") // disable it .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") 
.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "100ms") // speed things up @@ -379,7 +377,6 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { new TwoPartitions(Collections.singleton(master), otherNodes), new NetworkDelay(TimeValue.timeValueMinutes(1))); internalCluster().setDisruptionScheme(partition); - partition.startDisrupting(); final CountDownLatch latch = new CountDownLatch(1); final AtomicReference failure = new AtomicReference<>(); @@ -393,6 +390,8 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { @Override public ClusterState execute(ClusterState currentState) throws Exception { + logger.debug("--> starting the disruption, preventing cluster state publishing"); + partition.startDisrupting(); MetaData.Builder metaData = MetaData.builder(currentState.metaData()).persistentSettings( Settings.builder().put(currentState.metaData().persistentSettings()).put("_SHOULD_NOT_BE_THERE_", true).build() ); diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java index cf80e8378ab..b8527872d70 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java @@ -363,7 +363,7 @@ public class ClusterServiceIT extends ESIntegTestCase { assertThat(processedLatch.await(1, TimeUnit.SECONDS), equalTo(true)); } - @TestLogging("_root:debug,action.admin.cluster.tasks:trace") + @TestLogging("_root:debug,org.elasticsearch.action.admin.cluster.tasks:trace") public void testPendingUpdateTask() throws Exception { Settings settings = Settings.builder() .put("discovery.type", "local") diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java index e21d38f6472..af5dc422e66 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java @@ -677,18 +677,30 @@ public class ClusterServiceTests extends ESTestCase { latch.await(); } - @TestLogging("cluster:TRACE") // To ensure that we log cluster state events on TRACE level + @TestLogging("org.elasticsearch.cluster.service:TRACE") // To ensure that we log cluster state events on TRACE level public void testClusterStateUpdateLogging() throws Exception { MockLogAppender mockAppender = new MockLogAppender(); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test1", "cluster.service", Level.DEBUG, - "*processing [test1]: took [1s] no change in cluster_state")); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test2", "cluster.service", Level.TRACE, - "*failed to execute cluster state update in [2s]*")); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test3", "cluster.service", Level.DEBUG, - "*processing [test3]: took [3s] done applying updated cluster_state (version: *, uuid: *)")); + mockAppender.addExpectation( + new MockLogAppender.SeenEventExpectation( + "test1", + "org.elasticsearch.cluster.service.ClusterServiceTests$TimedClusterService", + Level.DEBUG, + "*processing [test1]: took [1s] no change in cluster_state")); + mockAppender.addExpectation( + new MockLogAppender.SeenEventExpectation( + "test2", + 
"org.elasticsearch.cluster.service.ClusterServiceTests$TimedClusterService", + Level.TRACE, + "*failed to execute cluster state update in [2s]*")); + mockAppender.addExpectation( + new MockLogAppender.SeenEventExpectation( + "test3", + "org.elasticsearch.cluster.service.ClusterServiceTests$TimedClusterService", + Level.DEBUG, + "*processing [test3]: took [3s] done applying updated cluster_state (version: *, uuid: *)")); - Logger rootLogger = LogManager.getRootLogger(); - Loggers.addAppender(rootLogger, mockAppender); + Logger clusterLogger = Loggers.getLogger("org.elasticsearch.cluster.service"); + Loggers.addAppender(clusterLogger, mockAppender); try { final CountDownLatch latch = new CountDownLatch(4); clusterService.currentTimeOverride = System.nanoTime(); @@ -743,7 +755,7 @@ public class ClusterServiceTests extends ESTestCase { fail(); } }); - // Additional update task to make sure all previous logging made it to the logger + // Additional update task to make sure all previous logging made it to the loggerName // We don't check logging for this on since there is no guarantee that it will occur before our check clusterService.submitStateUpdateTask("test4", new ClusterStateUpdateTask() { @Override @@ -763,25 +775,41 @@ public class ClusterServiceTests extends ESTestCase { }); latch.await(); } finally { - Loggers.removeAppender(rootLogger, mockAppender); + Loggers.removeAppender(clusterLogger, mockAppender); } mockAppender.assertAllExpectationsMatched(); } - @TestLogging("cluster:WARN") // To ensure that we log cluster state events on WARN level + @TestLogging("org.elasticsearch.cluster.service:WARN") // To ensure that we log cluster state events on WARN level public void testLongClusterStateUpdateLogging() throws Exception { MockLogAppender mockAppender = new MockLogAppender(); - mockAppender.addExpectation(new MockLogAppender.UnseenEventExpectation("test1 shouldn't see because setting is too low", - "cluster.service", Level.WARN, "*cluster state update task [test1] took [*] above the warn threshold of *")); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test2", "cluster.service", Level.WARN, - "*cluster state update task [test2] took [32s] above the warn threshold of *")); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test3", "cluster.service", Level.WARN, - "*cluster state update task [test3] took [33s] above the warn threshold of *")); - mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test4", "cluster.service", Level.WARN, - "*cluster state update task [test4] took [34s] above the warn threshold of *")); + mockAppender.addExpectation( + new MockLogAppender.UnseenEventExpectation( + "test1 shouldn't see because setting is too low", + "org.elasticsearch.cluster.service.ClusterServiceTests$TimedClusterService", + Level.WARN, + "*cluster state update task [test1] took [*] above the warn threshold of *")); + mockAppender.addExpectation( + new MockLogAppender.SeenEventExpectation( + "test2", + "org.elasticsearch.cluster.service.ClusterServiceTests$TimedClusterService", + Level.WARN, + "*cluster state update task [test2] took [32s] above the warn threshold of *")); + mockAppender.addExpectation( + new MockLogAppender.SeenEventExpectation( + "test3", + "org.elasticsearch.cluster.service.ClusterServiceTests$TimedClusterService", + Level.WARN, + "*cluster state update task [test3] took [33s] above the warn threshold of *")); + mockAppender.addExpectation( + new MockLogAppender.SeenEventExpectation( + "test4", + 
"org.elasticsearch.cluster.service.ClusterServiceTests$TimedClusterService", + Level.WARN, + "*cluster state update task [test4] took [34s] above the warn threshold of *")); - Logger rootLogger = LogManager.getRootLogger(); - Loggers.addAppender(rootLogger, mockAppender); + Logger clusterLogger = Loggers.getLogger("org.elasticsearch.cluster.service"); + Loggers.addAppender(clusterLogger, mockAppender); try { final CountDownLatch latch = new CountDownLatch(5); final CountDownLatch processedFirstTask = new CountDownLatch(1); @@ -857,7 +885,7 @@ public class ClusterServiceTests extends ESTestCase { fail(); } }); - // Additional update task to make sure all previous logging made it to the logger + // Additional update task to make sure all previous logging made it to the loggerName // We don't check logging for this on since there is no guarantee that it will occur before our check clusterService.submitStateUpdateTask("test5", new ClusterStateUpdateTask() { @Override @@ -877,7 +905,7 @@ public class ClusterServiceTests extends ESTestCase { }); latch.await(); } finally { - Loggers.removeAppender(rootLogger, mockAppender); + Loggers.removeAppender(clusterLogger, mockAppender); } mockAppender.assertAllExpectationsMatched(); } diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java index e5b95f258a3..85a74724161 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java @@ -22,6 +22,10 @@ package org.elasticsearch.common.network; import org.elasticsearch.test.ESTestCase; import java.net.InetAddress; +import java.net.NetworkInterface; +import java.util.Collections; + +import static org.hamcrest.Matchers.containsString; /** * Tests for network utils. Please avoid using any methods that cause DNS lookups! @@ -74,4 +78,30 @@ public class NetworkUtilsTests extends ESTestCase { assertArrayEquals(new InetAddress[] { InetAddress.getByName("127.0.0.1") }, NetworkUtils.filterIPV4(addresses)); assertArrayEquals(new InetAddress[] { InetAddress.getByName("::1") }, NetworkUtils.filterIPV6(addresses)); } + + /** + * Test that selecting by name is possible and properly matches the addresses on all interfaces and virtual + * interfaces. + * + * Note that to avoid that this test fails when interfaces are down or they do not have addresses assigned to them, + * they are ignored. 
+ */ + public void testAddressInterfaceLookup() throws Exception { + for (NetworkInterface netIf : NetworkUtils.getInterfaces()) { + if (!netIf.isUp() || Collections.list(netIf.getInetAddresses()).isEmpty()) { + continue; + } + + String name = netIf.getName(); + InetAddress[] expectedAddresses = Collections.list(netIf.getInetAddresses()).toArray(new InetAddress[0]); + InetAddress[] foundAddresses = NetworkUtils.getAddressesForInterface(name); + assertArrayEquals(expectedAddresses, foundAddresses); + } + } + + public void testNonExistingInterface() throws Exception { + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> NetworkUtils.getAddressesForInterface("non-existing")); + assertThat(exception.getMessage(), containsString("No interface named 'non-existing' found")); + } } diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index fe06f9a85d4..d0afcde265d 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -118,7 +118,7 @@ import static org.hamcrest.Matchers.nullValue; @ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 0) @ESIntegTestCase.SuppressLocalMode -@TestLogging("_root:DEBUG,cluster.service:TRACE") +@TestLogging("_root:DEBUG,org.elasticsearch.cluster.service:TRACE") public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { private static final TimeValue DISRUPTION_HEALING_OVERHEAD = TimeValue.timeValueSeconds(40); // we use 30s as timeout in many places. @@ -384,7 +384,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { * This test isolates the master from rest of the cluster, waits for a new master to be elected, restores the partition * and verifies that all node agree on the new cluster state */ - @TestLogging("_root:DEBUG,cluster.service:TRACE,gateway:TRACE,indices.store:TRACE") + @TestLogging("_root:DEBUG,org.elasticsearch.cluster.service:TRACE,org.elasticsearch.gateway:TRACE,org.elasticsearch.indices.store:TRACE") public void testIsolateMasterAndVerifyClusterStateConsensus() throws Exception { final List nodes = startCluster(3); @@ -454,8 +454,8 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { *

* This test is a superset of tests run in the Jepsen test suite, with the exception of versioned updates */ - @TestLogging("_root:DEBUG,action.index:TRACE,action.get:TRACE,discovery:TRACE,cluster.service:TRACE," - + "indices.recovery:TRACE,indices.cluster:TRACE") + @TestLogging("_root:DEBUG,org.elasticsearch.action.index:TRACE,org.elasticsearch.action.get:TRACE,org.elasticsearch.discovery:TRACE,org.elasticsearch.cluster.service:TRACE," + + "org.elasticsearch.indices.recovery:TRACE,org.elasticsearch.indices.cluster:TRACE") public void testAckedIndexing() throws Exception { final int seconds = !(TEST_NIGHTLY && rarely()) ? 1 : 5; @@ -636,7 +636,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { * that already are following another elected master node. These nodes should reject this cluster state and prevent * them from following the stale master. */ - @TestLogging("_root:DEBUG,cluster.service:TRACE,test.disruption:TRACE") + @TestLogging("_root:DEBUG,org.elasticsearch.cluster.service:TRACE,org.elasticsearch.test.disruption:TRACE") public void testStaleMasterNotHijackingMajority() throws Exception { // 3 node cluster with unicast discovery and minimum_master_nodes set to 2: final List nodes = startCluster(3, 2); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java index 9a111152a1a..0acba3c420f 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java @@ -91,7 +91,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -@TestLogging("discovery.zen:TRACE") +@TestLogging("org.elasticsearch.discovery.zen:TRACE") public class NodeJoinControllerTests extends ESTestCase { private static ThreadPool threadPool; diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index 235df2d8a35..b9f65016048 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -19,7 +19,9 @@ package org.elasticsearch.discovery.zen; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -35,18 +37,18 @@ import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.ZenPing; import org.elasticsearch.discovery.zen.ping.ZenPingService; import org.elasticsearch.discovery.zen.publish.PublishClusterStateActionTests.AssertingAckListener; -import org.elasticsearch.discovery.zen.publish.PublishClusterStateActionTests.MockNode; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import java.io.Closeable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.List; -import 
java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -55,14 +57,11 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.discovery.zen.ZenDiscovery.shouldIgnoreOrRejectNewClusterState; import static org.elasticsearch.discovery.zen.elect.ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING; -import static org.elasticsearch.discovery.zen.publish.PublishClusterStateActionTests.createMockNode; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -/** - */ public class ZenDiscoveryUnitTests extends ESTestCase { public void testShouldIgnoreNewClusterState() { @@ -154,59 +153,76 @@ public class ZenDiscoveryUnitTests extends ESTestCase { Settings settings = Settings.builder() .put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.toString(minMasterNodes)).build(); - Map nodes = new HashMap<>(); - ZenDiscovery zenDiscovery = null; - ClusterService clusterService = null; + ArrayList toClose = new ArrayList<>(); try { Set expectedFDNodes = null; - // create master node and its mocked up services - MockNode master = createMockNode("master", settings, null, threadPool, logger, nodes).setAsMaster(); - ClusterState state = master.clusterState; // initial cluster state + final MockTransportService masterTransport = MockTransportService.local(settings, Version.CURRENT, threadPool); + masterTransport.start(); + DiscoveryNode masterNode = new DiscoveryNode("master", masterTransport.boundAddress().publishAddress(), Version.CURRENT); + toClose.add(masterTransport); + masterTransport.setLocalNode(masterNode); + ClusterState state = ClusterStateCreationUtils.state(masterNode, masterNode, masterNode); // build the zen discovery and cluster service - clusterService = createClusterService(threadPool, master.discoveryNode); - setState(clusterService, state); - zenDiscovery = buildZenDiscovery(settings, master, clusterService, threadPool); + ClusterService masterClusterService = createClusterService(threadPool, masterNode); + toClose.add(masterClusterService); + // TODO: clustername shouldn't be stored twice in cluster service, but for now, work around it + state = ClusterState.builder(masterClusterService.getClusterName()).nodes(state.nodes()).build(); + setState(masterClusterService, state); + ZenDiscovery masterZen = buildZenDiscovery(settings, masterTransport, masterClusterService, threadPool); + toClose.add(masterZen); + masterTransport.acceptIncomingRequests(); + + final MockTransportService otherTransport = MockTransportService.local(settings, Version.CURRENT, threadPool); + otherTransport.start(); + toClose.add(otherTransport); + DiscoveryNode otherNode = new DiscoveryNode("other", otherTransport.boundAddress().publishAddress(), Version.CURRENT); + otherTransport.setLocalNode(otherNode); + final ClusterState otherState = ClusterState.builder(masterClusterService.getClusterName()) + .nodes(DiscoveryNodes.builder().add(otherNode).localNodeId(otherNode.getId())).build(); + ClusterService otherClusterService = createClusterService(threadPool, masterNode); + toClose.add(otherClusterService); + setState(otherClusterService, otherState); + ZenDiscovery otherZen = buildZenDiscovery(settings, otherTransport, otherClusterService, threadPool); + toClose.add(otherZen); + 
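// accept incoming requests on both transports before wiring the two nodes together; + // the publish call below sends the new cluster state over these connections. + 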
otherTransport.acceptIncomingRequests(); + + + masterTransport.connectToNode(otherNode); + otherTransport.connectToNode(masterNode); // a new cluster state with a new discovery node (we will test if the cluster state // was updated by the presence of this node in NodesFaultDetection) - MockNode newNode = createMockNode("new_node", settings, null, threadPool, logger, nodes); - ClusterState newState = ClusterState.builder(state).incrementVersion().nodes( - DiscoveryNodes.builder(state.nodes()).add(newNode.discoveryNode).masterNodeId(master.discoveryNode.getId()) + ClusterState newState = ClusterState.builder(masterClusterService.state()).incrementVersion().nodes( + DiscoveryNodes.builder(state.nodes()).add(otherNode).masterNodeId(masterNode.getId()) ).build(); try { // publishing a new cluster state ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent("testing", newState, state); AssertingAckListener listener = new AssertingAckListener(newState.nodes().getSize() - 1); - expectedFDNodes = zenDiscovery.getFaultDetectionNodes(); - zenDiscovery.publish(clusterChangedEvent, listener); + expectedFDNodes = masterZen.getFaultDetectionNodes(); + masterZen.publish(clusterChangedEvent, listener); listener.await(1, TimeUnit.HOURS); // publish was a success, update expected FD nodes based on new cluster state - expectedFDNodes = fdNodesForState(newState, master.discoveryNode); + expectedFDNodes = fdNodesForState(newState, masterNode); } catch (Discovery.FailedToCommitClusterStateException e) { // not successful, so expectedFDNodes above should remain what it was originally assigned assertEquals(3, minMasterNodes); // ensure min master nodes is the higher value, otherwise we shouldn't fail } - assertEquals(expectedFDNodes, zenDiscovery.getFaultDetectionNodes()); + assertEquals(expectedFDNodes, masterZen.getFaultDetectionNodes()); } finally { - // clean close of transport service and publish action for each node - zenDiscovery.close(); - clusterService.close(); - for (MockNode curNode : nodes.values()) { - curNode.action.close(); - curNode.service.close(); - } + IOUtils.close(toClose); terminate(threadPool); } } - private ZenDiscovery buildZenDiscovery(Settings settings, MockNode master, ClusterService clusterService, ThreadPool threadPool) { + private ZenDiscovery buildZenDiscovery(Settings settings, TransportService service, ClusterService clusterService, ThreadPool threadPool) { ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); ZenPingService zenPingService = new ZenPingService(settings, Collections.emptySet()); ElectMasterService electMasterService = new ElectMasterService(settings); - ZenDiscovery zenDiscovery = new ZenDiscovery(settings, threadPool, master.service, clusterService, + ZenDiscovery zenDiscovery = new ZenDiscovery(settings, threadPool, service, clusterService, clusterSettings, zenPingService, electMasterService); zenDiscovery.start(); return zenDiscovery; diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index 1b0d6f63fd5..50ec06694fe 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -82,7 +82,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static 
org.hamcrest.Matchers.nullValue; -@TestLogging("discovery.zen.publish:TRACE") +@TestLogging("org.elasticsearch.discovery.zen.publish:TRACE") public class PublishClusterStateActionTests extends ESTestCase { private static final ClusterName CLUSTER_NAME = ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY); @@ -223,7 +223,6 @@ public class PublishClusterStateActionTests extends ESTestCase { public void tearDown() throws Exception { super.tearDown(); for (MockNode curNode : nodes.values()) { - curNode.action.close(); curNode.service.close(); } terminate(threadPool); diff --git a/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java b/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java index 9196a16060c..91c2655b3c2 100644 --- a/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java @@ -103,7 +103,7 @@ public class DocumentActionsIT extends ESIntegTestCase { logger.info("Get [type1/1] with script"); for (int i = 0; i < 5; i++) { - getResult = client().prepareGet("test", "type1", "1").setFields("name").execute().actionGet(); + getResult = client().prepareGet("test", "type1", "1").setStoredFields("name").execute().actionGet(); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); assertThat(getResult.isExists(), equalTo(true)); assertThat(getResult.getSourceAsBytes(), nullValue()); diff --git a/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java b/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java index 4aacb672999..528b03bc831 100644 --- a/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java +++ b/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java @@ -131,7 +131,7 @@ public class ExplainActionIT extends ESIntegTestCase { refresh(); ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1") .setQuery(QueryBuilders.matchAllQuery()) - .setFields("obj1.field1").get(); + .setStoredFields("obj1.field1").get(); assertNotNull(response); assertTrue(response.isMatch()); assertNotNull(response.getExplanation()); @@ -148,7 +148,7 @@ public class ExplainActionIT extends ESIntegTestCase { refresh(); response = client().prepareExplain(indexOrAlias(), "test", "1") .setQuery(QueryBuilders.matchAllQuery()) - .setFields("obj1.field1").setFetchSource(true).get(); + .setStoredFields("obj1.field1").setFetchSource(true).get(); assertNotNull(response); assertTrue(response.isMatch()); assertNotNull(response.getExplanation()); @@ -164,7 +164,7 @@ public class ExplainActionIT extends ESIntegTestCase { response = client().prepareExplain(indexOrAlias(), "test", "1") .setQuery(QueryBuilders.matchAllQuery()) - .setFields("obj1.field1", "obj1.field2").get(); + .setStoredFields("obj1.field1", "obj1.field2").get(); assertNotNull(response); assertTrue(response.isMatch()); String v1 = (String) response.getGetResult().field("obj1.field1").getValue(); diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index 052fa91d876..434536ac8d9 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -84,7 +84,7 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2")); logger.info("--> realtime get 1 (no source, implicit)"); - response = client().prepareGet(indexOrAlias(), "type1", 
"1").setFields(Strings.EMPTY_ARRAY).get(); + response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields(Strings.EMPTY_ARRAY).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); Set fields = new HashSet<>(response.getFields().keySet()); @@ -109,7 +109,7 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2")); logger.info("--> realtime fetch of field"); - response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").get(); + response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsBytes(), nullValue()); @@ -117,7 +117,8 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getField("field2"), nullValue()); logger.info("--> realtime fetch of field & source"); - response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").setFetchSource("field1", null).get(); + response = client().prepareGet(indexOrAlias(), "type1", "1") + .setStoredFields("field1").setFetchSource("field1", null).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsMap(), hasKey("field1")); @@ -143,7 +144,7 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2")); logger.info("--> realtime fetch of field (loaded from index)"); - response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").get(); + response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsBytes(), nullValue()); @@ -151,7 +152,8 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getField("field2"), nullValue()); logger.info("--> realtime fetch of field & source (loaded from index)"); - response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").setFetchSource(true).get(); + response = client().prepareGet(indexOrAlias(), "type1", "1") + .setStoredFields("field1").setFetchSource(true).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsBytes(), not(nullValue())); @@ -232,8 +234,8 @@ public class GetActionIT extends ESIntegTestCase { // multi get with specific field response = client().prepareMultiGet() - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").fields("field")) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "3").fields("field")) + .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").storedFields("field")) + .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "3").storedFields("field")) .get(); assertThat(response.getResponses().length, equalTo(2)); @@ -269,7 +271,7 @@ public class GetActionIT extends ESIntegTestCase { client().prepareIndex("test", "type2", "1") .setSource(jsonBuilder().startObject().array("field", "1", "2").endObject()).get(); - response = client().prepareGet("test", "type1", "1").setFields("field").get(); + response = client().prepareGet("test", "type1", "1").setStoredFields("field").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), 
equalTo("1")); assertThat(response.getType(), equalTo("type1")); @@ -281,7 +283,7 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); - response = client().prepareGet("test", "type2", "1").setFields("field").get(); + response = client().prepareGet("test", "type2", "1").setStoredFields("field").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getType(), equalTo("type2")); assertThat(response.getId(), equalTo("1")); @@ -294,7 +296,7 @@ public class GetActionIT extends ESIntegTestCase { // Now test values being fetched from stored fields. refresh(); - response = client().prepareGet("test", "type1", "1").setFields("field").get(); + response = client().prepareGet("test", "type1", "1").setStoredFields("field").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); @@ -304,7 +306,7 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); - response = client().prepareGet("test", "type2", "1").setFields("field").get(); + response = client().prepareGet("test", "type2", "1").setStoredFields("field").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); @@ -546,7 +548,7 @@ public class GetActionIT extends ESIntegTestCase { GetResponse getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1") .setRouting("1") - .setFields("field1") + .setStoredFields("field1") .get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField("field1").isMetadataField(), equalTo(false)); @@ -559,7 +561,7 @@ public class GetActionIT extends ESIntegTestCase { flush(); getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1") - .setFields("field1") + .setStoredFields("field1") .setRouting("1") .get(); assertThat(getResponse.isExists(), equalTo(true)); @@ -584,21 +586,18 @@ public class GetActionIT extends ESIntegTestCase { .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject()) .get(); - try { - client().prepareGet(indexOrAlias(), "my-type1", "1").setFields("field1").get(); - fail(); - } catch (IllegalArgumentException e) { - //all well - } + + IllegalArgumentException exc = + expectThrows(IllegalArgumentException.class, + () -> client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("field1").get()); + assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field")); flush(); - try { - client().prepareGet(indexOrAlias(), "my-type1", "1").setFields("field1").get(); - fail(); - } catch (IllegalArgumentException e) { - //all well - } + exc = + expectThrows(IllegalArgumentException.class, + () -> client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("field1").get()); + assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field")); } public void testGetFieldsComplexField() throws Exception { @@ -645,14 +644,14 @@ public class GetActionIT extends ESIntegTestCase { logger.info("checking real time retrieval"); String field = "field1.field2.field3.field4"; - GetResponse getResponse = client().prepareGet("my-index", "my-type1", "1").setFields(field).get(); + GetResponse getResponse = 
client().prepareGet("my-index", "my-type1", "1").setStoredFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); - getResponse = client().prepareGet("my-index", "my-type2", "1").setFields(field).get(); + getResponse = client().prepareGet("my-index", "my-type2", "1").setStoredFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); @@ -677,14 +676,14 @@ public class GetActionIT extends ESIntegTestCase { logger.info("checking post-flush retrieval"); - getResponse = client().prepareGet("my-index", "my-type1", "1").setFields(field).get(); + getResponse = client().prepareGet("my-index", "my-type1", "1").setStoredFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); - getResponse = client().prepareGet("my-index", "my-type2", "1").setFields(field).get(); + getResponse = client().prepareGet("my-index", "my-type2", "1").setStoredFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); @@ -711,7 +710,7 @@ public class GetActionIT extends ESIntegTestCase { index("test", "my-type1", "1", "some_field", "some text"); refresh(); - GetResponse getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1").setFields("_all").get(); + GetResponse getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("_all").get(); assertNotNull(getResponse.getField("_all").getValue()); assertThat(getResponse.getField("_all").getValue().toString(), equalTo("some text")); } @@ -948,12 +947,12 @@ public class GetActionIT extends ESIntegTestCase { private void assertGetFieldException(String index, String type, String docId, String field) { try { - client().prepareGet().setIndex(index).setType(type).setId(docId).setFields(field).get(); + client().prepareGet().setIndex(index).setType(type).setId(docId).setStoredFields(field); fail(); } catch (ElasticsearchException e) { assertTrue(e.getMessage().contains("You can only get this field after refresh() has been called.")); } - MultiGetResponse multiGetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item(index, type, docId).fields(field)).get(); + MultiGetResponse multiGetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item(index, type, docId).storedFields(field)).get(); assertNull(multiGetResponse.getResponses()[0].getResponse()); assertTrue(multiGetResponse.getResponses()[0].getFailure().getMessage().contains("You can only get this field after refresh() has been called.")); } @@ -993,7 +992,7 @@ public class GetActionIT extends ESIntegTestCase { } private GetResponse multiGetDocument(String index, String type, String docId, String 
field, @Nullable String routing) { - MultiGetRequest.Item getItem = new MultiGetRequest.Item(index, type, docId).fields(field); + MultiGetRequest.Item getItem = new MultiGetRequest.Item(index, type, docId).storedFields(field); if (routing != null) { getItem.routing(routing); } @@ -1004,7 +1003,7 @@ public class GetActionIT extends ESIntegTestCase { } private GetResponse getDocument(String index, String type, String docId, String field, @Nullable String routing) { - GetRequestBuilder getRequestBuilder = client().prepareGet().setIndex(index).setType(type).setId(docId).setFields(field); + GetRequestBuilder getRequestBuilder = client().prepareGet().setIndex(index).setType(type).setId(docId).setStoredFields(field); if (routing != null) { getRequestBuilder.setRouting(routing); } diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index 989c60fc916..a97cd76b80b 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -184,7 +184,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { } - @TestLogging("gateway:TRACE") + @TestLogging("org.elasticsearch.gateway:TRACE") public void testIndexWithFewDocuments() throws Exception { final Path dataPath = createTempDir(); Settings nodeSettings = nodeSettings(dataPath); @@ -599,7 +599,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { // deleting the index and hence, deleting all the shard data for the index, the test // failure still showed some Lucene files in the data directory for that index. Not sure // why that is, so turning on more logging here. - @TestLogging("indices:TRACE,env:TRACE") + @TestLogging("org.elasticsearch.indices:TRACE,org.elasticsearch.env:TRACE") public void testShadowReplicaNaturalRelocation() throws Exception { Path dataPath = createTempDir(); Settings nodeSettings = nodeSettings(dataPath); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 1c35ea1f281..5f009800996 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -61,7 +61,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.logging.PrefixMessageFactory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; @@ -1510,18 +1509,18 @@ public class InternalEngineTests extends ESTestCase { public boolean sawIndexWriterIFDMessage; public MockAppender(final String name) throws IllegalAccessException { - super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], true, null, null), null); + super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), null); } @Override public void append(LogEvent event) { final String formattedMessage = event.getMessage().getFormattedMessage(); - if (event.getLevel() == Level.TRACE && formattedMessage.contains("[index][1] ")) { - if (event.getLoggerName().endsWith("lucene.iw") && + if (event.getLevel() == Level.TRACE && 
event.getMarker().getName().contains("[index][1] ")) { + if (event.getLoggerName().endsWith(".IW") && formattedMessage.contains("IW: apply all deletes during flush")) { sawIndexWriterMessage = true; } - if (event.getLoggerName().endsWith("lucene.iw.ifd")) { + if (event.getLoggerName().endsWith(".IFD")) { sawIndexWriterIFDMessage = true; } } @@ -1556,7 +1555,7 @@ public class InternalEngineTests extends ESTestCase { } finally { Loggers.removeAppender(rootLogger, mockAppender); - Loggers.setLevel(rootLogger, savedLevel.toString()); + Loggers.setLevel(rootLogger, savedLevel); } } @@ -1565,16 +1564,7 @@ public class InternalEngineTests extends ESTestCase { assumeFalse("who tests the tester?", VERBOSE); MockAppender mockAppender = new MockAppender("testIndexWriterIFDInfoStream"); - final Logger iwIFDLogger; - if (LogManager.getContext(false).hasLogger("org.elasticsearch.index.engine.lucene.iw.ifd", new PrefixMessageFactory())) { - // Works when running this test inside Intellij: - iwIFDLogger = LogManager.getLogger("org.elasticsearch.index.engine.lucene.iw.ifd"); - assertNotNull(iwIFDLogger); - } else { - // Works when running this test from command line: - assertTrue(LogManager.getContext(false).hasLogger("index.engine.lucene.iw.ifd", new PrefixMessageFactory())); - iwIFDLogger = LogManager.getLogger("index.engine.lucene.iw.ifd"); - } + final Logger iwIFDLogger = Loggers.getLogger("org.elasticsearch.index.engine.Engine.IFD"); Loggers.addAppender(iwIFDLogger, mockAppender); Loggers.setLevel(iwIFDLogger, Level.DEBUG); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index a36f6188713..55f6b0b52cd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -293,7 +293,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates").startObject().startObject("georule") .field("match", "foo*") - .startObject("mapping").field("type", "geo_point").endObject() + .startObject("mapping").field("type", "geo_point").field("doc_values", false).endObject() .endObject().endObject().endArray().endObject().endObject().string(); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); @@ -373,7 +373,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { public void testMappedGeoPointArray() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("foo").field("type", "geo_point") + .startObject("properties").startObject("foo").field("type", "geo_point").field("doc_values", false) .endObject().endObject().endObject().endObject().string(); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java index 6f76b1c59b8..7fa0a3c1161 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java @@ -25,6 +25,8 @@ import 
org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.geo.GeoHashUtils; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; @@ -39,6 +41,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; +import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -83,8 +86,10 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("field.point"), notNullValue()); if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0))); + } else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + GeoPoint point = new GeoPoint().resetFromIndexableField(doc.rootDoc().getField("field.point")); + assertThat(point.lat(), closeTo(42.0, 1e-5)); + assertThat(point.lon(), closeTo(51.0, 1e-5)); } assertThat(doc.rootDoc().getField("field.shape"), notNullValue()); @@ -141,8 +146,12 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("field.point"), notNullValue()); if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); - } else { + } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0))); + } else { + GeoPoint point = new GeoPoint().resetFromIndexableField(doc.rootDoc().getField("field.point")); + assertThat(point.lat(), closeTo(42.0, 1E-5)); + assertThat(point.lon(), closeTo(51.0, 1E-5)); } IndexableField shape = doc.rootDoc().getField("field.shape"); @@ -211,7 +220,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("field.point"), notNullValue()); if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); - } else { + } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0))); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java index 2e75124c29a..1efae7ccb26 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java @@ -59,6 +59,7 @@ public class ExternalMapper extends FieldMapper { private BooleanFieldMapper.Builder boolBuilder = new BooleanFieldMapper.Builder(Names.FIELD_BOOL); private GeoPointFieldMapper.Builder pointBuilder = new GeoPointFieldMapper.Builder(Names.FIELD_POINT); private LegacyGeoPointFieldMapper.Builder legacyPointBuilder = new LegacyGeoPointFieldMapper.Builder(Names.FIELD_POINT); + private LatLonPointFieldMapper.Builder latLonPointBuilder = new LatLonPointFieldMapper.Builder(Names.FIELD_POINT); private 
GeoShapeFieldMapper.Builder shapeBuilder = new GeoShapeFieldMapper.Builder(Names.FIELD_SHAPE); private Mapper.Builder stringBuilder; private String generatedValue; @@ -82,8 +83,14 @@ public class ExternalMapper extends FieldMapper { context.path().add(name); BinaryFieldMapper binMapper = binBuilder.build(context); BooleanFieldMapper boolMapper = boolBuilder.build(context); - BaseGeoPointFieldMapper pointMapper = (context.indexCreatedVersion().before(Version.V_2_2_0)) ? - legacyPointBuilder.build(context) : pointBuilder.build(context); + BaseGeoPointFieldMapper pointMapper; + if (context.indexCreatedVersion().before(Version.V_2_2_0)) { + pointMapper = legacyPointBuilder.build(context); + } else if (context.indexCreatedVersion().onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + pointMapper = latLonPointBuilder.build(context); + } else { + pointMapper = pointBuilder.build(context); + } GeoShapeFieldMapper shapeMapper = shapeBuilder.build(context); FieldMapper stringMapper = (FieldMapper)stringBuilder.build(context); context.path().remove(); @@ -198,7 +205,7 @@ public class ExternalMapper extends FieldMapper { MultiFields multiFieldsUpdate = multiFields.updateFieldType(fullNameToFieldType); BinaryFieldMapper binMapperUpdate = (BinaryFieldMapper) binMapper.updateFieldType(fullNameToFieldType); BooleanFieldMapper boolMapperUpdate = (BooleanFieldMapper) boolMapper.updateFieldType(fullNameToFieldType); - GeoPointFieldMapper pointMapperUpdate = (GeoPointFieldMapper) pointMapper.updateFieldType(fullNameToFieldType); + BaseGeoPointFieldMapper pointMapperUpdate = (BaseGeoPointFieldMapper) pointMapper.updateFieldType(fullNameToFieldType); GeoShapeFieldMapper shapeMapperUpdate = (GeoShapeFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType); TextFieldMapper stringMapperUpdate = (TextFieldMapper) stringMapper.updateFieldType(fullNameToFieldType); if (update == this diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java index 342fa247d73..c75871e43b3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java @@ -112,7 +112,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { assertThat(response.getHits().totalHits(), equalTo((long) 1)); response = client().prepareSearch("test-idx") - .setPostFilter(QueryBuilders.geoDistanceRangeQuery("field.point", 42.0, 51.0).to("1km")) + .setPostFilter(QueryBuilders.geoDistanceQuery("field.point").point(42.0, 51.0).distance("1km")) .execute().actionGet(); assertThat(response.getHits().totalHits(), equalTo((long) 1)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index b5256761f6e..7c4acb44039 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.index.mapper; -import org.apache.lucene.index.IndexableField; import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; @@ -31,17 +30,13 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; 
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.geo.RandomGeoGenerator; +import org.hamcrest.CoreMatchers; import java.util.Collection; import java.util.List; @@ -67,12 +62,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { return pluginList(InternalSettingsPlugin.class); } - public void testLatLonValues() throws Exception { + public void testLegacyLatLonValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -96,13 +91,13 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } } - public void testLatLonValuesWithGeohash() throws Exception { + public void testLegacyLatLonValuesWithGeohash() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject() .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -121,12 +116,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } } - public void testLatLonInOneValueWithGeohash() throws Exception { + public void testLegacyLatLonInOneValueWithGeohash() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", 
settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -145,12 +140,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } } - public void testGeoHashIndexValue() throws Exception { + public void testLegacyGeoHashIndexValue() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -170,11 +165,13 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } public void testGeoHashValue() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true); + } + String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -184,13 +181,15 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - assertThat(doc.rootDoc().get("point"), notNullValue()); + assertThat(doc.rootDoc().getField("point"), notNullValue()); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); + assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); + } } - public void testNormalizeLatLonValuesDefault() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + public void testNormalizeLegacyLatLonValuesDefault() throws Exception { + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); // default to normalize XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); @@ -239,8 +238,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } } - public void testValidateLatLonValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + public void 
testLegacyValidateLatLonValues() throws Exception { + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true); if (version.before(Version.V_2_2_0)) { @@ -338,8 +337,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } } - public void testNoValidateLatLonValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + public void testNoValidateLegacyLatLonValues() throws Exception { + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true); if (version.before(Version.V_2_2_0)) { @@ -400,11 +399,13 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } public void testLatLonValuesStored() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("store", true).endObject().endObject().endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true); + } + String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -414,24 +415,29 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3)); - assertThat(doc.rootDoc().getField("point.geohash"), nullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + assertThat(doc.rootDoc().getField("point"), notNullValue()); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); + assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2)); + assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); + assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3)); + 
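// geohash is not enabled in this mapping, so no geohash sub-field should have been indexed + 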
assertThat(doc.rootDoc().getField("point.geohash"), nullValue()); + if (version.before(Version.V_2_2_0)) { + assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); + } else { + assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + } } } public void testArrayLatLonValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true); + } + String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -444,28 +450,14 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); - if (version.onOrAfter(Version.V_5_0_0_alpha2)) { - assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(4)); - assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(4)); - - // point field for 1st value - assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); - // stored field for 1st value - assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.3)); - // indexed hash - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - - // point field for 2nd value - assertThat(doc.rootDoc().getFields("point.lat")[2].numericValue().doubleValue(), equalTo(1.4)); - assertThat(doc.rootDoc().getFields("point.lon")[2].numericValue().doubleValue(), equalTo(1.5)); - // stored field for 2nd value - assertThat(doc.rootDoc().getFields("point.lat")[3].numericValue().doubleValue(), equalTo(1.4)); - assertThat(doc.rootDoc().getFields("point.lon")[3].numericValue().doubleValue(), equalTo(1.5)); - // indexed hash - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); + // doc values are enabled by default, but in this test we disable them; we should only have 2 points + assertThat(doc.rootDoc().getFields("point"), notNullValue()); + if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + assertThat(doc.rootDoc().getFields("point").length, equalTo(4)); } else { + assertThat(doc.rootDoc().getFields("point").length, equalTo(2)); + } + if (version.before(Version.V_5_0_0_alpha2)) { assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); 
assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); @@ -486,13 +478,16 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } public void testLatLonInOneValue() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true); + } + String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", + new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -500,49 +495,61 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + assertThat(doc.rootDoc().getField("point"), notNullValue()); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); + assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); + if (version.before(Version.V_2_2_0)) { + assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); + } else { + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + } } } public void testLatLonInOneValueStored() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", true).endObject().endObject() - .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true); + } + String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", 
settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", + new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("point", "1.2,1.3") .endObject() .bytes()); - - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3)); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + assertThat(doc.rootDoc().getField("point"), notNullValue()); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); + assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2)); + assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); + assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3)); + if (version.before(Version.V_2_2_0)) { + assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); + } else { + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), + equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + } } } public void testLatLonInOneValueArray() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true); + } + String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", + new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -553,41 +560,39 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); + // doc values are enabled by default, but in this test we disable them; we should only have 2 points + assertThat(doc.rootDoc().getFields("point"), notNullValue()); if (version.before(Version.V_5_0_0_alpha2)) { + assertThat(doc.rootDoc().getFields("point").length, equalTo(2)); assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); 
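// each "lat,lon" string in the array should have been parsed into paired lat/lon sub-field entries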
assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); - } else { - IndexableField[] latPoints = doc.rootDoc().getFields("point.lat"); - IndexableField[] lonPoints = doc.rootDoc().getFields("point.lon"); - assertThat(latPoints.length, equalTo(4)); - assertThat(lonPoints.length, equalTo(4)); - assertThat(latPoints[0].numericValue().doubleValue(), equalTo(1.2)); - assertThat(lonPoints[0].numericValue().doubleValue(), equalTo(1.3)); - assertThat(latPoints[2].numericValue().doubleValue(), equalTo(1.4)); - assertThat(lonPoints[2].numericValue().doubleValue(), equalTo(1.5)); + } else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + assertThat(doc.rootDoc().getFields("point").length, equalTo(4)); } if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3")); - } else { + } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5")); - } else { + } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); } } public void testLonLatArray() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject() - .endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true); + } + String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -597,22 +602,27 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + assertThat(doc.rootDoc().getField("point"), notNullValue()); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); + 
assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); + if (version.before(Version.V_2_2_0)) { + assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); + } else { + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + } } } public void testLonLatArrayDynamic() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startArray("dynamic_templates").startObject() - .startObject("point").field("match", "point*").startObject("mapping").field("type", "geo_point") - .field("lat_lon", true).endObject().endObject().endObject().endArray().endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startArray("dynamic_templates").startObject().startObject("point").field("match", "point*") + .startObject("mapping").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true); + } + String mapping = xContentBuilder.endObject().endObject().endObject().endArray().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -622,21 +632,26 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + assertThat(doc.rootDoc().getField("point"), notNullValue()); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); + assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); + if (version.before(Version.V_2_2_0)) { + assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); + } else { + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + } } } public void testLonLatArrayStored() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true); + } + String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); Settings settings = 
Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -646,23 +661,31 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3)); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); + assertThat(doc.rootDoc().getField("point"), notNullValue()); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); + assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2)); + assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); + assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3)); + if (version.before(Version.V_2_2_0)) { + assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); + } else { + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + } } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); + assertThat(doc.rootDoc().getFields("point").length, equalTo(3)); } } public void testLonLatArrayArrayStored() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("store", true).endObject().endObject().endObject().endObject().string(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true); + } + String mapping = xContentBuilder.field("store", true).field("doc_values", false).endObject().endObject() + .endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -675,7 +698,9 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); + assertThat(doc.rootDoc().getFields("point"), notNullValue()); if (version.before(Version.V_5_0_0_alpha2)) { + assertThat(doc.rootDoc().getFields("point").length, CoreMatchers.equalTo(2)); assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); @@ -692,19 +717,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } else { assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), 
equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); } - } else { - assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(4)); - assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(4)); - assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); - assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.3)); - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - assertThat(doc.rootDoc().getFields("point.lat")[2].numericValue().doubleValue(), equalTo(1.4)); - assertThat(doc.rootDoc().getFields("point.lat")[3].numericValue().doubleValue(), equalTo(1.4)); - assertThat(doc.rootDoc().getFields("point.lon")[2].numericValue().doubleValue(), equalTo(1.5)); - assertThat(doc.rootDoc().getFields("point.lon")[3].numericValue().doubleValue(), equalTo(1.5)); - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); + } else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + assertThat(doc.rootDoc().getFields("point").length, CoreMatchers.equalTo(4)); } } @@ -717,11 +731,35 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser(); // test deprecation exceptions on newly created indexes - try { - String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("validate", true).endObject().endObject() + if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + try { + String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject() .endObject().endObject().string(); + parser.parse("type", new CompressedXContent(normalizeMapping)); + } catch (MapperParsingException e) { + assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [lat_lon : true]"); + } + } + + if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + try { + String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject() + .endObject().endObject().string(); + parser.parse("type", new CompressedXContent(normalizeMapping)); + } catch (MapperParsingException e) { + assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [geohash : true]"); + } + } + + try { + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); + } + String validateMapping = xContentBuilder.field("validate", 
true).endObject().endObject().endObject().endObject().string(); parser.parse("type", new CompressedXContent(validateMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { @@ -729,10 +767,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } try { - String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("validate_lat", true).endObject().endObject() - .endObject().endObject().string(); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); + } + String validateMapping = xContentBuilder.field("validate_lat", true).endObject().endObject().endObject().endObject().string(); parser.parse("type", new CompressedXContent(validateMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { @@ -740,10 +780,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } try { - String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("validate_lon", true).endObject().endObject() - .endObject().endObject().string(); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); + } + String validateMapping = xContentBuilder.field("validate_lon", true).endObject().endObject().endObject().endObject().string(); parser.parse("type", new CompressedXContent(validateMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { @@ -752,10 +794,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { // test deprecated normalize try { - String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("normalize", true).endObject().endObject() - .endObject().endObject().string(); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); + } + String normalizeMapping = xContentBuilder.field("normalize", true).endObject().endObject().endObject().endObject().string(); parser.parse("type", new CompressedXContent(normalizeMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { @@ -763,10 +807,12 @@ public class 
GeoPointFieldMapperTests extends ESSingleNodeTestCase { } try { - String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("normalize_lat", true).endObject().endObject() - .endObject().endObject().string(); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); + } + String normalizeMapping = xContentBuilder.field("normalize_lat", true).endObject().endObject().endObject().endObject().string(); parser.parse("type", new CompressedXContent(normalizeMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { @@ -774,10 +820,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } try { - String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("normalize_lon", true).endObject().endObject() - .endObject().endObject().string(); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point"); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); + } + String normalizeMapping = xContentBuilder.field("normalize_lon", true).endObject().endObject().endObject().endObject().string(); parser.parse("type", new CompressedXContent(normalizeMapping)); fail("process completed successfully when " + MapperParsingException.class.getName() + " expected"); } catch (MapperParsingException e) { @@ -785,8 +833,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } } - public void testGeoPointMapperMerge() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + public void testLegacyGeoPointMapperMerge() throws Exception { + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) @@ -812,7 +860,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); } - public void testGeoHashSearch() throws Exception { + public void testLegacyGeoHashSearch() throws Exception { // create a geo_point mapping with geohash enabled and random (between 1 and 12) geohash precision int precision = randomIntBetween(1, 12); String mapping = XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location") @@ -820,7 +868,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().string(); // 
create index and add a test point (dr5regy6rc6z) - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha1); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings) .addMapping("pin", mapping); @@ -837,7 +885,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { assertEquals("dr5regy6rc6y".substring(0, precision), m.get("location.geohash").value()); } - public void testGeoHashSearchWithPrefix() throws Exception { + public void testLegacyGeoHashSearchWithPrefix() throws Exception { // create a geo_point mapping with geohash enabled and random (between 1 and 12) geohash precision int precision = randomIntBetween(1, 12); String mapping = XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location") @@ -845,7 +893,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().endObject().string(); // create index and add a test point (dr5regy6rc6z) - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings) .addMapping("pin", mapping); @@ -870,9 +918,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { public void testMultiField() throws Exception { int numDocs = randomIntBetween(10, 100); String mapping = XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location") - .field("type", "geo_point").startObject("fields") - .startObject("geohash").field("type", "geo_point").field("geohash_precision", 12).field("geohash_prefix", true).endObject() - .startObject("latlon").field("type", "geo_point").field("lat_lon", true).endObject().endObject() + .field("type", "geo_point") + .startObject("fields") + .startObject("geohash").field("type", "keyword").endObject() // test geohash as keyword + .startObject("latlon").field("type", "string").endObject() // test latlon as string + .endObject() .endObject().endObject().endObject().endObject().string(); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test") .addMapping("pin", mapping); @@ -886,6 +936,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .field("lon", pt.lon()).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get(); } + // TODO these tests are bogus and need to be fixed // query by geohash subfield SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); assertEquals(numDocs, searchResponse.getHits().totalHits()); @@ -899,7 +950,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { public void testEmptyName() throws Exception { // after 5.x String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("").field("type", "geo_point").field("lat_lon", true).endObject().endObject() +
.startObject("properties").startObject("").field("type", "geo_point").endObject().endObject() .endObject().endObject().string(); Version version = Version.CURRENT; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java index 6e6222ac871..fac30002fbb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java @@ -18,16 +18,13 @@ */ package org.elasticsearch.index.mapper; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.StringFieldMapper; +import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper.LegacyGeoPointFieldType; import org.junit.Before; public class GeoPointFieldTypeTests extends FieldTypeTestCase { @Override protected MappedFieldType createDefaultFieldType() { - return new BaseGeoPointFieldMapper.GeoPointFieldType(); + return new LegacyGeoPointFieldType(); } @Before @@ -35,13 +32,14 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase { addModifier(new Modifier("geohash", false) { @Override public void modify(MappedFieldType ft) { - ((BaseGeoPointFieldMapper.GeoPointFieldType)ft).setGeoHashEnabled(new StringFieldMapper.StringFieldType(), 1, true); + ((LegacyGeoPointFieldType)ft).setGeoHashEnabled(new StringFieldMapper.StringFieldType(), 1, true); } }); addModifier(new Modifier("lat_lon", false) { @Override public void modify(MappedFieldType ft) { - ((BaseGeoPointFieldMapper.GeoPointFieldType)ft).setLatLonEnabled(new LegacyDoubleFieldMapper.DoubleFieldType(), new LegacyDoubleFieldMapper.DoubleFieldType()); + ((LegacyGeoPointFieldType)ft).setLatLonEnabled(new LegacyDoubleFieldMapper.DoubleFieldType(), + new LegacyDoubleFieldMapper.DoubleFieldType()); } }); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GeohashMappingGeoPointTests.java deleted file mode 100644 index 05581e79021..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/GeohashMappingGeoPointTests.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; - -import java.util.Collection; - -import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -/** - * - */ -public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { - - @Override - protected Collection<Class<? extends Plugin>> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testLatLonValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false) - .endObject().endObject().endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject() - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("point.lat"), nullValue()); - assertThat(doc.rootDoc().getField("point.lon"), nullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - } - - public void testLatLonInOneValue() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).endObject().endObject() - .endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", "1.2,1.3") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("point.lat"), nullValue()); - assertThat(doc.rootDoc().getField("point.lon"),
nullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - } - - public void testGeoHashValue() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) - .endObject().endObject().endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", stringEncode(1.3, 1.2)) - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("point.lat"), nullValue()); - assertThat(doc.rootDoc().getField("point.lon"), nullValue()); - assertThat(doc.rootDoc().getBinaryValue("point.geohash"), equalTo(new BytesRef(stringEncode(1.3, 1.2)))); - assertThat(doc.rootDoc().get("point"), notNullValue()); - } - - public void testGeoHashPrecisionAsInteger() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) - .field("geohash_precision", 10).endObject().endObject().endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); - assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class)); - BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper; - assertThat(geoPointFieldMapper.fieldType().geoHashPrecision(), is(10)); - } - - public void testGeoHashPrecisionAsLength() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("geohash_precision", "5m").endObject().endObject() - .endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); - assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class)); - BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper; - assertThat(geoPointFieldMapper.fieldType().geoHashPrecision(), is(10)); - } - - public void testNullValue() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - 
.startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject() - .endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", (Object) null) - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("point"), nullValue()); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyGeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyGeohashMappingGeoPointTests.java new file mode 100644 index 00000000000..426114cb389 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/LegacyGeohashMappingGeoPointTests.java @@ -0,0 +1,104 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper.LegacyGeoPointFieldType; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.VersionUtils; + +import java.util.Collection; + +import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +/** + * + */ +public class LegacyGeohashMappingGeoPointTests extends ESSingleNodeTestCase { + + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + + public void testGeoHashValue() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) + .endObject().endObject().endObject().endObject().string(); + + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0); + Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser() + .parse("type", new CompressedXContent(mapping)); + + ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("point", stringEncode(1.3, 1.2)) + .endObject() + .bytes()); + + assertThat(doc.rootDoc().getField("point.lat"), nullValue()); + assertThat(doc.rootDoc().getField("point.lon"), nullValue()); + assertThat(doc.rootDoc().getField("point.geohash").stringValue(), equalTo(stringEncode(1.3, 1.2))); + assertThat(doc.rootDoc().get("point"), notNullValue()); + } + + public void testGeoHashPrecisionAsInteger() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) + .field("geohash_precision", 10).endObject().endObject().endObject().endObject().string(); + + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0); + Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser() + .parse("type", new CompressedXContent(mapping)); + FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); + assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class)); + BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper; + assertThat(((LegacyGeoPointFieldType)geoPointFieldMapper.fieldType()).geoHashPrecision(), is(10)); + } + + public void testGeoHashPrecisionAsLength() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) +
.field("geohash_precision", "5m").endObject().endObject() + .endObject().endObject().string(); + + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0); + Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); + DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser() + .parse("type", new CompressedXContent(mapping)); + FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); + assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class)); + BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper; + assertThat(((LegacyGeoPointFieldType)geoPointFieldMapper.fieldType()).geoHashPrecision(), is(10)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java index e11f0b90e85..8711ead6edf 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java @@ -128,7 +128,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase { .setQuery(constantScoreQuery(geoDistanceQuery("a").point(51, 19).distance(50, DistanceUnit.KILOMETERS))) .get(); assertThat(countResponse.getHits().totalHits(), equalTo(1L)); - countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", point.toString())).get(); + countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", point.geohash())).get(); assertThat(countResponse.getHits().totalHits(), equalTo(1L)); } diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java index ea020b5f487..9c9a80e63ca 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -266,8 +266,10 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase 0); - super.testToQuery(); + if (createShardContext().indexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + super.testToQuery(); + } } public void testNullFieldName() { @@ -254,6 +257,11 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase 0); - super.testMustRewrite(); + if (createShardContext().indexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + super.testMustRewrite(); + } } public void testIgnoreUnmapped() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java index dc518b9b76a..8f28d60206e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.index.query; import com.vividsolutions.jts.geom.Coordinate; +import org.apache.lucene.document.LatLonPoint; +import org.apache.lucene.geo.Polygon; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery; @@ -30,6 +32,7 @@ import org.elasticsearch.common.geo.GeoUtils; 
import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.search.geo.GeoPolygonQuery; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.geo.RandomShapeGenerator; @@ -68,9 +71,10 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygonQueryBuilder> { @Override public void testToQuery() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - super.testToQuery(); + if (createShardContext().indexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + super.testToQuery(); + } } private static List<GeoPoint> randomPolygon() { @@ -294,7 +300,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygonQueryBuilder> { diff --git a/core/src/test/java/org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java --- a/core/src/test/java/org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java @Override public void testToQuery() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - super.testToQuery(); + Version version = createShardContext().indexVersionCreated(); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + super.testToQuery(); + } } public void testNullField() { @@ -140,7 +145,10 @@ public class GeohashCellQueryBuilderTests extends AbstractQueryTestCase<GeohashCellQuery.Builder> { @Override public void testMustRewrite() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - super.testMustRewrite(); + Version version = createShardContext().indexVersionCreated(); + if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { + super.testMustRewrite(); + } } public void testIgnoreUnmapped() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index ea6fa65dddb..9cc19928eb1 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -431,4 +431,22 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuilder> { ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(shortJson)); assertEquals("[match] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage()); } + + public void testParseFailsWithTermsArray() throws Exception { + String json1 = "{\n" + + " \"match\" : {\n" + + " \"message1\" : {\n" + + " \"query\" : [\"term1\", \"term2\"]\n" + + " }\n" + + " }\n" + + "}"; + expectThrows(ParsingException.class, () -> parseQuery(json1)); + + String json2 = "{\n" + + " \"match\" : {\n" + + " \"message1\" : [\"term1\", \"term2\"]\n" + + " }\n" + + "}"; + expectThrows(IllegalStateException.class, () -> parseQuery(json2)); + } } diff --git a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index f122d66ebe3..e6bda8cec21 100644 --- a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -25,8 +25,10 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ToParentBlockJoinQuery; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import
org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.internal.SearchContext; @@ -49,6 +51,9 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBuilder> { diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java --- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json)); assertEquals("[range] query doesn't support multiple fields, found [age] and [price]", e.getMessage()); } + + public void testParseFailsWithMultipleFieldsWhenOneIsDate() throws IOException { + String json = + "{\n" + + " \"range\": {\n" + + " \"age\": {\n" + + " \"gte\": 30,\n" + + " \"lte\": 40\n" + + " },\n" + + " \"" + DATE_FIELD_NAME + "\": {\n" + + " \"gte\": \"2016-09-13 05:01:14\"\n" + + " }\n" + + " }\n" + + " }"; + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json)); + assertEquals("[range] query doesn't support multiple fields, found [age] and [" + DATE_FIELD_NAME + "]", e.getMessage()); + } } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 269694ed5ef..b5d3d69705a 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -59,6 +59,7 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.env.NodeEnvironment; @@ -109,6 +110,7 @@ import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import static java.util.Collections.emptyMap; @@ -121,12 +123,27 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.nullValue; /** * Simple unit-test IndexShard related operations. */ public class IndexShardTests extends IndexShardTestCase { + public static ShardStateMetaData load(Logger logger, Path... shardPaths) throws IOException { + return ShardStateMetaData.FORMAT.loadLatestState(logger, shardPaths); + } + + public static void write(ShardStateMetaData shardStateMetaData, + Path...
shardPaths) throws IOException { + ShardStateMetaData.FORMAT.write(shardStateMetaData, shardPaths); + } + + public static Engine getEngineFromShard(IndexShard shard) { + return shard.getEngineOrNull(); + } + public void testWriteShardState() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { ShardId id = new ShardId("foo", "fooUUID", 1); @@ -323,10 +340,10 @@ public class IndexShardTests extends IndexShardTestCase { } case 2: { // relocation source - indexShard = newStartedShard(false); + indexShard = newStartedShard(true); ShardRouting routing = indexShard.routingEntry(); routing = TestShardRouting.newShardRouting(routing.shardId(), routing.currentNodeId(), "otherNode", - false, ShardRoutingState.RELOCATING, AllocationId.newRelocation(routing.allocationId())); + true, ShardRoutingState.RELOCATING, AllocationId.newRelocation(routing.allocationId())); indexShard.updateRoutingEntry(routing); indexShard.relocated("test"); break; @@ -371,15 +388,6 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(indexShard); } - public static ShardStateMetaData load(Logger logger, Path... shardPaths) throws IOException { - return ShardStateMetaData.FORMAT.loadLatestState(logger, shardPaths); - } - - public static void write(ShardStateMetaData shardStateMetaData, - Path... shardPaths) throws IOException { - ShardStateMetaData.FORMAT.write(shardStateMetaData, shardPaths); - } - public void testAcquireIndexCommit() throws IOException { final IndexShard shard = newStartedShard(); int numDocs = randomInt(20); @@ -443,7 +451,6 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(newShard); } - public void testAsyncFsync() throws InterruptedException, IOException { IndexShard shard = newStartedShard(); Semaphore semaphore = new Semaphore(Integer.MAX_VALUE); @@ -500,7 +507,6 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(test); } - public void testShardStats() throws IOException { IndexShard shard = newStartedShard(); ShardStats stats = new ShardStats(shard.routingEntry(), shard.shardPath(), @@ -662,6 +668,7 @@ public class IndexShardTests extends IndexShardTestCase { public void testLockingBeforeAndAfterRelocated() throws Exception { final IndexShard shard = newStartedShard(true); + shard.updateRoutingEntry(ShardRoutingHelper.relocate(shard.routingEntry(), "other_node")); CountDownLatch latch = new CountDownLatch(1); Thread recoveryThread = new Thread(() -> { latch.countDown(); @@ -692,6 +699,7 @@ public class IndexShardTests extends IndexShardTestCase { public void testDelayedOperationsBeforeAndAfterRelocated() throws Exception { final IndexShard shard = newStartedShard(true); + shard.updateRoutingEntry(ShardRoutingHelper.relocate(shard.routingEntry(), "other_node")); Thread recoveryThread = new Thread(() -> { try { shard.relocated("simulated recovery"); @@ -725,6 +733,7 @@ public class IndexShardTests extends IndexShardTestCase { public void testStressRelocated() throws Exception { final IndexShard shard = newStartedShard(true); + shard.updateRoutingEntry(ShardRoutingHelper.relocate(shard.routingEntry(), "other_node")); final int numThreads = randomIntBetween(2, 4); Thread[] indexThreads = new Thread[numThreads]; CountDownLatch allPrimaryOperationLocksAcquired = new CountDownLatch(numThreads); @@ -776,6 +785,75 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(shard); } + public void testRelocatedShardCanNotBeRevived() throws IOException, InterruptedException { + final IndexShard shard = 
newStartedShard(true);
+        final ShardRouting originalRouting = shard.routingEntry();
+        shard.updateRoutingEntry(ShardRoutingHelper.relocate(originalRouting, "other_node"));
+        shard.relocated("test");
+        expectThrows(IllegalIndexShardStateException.class, () -> shard.updateRoutingEntry(originalRouting));
+        closeShards(shard);
+    }
+
+    public void testShardCanNotBeMarkedAsRelocatedIfRelocationCancelled() throws IOException, InterruptedException {
+        final IndexShard shard = newStartedShard(true);
+        final ShardRouting originalRouting = shard.routingEntry();
+        shard.updateRoutingEntry(ShardRoutingHelper.relocate(originalRouting, "other_node"));
+        shard.updateRoutingEntry(originalRouting);
+        expectThrows(IllegalIndexShardStateException.class, () -> shard.relocated("test"));
+        closeShards(shard);
+    }
+
+    public void testRelocatedShardCanNotBeRevivedConcurrently() throws IOException, InterruptedException, BrokenBarrierException {
+        final IndexShard shard = newStartedShard(true);
+        final ShardRouting originalRouting = shard.routingEntry();
+        shard.updateRoutingEntry(ShardRoutingHelper.relocate(originalRouting, "other_node"));
+        CyclicBarrier cyclicBarrier = new CyclicBarrier(3);
+        AtomicReference<Exception> relocationException = new AtomicReference<>();
+        Thread relocationThread = new Thread(new AbstractRunnable() {
+            @Override
+            public void onFailure(Exception e) {
+                relocationException.set(e);
+            }
+
+            @Override
+            protected void doRun() throws Exception {
+                cyclicBarrier.await();
+                shard.relocated("test");
+            }
+        });
+        relocationThread.start();
+        AtomicReference<Exception> cancellingException = new AtomicReference<>();
+        Thread cancellingThread = new Thread(new AbstractRunnable() {
+            @Override
+            public void onFailure(Exception e) {
+                cancellingException.set(e);
+            }
+
+            @Override
+            protected void doRun() throws Exception {
+                cyclicBarrier.await();
+                shard.updateRoutingEntry(originalRouting);
+            }
+        });
+        cancellingThread.start();
+        cyclicBarrier.await();
+        relocationThread.join();
+        cancellingThread.join();
+        if (shard.state() == IndexShardState.RELOCATED) {
+            logger.debug("shard was relocated successfully");
+            assertThat(cancellingException.get(), instanceOf(IllegalIndexShardStateException.class));
+            assertThat("current routing:" + shard.routingEntry(), shard.routingEntry().relocating(), equalTo(true));
+            assertThat(relocationException.get(), nullValue());
+        } else {
+            logger.debug("shard relocation was cancelled");
+            assertThat(relocationException.get(), instanceOf(IllegalIndexShardStateException.class));
+            assertThat("current routing:" + shard.routingEntry(), shard.routingEntry().relocating(), equalTo(false));
+            assertThat(cancellingException.get(), nullValue());
+        }
+        closeShards(shard);
+    }
+
     public void testRecoverFromStore() throws IOException {
         final IndexShard shard = newStartedShard(true);
         int translogOps = 1;
@@ -1033,7 +1111,6 @@ public class IndexShardTests extends IndexShardTestCase {
         closeShards(shard);
     }
 
-
     public void testIndexingOperationListenersIsInvokedOnRecovery() throws IOException {
         IndexShard shard = newStartedShard(true);
         indexDoc(shard, "test", "0", "{\"foo\" : \"bar\"}");
@@ -1086,7 +1163,6 @@ public class IndexShardTests extends IndexShardTestCase {
         closeShards(newShard);
     }
 
-
     public void testSearchIsReleaseIfWrapperFails() throws IOException {
         IndexShard shard = newStartedShard(true);
         indexDoc(shard, "test", "0", "{\"foo\" : \"bar\"}");
@@ -1117,7 +1193,6 @@ public class IndexShardTests extends IndexShardTestCase {
         closeShards(newShard);
     }
 
-
     public void testTranslogRecoverySyncsTranslog() throws
IOException { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) @@ -1362,8 +1437,4 @@ public class IndexShardTests extends IndexShardTestCase { public void verify(String verificationToken, DiscoveryNode localNode) { } } - - public static Engine getEngineFromShard(IndexShard shard) { - return shard.getEngineOrNull(); - } } diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index e1005c03b71..dbb52cac0cf 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -473,7 +473,7 @@ public class CorruptedFileIT extends ESIntegTestCase { * TODO once checksum verification on snapshotting is implemented this test needs to be fixed or split into several * parts... We should also corrupt files on the actual snapshot and check that we don't restore the corrupted shard. */ - @TestLogging("monitor.fs:DEBUG") + @TestLogging("org.elasticsearch.monitor.fs:DEBUG") public void testCorruptFileThenSnapshotAndRestore() throws ExecutionException, InterruptedException, IOException { int numDocs = scaledRandomIntBetween(100, 1000); internalCluster().ensureAtLeastNumDataNodes(2); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java index 16a926b9e7a..95067992eed 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -44,7 +44,7 @@ public class IndexPrimaryRelocationIT extends ESIntegTestCase { private static final int RELOCATION_COUNT = 25; - @TestLogging("_root:DEBUG,action.delete:TRACE,action.index:TRACE,index.shard:TRACE,cluster.service:TRACE") + @TestLogging("_root:DEBUG,org.elasticsearch.action.delete:TRACE,org.elasticsearch.action.index:TRACE,index.shard:TRACE,org.elasticsearch.cluster.service:TRACE") public void testPrimaryRelocationWhileIndexing() throws Exception { internalCluster().ensureAtLeastNumDataNodes(randomIntBetween(2, 3)); client().admin().indices().prepareCreate("test") diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index 3c60c209712..9c5b4e92043 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -358,7 +358,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { public boolean sawUpdateAutoThrottle; public MockAppender(final String name) throws IllegalAccessException { - super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], true, null, null), null); + super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), null); } @Override diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 71c96b85fd5..d0dd969d0c7 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -53,7 +53,7 @@ import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllS import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; -@TestLogging("_root:DEBUG,index.shard:TRACE") +@TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE") public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { private final Logger logger = Loggers.getLogger(RecoveryWhileUnderLoadIT.class); diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 8493a08d704..b661761e522 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -90,7 +90,7 @@ import static org.hamcrest.Matchers.startsWith; /** */ @ClusterScope(scope = Scope.TEST, numDataNodes = 0) -@TestLogging("_root:DEBUG,indices.recovery:TRACE,index.shard.service:TRACE") +@TestLogging("_root:DEBUG,org.elasticsearch.indices.recovery:TRACE,org.elasticsearch.index.shard.service:TRACE") public class RelocationIT extends ESIntegTestCase { private final TimeValue ACCEPTABLE_RELOCATION_TIME = new TimeValue(5, TimeUnit.MINUTES); diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java index 6fc0333fecf..abaefcf438e 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java @@ -57,6 +57,7 @@ import org.elasticsearch.index.warmer.WarmerStats; import org.elasticsearch.rest.RestController; import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; import java.nio.file.Path; import java.util.ArrayList; @@ -107,7 +108,7 @@ public class RestIndicesActionTests extends ESTestCase { clusterState.getClusterName().value(), indicesStr, clusterState, 0, 0, 0, TimeValue.timeValueMillis(1000L) ); - final Table table = action.buildTable(null, indices, clusterHealth, randomIndicesStatsResponse(indices), metaData); + final Table table = action.buildTable(new FakeRestRequest(), indices, clusterHealth, randomIndicesStatsResponse(indices), metaData); // now, verify the table is correct int count = 0; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java index b23bce86b4e..1f453aa40f7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.env.Environment; import org.elasticsearch.index.Index; +import org.elasticsearch.search.SearchExtRegistry; import org.elasticsearch.search.SearchRequestParsers; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.index.query.QueryParseContext; @@ -74,73 +75,33 @@ import static org.hamcrest.Matchers.containsString; public class AggregatorParsingTests extends ESTestCase { - private static Injector injector; - private static Index index; + private String[] currentTypes; - 
private static String[] currentTypes;
-
-    protected static String[] getCurrentTypes() {
+    protected String[] getCurrentTypes() {
         return currentTypes;
     }
 
-    private static NamedWriteableRegistry namedWriteableRegistry;
-
-    protected static AggregatorParsers aggParsers;
-    protected static IndicesQueriesRegistry queriesRegistry;
-    protected static ParseFieldMatcher parseFieldMatcher;
+    protected AggregatorParsers aggParsers;
+    protected IndicesQueriesRegistry queriesRegistry;
+    protected ParseFieldMatcher parseFieldMatcher;
 
     /**
      * Setup for the whole base test class.
      */
-    @BeforeClass
-    public static void init() throws IOException {
+    @Override
+    public void setUp() throws Exception {
+        super.setUp();
         // we have to prefer CURRENT since with the range of versions we support
         // it's rather unlikely to get the current actually.
-        Version version = randomBoolean() ? Version.CURRENT
-                : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT);
         Settings settings = Settings.builder().put("node.name", AbstractQueryTestCase.class.toString())
                 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
                 .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build();
-
-        index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
-        Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        final ThreadPool threadPool = new ThreadPool(settings);
-        final ClusterService clusterService = createClusterService(threadPool);
-        setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder()
-                .put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
-        ScriptModule scriptModule = newTestScriptModule();
-        List<Setting<?>> scriptSettings = scriptModule.getSettings();
-        scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
-        SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
-
-        IndicesModule indicesModule = new IndicesModule(Collections.emptyList()) {
-            @Override
-            protected void configure() {
-                bindMapperExtension();
-            }
-        };
+        IndicesModule indicesModule = new IndicesModule(Collections.emptyList());
         SearchModule searchModule = new SearchModule(settings, false, emptyList());
         List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
         entries.addAll(indicesModule.getNamedWriteables());
         entries.addAll(searchModule.getNamedWriteables());
-        namedWriteableRegistry = new NamedWriteableRegistry(entries);
-        injector = new ModulesBuilder().add(
-                (b) -> {
-                    b.bind(Environment.class).toInstance(new Environment(settings));
-                    b.bind(ThreadPool.class).toInstance(threadPool);
-                    b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
-                },
-                settingsModule, indicesModule, searchModule,
-                new IndexSettingsModule(index, settings),
-                new AbstractModule() {
-                    @Override
-                    protected void configure() {
-                        bind(ClusterService.class).toInstance(clusterService);
-                        bind(CircuitBreakerService.class).toInstance(new NoneCircuitBreakerService());
-                        bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
-                    }
-                }).createInjector();
-        aggParsers = injector.getInstance(SearchRequestParsers.class).aggParsers;
+        aggParsers = searchModule.getSearchRequestParsers().aggParsers;
         // create some random type with some default field, those types will
         // stick around for all of the subclasses
         currentTypes = new String[randomIntBetween(0, 5)];
@@ -148,21 +109,10 @@ public class AggregatorParsingTests extends ESTestCase {
String type = randomAsciiOfLengthBetween(1, 10); currentTypes[i] = type; } - queriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); + queriesRegistry = searchModule.getQueryParserRegistry(); parseFieldMatcher = ParseFieldMatcher.STRICT; } - @AfterClass - public static void afterClass() throws Exception { - injector.getInstance(ClusterService.class).close(); - terminate(injector.getInstance(ThreadPool.class)); - injector = null; - index = null; - aggParsers = null; - currentTypes = null; - namedWriteableRegistry = null; - } - public void testTwoTypes() throws Exception { String source = JsonXContent.contentBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index 2cc1ca04f4d..ca5b98af300 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -19,46 +19,24 @@ package org.elasticsearch.search.aggregations; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.inject.ModulesBuilder; -import org.elasticsearch.common.inject.util.Providers; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.Index; -import org.elasticsearch.search.SearchRequestParsers; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.script.ScriptModule; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.threadpool.ThreadPool; -import org.junit.AfterClass; -import org.junit.BeforeClass; import java.io.IOException; import java.util.ArrayList; @@ -66,8 +44,6 @@ import java.util.Collections; import java.util.List; import static java.util.Collections.emptyList; -import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.test.ClusterServiceUtils.setState; import 
static org.hamcrest.Matchers.equalTo; public abstract class BaseAggregationTestCase> extends ESTestCase { @@ -78,101 +54,47 @@ public abstract class BaseAggregationTestCase entries = new ArrayList<>(); + entries.addAll(indicesModule.getNamedWriteables()); + entries.addAll(searchModule.getNamedWriteables()); + namedWriteableRegistry = new NamedWriteableRegistry(entries); + queriesRegistry = searchModule.getQueryParserRegistry(); + aggParsers = searchModule.getSearchRequestParsers().aggParsers; //create some random type with some default field, those types will stick around for all of the subclasses currentTypes = new String[randomIntBetween(0, 5)]; for (int i = 0; i < currentTypes.length; i++) { String type = randomAsciiOfLengthBetween(1, 10); currentTypes[i] = type; } - queriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); parseFieldMatcher = ParseFieldMatcher.STRICT; } - public static final Injector buildInjector(Index index) { - // we have to prefer CURRENT since with the range of versions we support it's rather unlikely to get the current actually. - Version version = randomBoolean() ? Version.CURRENT - : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT); - Settings settings = Settings.builder() - .put("node.name", AbstractQueryTestCase.class.toString()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false) - .build(); - - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - final ThreadPool threadPool = new ThreadPool(settings); - final ClusterService clusterService = createClusterService(threadPool); - setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder() - .put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); - ScriptModule scriptModule = newTestScriptModule(); - List> scriptSettings = scriptModule.getSettings(); - scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED); - SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList()); - IndicesModule indicesModule = new IndicesModule(Collections.emptyList()) { - @Override - protected void configure() { - bindMapperExtension(); - } - }; - SearchModule searchModule = new SearchModule(settings, false, emptyList()); - List entries = new ArrayList<>(); - entries.addAll(indicesModule.getNamedWriteables()); - entries.addAll(searchModule.getNamedWriteables()); - NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(entries); - return new ModulesBuilder().add( - (b) -> { - b.bind(Environment.class).toInstance(new Environment(settings)); - b.bind(ThreadPool.class).toInstance(threadPool); - b.bind(ScriptService.class).toInstance(scriptModule.getScriptService()); - b.bind(ClusterService.class).toProvider(Providers.of(clusterService)); - b.bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class); - b.bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); - }, - settingsModule, indicesModule, searchModule, new IndexSettingsModule(index, settings) - ).createInjector(); - } - - - @AfterClass - public static void afterClass() throws Exception { - injector.getInstance(ClusterService.class).close(); - terminate(injector.getInstance(ThreadPool.class)); - injector = null; - index = null; - aggParsers = null; - currentTypes = null; - namedWriteableRegistry = null; - } - 
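The test-infrastructure hunks above and below all make the same move: drop the static Guice injector and its @BeforeClass/@AfterClass lifecycle, and build the parsers per test directly from SearchModule. A minimal sketch of the pattern the refactor converges on, using only calls that appear elsewhere in this diff (the class name ExampleParsingTestCase and its exact field set are illustrative, not part of the change):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.test.ESTestCase;

public abstract class ExampleParsingTestCase extends ESTestCase {

    protected NamedWriteableRegistry namedWriteableRegistry;
    protected IndicesQueriesRegistry queriesRegistry;
    protected AggregatorParsers aggParsers;
    protected ParseFieldMatcher parseFieldMatcher;

    @Override
    public void setUp() throws Exception {
        super.setUp();
        Settings settings = Settings.builder()
                .put("node.name", getClass().toString())
                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
                .build();
        // Both modules expose their named writeables directly; no bindings needed.
        IndicesModule indicesModule = new IndicesModule(Collections.emptyList());
        SearchModule searchModule = new SearchModule(settings, false, Collections.emptyList());
        List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
        entries.addAll(indicesModule.getNamedWriteables());
        entries.addAll(searchModule.getNamedWriteables());
        namedWriteableRegistry = new NamedWriteableRegistry(entries);
        // Parsers come straight off the SearchModule instead of an injector, so
        // there is no ThreadPool or ClusterService left to tear down afterwards.
        queriesRegistry = searchModule.getQueryParserRegistry();
        aggParsers = searchModule.getSearchRequestParsers().aggParsers;
        parseFieldMatcher = ParseFieldMatcher.STRICT;
    }
}

Nothing version-specific is lost in this simplification: the tests that still need a pre-LatLonPoint index version now gate themselves via createShardContext().indexVersionCreated(), as the query-builder test changes earlier in this diff show.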
/** * Generic test that creates new AggregatorFactory from the test * AggregatorFactory and checks both for equality and asserts equality on @@ -263,24 +185,6 @@ public abstract class BaseAggregationTestCase 0 && randomBoolean()) { - int numberOfQueryTypes = randomIntBetween(1, currentTypes.length); - types = new String[numberOfQueryTypes]; - for (int i = 0; i < numberOfQueryTypes; i++) { - types[i] = randomFrom(currentTypes); - } - } else { - if (randomBoolean()) { - types = new String[]{MetaData.ALL}; - } else { - types = new String[0]; - } - } - return types; - } - public String randomNumericField() { int randomInt = randomInt(3); switch (randomInt) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java index b0952b2de60..0f4c539c794 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java @@ -20,29 +20,32 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.Index; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.search.SearchRequestParsers; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; +import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.junit.AfterClass; -import org.junit.BeforeClass; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static java.util.Collections.emptyList; import static org.hamcrest.Matchers.equalTo; public abstract class BasePipelineAggregationTestCase> extends ESTestCase { @@ -53,59 +56,51 @@ public abstract class BasePipelineAggregationTestCase entries = new ArrayList<>(); + entries.addAll(indicesModule.getNamedWriteables()); + entries.addAll(searchModule.getNamedWriteables()); + namedWriteableRegistry = new NamedWriteableRegistry(entries); + queriesRegistry = searchModule.getQueryParserRegistry(); + aggParsers = searchModule.getSearchRequestParsers().aggParsers; //create some random type with some default field, those types will stick around for all of the subclasses currentTypes = new String[randomIntBetween(0, 5)]; for (int i = 0; i < currentTypes.length; i++) { String type = randomAsciiOfLengthBetween(1, 
10); currentTypes[i] = type; } - queriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); parseFieldMatcher = ParseFieldMatcher.STRICT; } - @AfterClass - public static void afterClass() throws Exception { - injector.getInstance(ClusterService.class).close(); - terminate(injector.getInstance(ThreadPool.class)); - injector = null; - index = null; - aggParsers = null; - currentTypes = null; - namedWriteableRegistry = null; - } - /** * Generic test that creates new AggregatorFactory from the test * AggregatorFactory and checks both for equality and asserts equality on * the two queries. */ - public void testFromXContent() throws IOException { AF testAgg = createTestAggregatorFactory(); AggregatorFactories.Builder factoriesBuilder = AggregatorFactories.builder().skipResolveOrder().addPipelineAggregator(testAgg); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index 5cc6ec58630..7cca4baadea 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -74,7 +74,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase { public void setupSuiteScopeCluster() throws Exception { createIndex(UNMAPPED_IDX_NAME); assertAcked(prepareCreate(IDX_NAME) - .addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point,geohash_prefix=true,geohash_precision=12", + .addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point", MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long", "tag", "type=keyword")); singleTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 0ecb50d584e..4d2b0d394a8 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -20,25 +20,14 @@ package org.elasticsearch.search.builder; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.inject.ModulesBuilder; -import org.elasticsearch.common.inject.util.Providers; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; @@ -47,15 +36,10 @@ import 
org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.Index; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptModule; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchRequestParsers; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -74,109 +58,42 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilderTests; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.threadpool.ThreadPool; -import org.junit.AfterClass; -import org.junit.BeforeClass; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; public class SearchSourceBuilderTests extends ESTestCase { - private static Injector injector; - private static NamedWriteableRegistry namedWriteableRegistry; + private NamedWriteableRegistry namedWriteableRegistry; - private static SearchRequestParsers searchRequestParsers; + private SearchRequestParsers searchRequestParsers; - private static Index index; + private ParseFieldMatcher parseFieldMatcher; - private static String[] currentTypes; - - private static ParseFieldMatcher parseFieldMatcher; - - @BeforeClass - public static void init() throws IOException { + public void setUp() throws Exception { + super.setUp(); // we have to prefer CURRENT since with the range of versions we support // it's rather unlikely to get the current actually. - Version version = randomBoolean() ? 
Version.CURRENT - : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT); Settings settings = Settings.builder() .put("node.name", AbstractQueryTestCase.class.toString()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build(); - - index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - final ThreadPool threadPool = new ThreadPool(settings); - final ClusterService clusterService = createClusterService(threadPool); - setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder() - .put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); - ScriptModule scriptModule = newTestScriptModule(); - List> scriptSettings = scriptModule.getSettings(); - scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED); - SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList()); - IndicesModule indicesModule = new IndicesModule(Collections.emptyList()) { - @Override - protected void configure() { - bindMapperExtension(); - } - }; + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); + IndicesModule indicesModule = new IndicesModule(Collections.emptyList()); SearchModule searchModule = new SearchModule(settings, false, Collections.singletonList(new FetchSubPhasePluginIT.FetchTermVectorsPlugin())); List entries = new ArrayList<>(); entries.addAll(indicesModule.getNamedWriteables()); entries.addAll(searchModule.getNamedWriteables()); namedWriteableRegistry = new NamedWriteableRegistry(entries); - injector = new ModulesBuilder().add( - (b) -> { - b.bind(Environment.class).toInstance(new Environment(settings)); - b.bind(ThreadPool.class).toInstance(threadPool); - b.bind(ScriptService.class).toInstance(scriptModule.getScriptService()); - }, - settingsModule, indicesModule, searchModule, - new IndexSettingsModule(index, settings), - new AbstractModule() { - @Override - protected void configure() { - bind(ClusterService.class).toProvider(Providers.of(clusterService)); - bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class); - bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); - } - } - ).createInjector(); - searchRequestParsers = injector.getInstance(SearchRequestParsers.class); - // create some random type with some default field, those types will - // stick around for all of the subclasses - currentTypes = new String[randomIntBetween(0, 5)]; - for (int i = 0; i < currentTypes.length; i++) { - String type = randomAsciiOfLengthBetween(1, 10); - currentTypes[i] = type; - } + searchRequestParsers = searchModule.getSearchRequestParsers(); parseFieldMatcher = ParseFieldMatcher.STRICT; } - @AfterClass - public static void afterClass() throws Exception { - injector.getInstance(ClusterService.class).close(); - terminate(injector.getInstance(ThreadPool.class)); - injector = null; - index = null; - searchRequestParsers = null; - currentTypes = null; - namedWriteableRegistry = null; - } - public static SearchSourceBuilder createSearchSourceBuilder() throws IOException { SearchSourceBuilder builder = new SearchSourceBuilder(); if (randomBoolean()) { @@ -410,11 +327,11 @@ public class SearchSourceBuilderTests extends ESTestCase { assertParseSearchSource(testSearchSourceBuilder, builder.bytes()); } - 
private static void assertParseSearchSource(SearchSourceBuilder testBuilder, BytesReference searchSourceAsBytes) throws IOException { + private void assertParseSearchSource(SearchSourceBuilder testBuilder, BytesReference searchSourceAsBytes) throws IOException { assertParseSearchSource(testBuilder, searchSourceAsBytes, ParseFieldMatcher.STRICT); } - private static void assertParseSearchSource(SearchSourceBuilder testBuilder, BytesReference searchSourceAsBytes, ParseFieldMatcher pfm) + private void assertParseSearchSource(SearchSourceBuilder testBuilder, BytesReference searchSourceAsBytes, ParseFieldMatcher pfm) throws IOException { XContentParser parser = XContentFactory.xContent(searchSourceAsBytes).createParser(searchSourceAsBytes); QueryParseContext parseContext = new QueryParseContext(searchRequestParsers.queryParsers, parser, pfm); @@ -429,7 +346,7 @@ public class SearchSourceBuilderTests extends ESTestCase { assertEquals(testBuilder.hashCode(), newBuilder.hashCode()); } - private static QueryParseContext createParseContext(XContentParser parser) { + private QueryParseContext createParseContext(XContentParser parser) { return new QueryParseContext(searchRequestParsers.queryParsers, parser, parseFieldMatcher); } @@ -474,7 +391,7 @@ public class SearchSourceBuilderTests extends ESTestCase { } //we use the streaming infra to create a copy of the builder provided as argument - protected static SearchSourceBuilder copyBuilder(SearchSourceBuilder builder) throws IOException { + protected SearchSourceBuilder copyBuilder(SearchSourceBuilder builder) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { builder.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index bf40335b946..19904be38b3 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -2727,7 +2728,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { .startObject("properties") .startObject("geo_point") .field("type", "geo_point") - .field("geohash", true) .endObject() .startObject("text") .field("type", "text") @@ -2756,6 +2756,45 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertThat(search.getHits().getAt(0).highlightFields().get("text").fragments().length, equalTo(1)); } + public void testGeoFieldHighlightingWhenQueryGetsRewritten() throws IOException { + // same as above but in this example the query gets rewritten during highlighting + // see https://github.com/elastic/elasticsearch/issues/17537#issuecomment-244939633 + XContentBuilder mappings = jsonBuilder(); + mappings.startObject(); + mappings.startObject("jobs") + .startObject("_all") + .field("enabled", false) + .endObject() + 
.startObject("properties") + .startObject("loc") + .field("type", "geo_point") + .endObject() + .startObject("jd") + .field("type", "string") + .endObject() + .endObject() + .endObject(); + mappings.endObject(); + assertAcked(prepareCreate("test") + .addMapping("jobs", mappings)); + ensureYellow(); + + client().prepareIndex("test", "jobs", "1") + .setSource(jsonBuilder().startObject().field("jd", "some आवश्यकता है- आर्य समाज अनाथालय, 68 सिविल लाइन्स, बरेली को एक पुरूष" + + " रस text") + .field("loc", "12.934059,77.610741").endObject()) + .get(); + refresh(); + + QueryBuilder query = QueryBuilders.functionScoreQuery(QueryBuilders.boolQuery().filter(QueryBuilders.geoBoundingBoxQuery("loc") + .setCorners(new GeoPoint(48.934059, 41.610741), new GeoPoint(-23.065941, 113.610741)))); + SearchResponse search = client().prepareSearch().setSource( + new SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().highlighterType("plain").field("jd"))).get(); + assertNoFailures(search); + assertThat(search.getHits().totalHits(), equalTo(1L)); + } + + public void testKeywordFieldHighlighting() throws IOException { // check that keyword highlighting works XContentBuilder mappings = jsonBuilder(); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighterTests.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighterTests.java index 428751e8859..b923c2464d8 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighterTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighterTests.java @@ -68,8 +68,11 @@ public class PlainHighlighterTests extends LuceneTestCase { String fragment = highlighter.getBestFragment(fieldNameAnalyzer.tokenStream("text", "Arbitrary text field which should not cause " + "a failure"), "Arbitrary text field which should not cause a failure"); assertThat(fragment, equalTo("Arbitrary text field which should not cause a failure")); - // TODO: This test will fail if we pass in an instance of GeoPointInBBoxQueryImpl too. Should we also find a way to work around that - // or can the query not be rewritten before it is passed into the highlighter? 
+        Query rewritten = boolQuery.rewrite(null);
+        highlighter = new org.apache.lucene.search.highlight.Highlighter(new CustomQueryScorer(rewritten));
+        fragment = highlighter.getBestFragment(fieldNameAnalyzer.tokenStream("text", "Arbitrary text field which should not cause " +
+            "a failure"), "Arbitrary text field which should not cause a failure");
+        assertThat(fragment, equalTo("Arbitrary text field which should not cause a failure"));
     }
 
     public void testGeoPointInBBoxQueryHighlighting() throws IOException, InvalidTokenOffsetsException {
diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
index e6d7700c845..cc832f8a7d1 100644
--- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
+++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
@@ -21,6 +21,7 @@ package org.elasticsearch.search.geo;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
+import org.apache.lucene.geo.GeoEncodingUtils;
 import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
 import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
 import org.apache.lucene.spatial.query.SpatialArgs;
@@ -48,6 +49,7 @@ import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
 import org.elasticsearch.index.query.GeohashCellQuery;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugins.Plugin;
@@ -433,8 +435,10 @@ public class GeoFilterIT extends ESIntegTestCase {
             String name = hit.getId();
             if (version.before(Version.V_2_2_0)) {
                 point.resetFromString(hit.fields().get("pin").getValue().toString());
-            } else {
+            } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
                 point.resetFromIndexHash(hit.fields().get("pin").getValue());
+            } else {
+                point.resetFromString(hit.getFields().get("pin").getValue());
             }
 
             double dist = distance(point.getLat(), point.getLon(), 51.11, 9.851);
@@ -446,7 +450,7 @@ public class GeoFilterIT extends ESIntegTestCase {
         }
     }
 
-    public void testGeohashCellFilter() throws IOException {
+    public void testLegacyGeohashCellFilter() throws IOException {
         String geohash = randomhash(10);
         logger.info("Testing geohash_cell filter for [{}]", geohash);
@@ -457,8 +461,11 @@ public class GeoFilterIT extends ESIntegTestCase {
         logger.info("Parent Neighbors {}", parentNeighbors);
 
         ensureYellow();
+        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
+        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 
-        client().admin().indices().prepareCreate("locations").addMapping("location", "pin", "type=geo_point,geohash_prefix=true,lat_lon=false").execute().actionGet();
+        client().admin().indices().prepareCreate("locations").setSettings(settings).addMapping("location", "pin",
+            "type=geo_point,geohash_prefix=true,lat_lon=false").execute().actionGet();
 
         // Index a pin
         client().prepareIndex("locations", "location", "1").setCreate(true).setSource("pin", geohash).execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java
index dedd47d3e43..20216e10593
100644 --- a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java +++ b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java @@ -32,6 +32,7 @@ import java.util.HashMap; import java.util.Map; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class InternalSearchHitTests extends ESTestCase { @@ -63,19 +64,15 @@ public class InternalSearchHitTests extends ESTestCase { InternalSearchHits hits = new InternalSearchHits(new InternalSearchHit[]{hit1, hit2}, 2, 1f); - InternalSearchHits.StreamContext context = new InternalSearchHits.StreamContext(); - context.streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM); BytesStreamOutput output = new BytesStreamOutput(); - hits.writeTo(output, context); + hits.writeTo(output); InputStream input = output.bytes().streamInput(); - context = new InternalSearchHits.StreamContext(); - context.streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM); - InternalSearchHits results = InternalSearchHits.readSearchHits(new InputStreamStreamInput(input), context); + InternalSearchHits results = InternalSearchHits.readSearchHits(new InputStreamStreamInput(input)); assertThat(results.getAt(0).shard(), equalTo(target)); - assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).shard(), nullValue()); - assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).shard(), nullValue()); - assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).shard(), nullValue()); - assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).shard(), nullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).shard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).shard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).shard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).shard(), notNullValue()); assertThat(results.getAt(1).shard(), equalTo(target)); } diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index cb55f88ff80..693fffa307a 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -260,8 +260,8 @@ public class MultiMatchQueryIT extends ESIntegTestCase { .addSort("_uid", SortOrder.ASC) .setQuery(multiMatchQueryBuilder).get(); MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(field, builder.toString()); - if (getType(multiMatchQueryBuilder) != null) { - matchQueryBuilder.type(MatchQuery.Type.valueOf(getType(multiMatchQueryBuilder).matchQueryType().toString())); + if (multiMatchQueryBuilder.getType() != null) { + matchQueryBuilder.type(MatchQuery.Type.valueOf(multiMatchQueryBuilder.getType().matchQueryType().toString())); } SearchResponse matchResp = client().prepareSearch("test") // _uid tie sort @@ -569,9 +569,10 @@ public class MultiMatchQueryIT extends ESIntegTestCase { // test if boosts work searchResponse = client().prepareSearch("test") - .setQuery(randomizeType(multiMatchQuery("the ultimate", "full_name", "first_name", "last_name", "category").field("last_name", 10) + .setQuery(randomizeType(multiMatchQuery("the ultimate", "full_name", "first_name", 
"category").field("last_name", 10) .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .operator(Operator.AND))).get(); + assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasId("ultimate1")); // has ultimate in the last_name and that is boosted assertSecondHit(searchResponse, hasId("ultimate2")); assertThat(searchResponse.getHits().hits()[0].getScore(), greaterThan(searchResponse.getHits().hits()[1].getScore())); @@ -582,6 +583,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { .setQuery(randomizeType(multiMatchQuery("the ultimate", "full_name", "first_name", "last_name", "category") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .operator(Operator.AND))).get(); + assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasId("ultimate2")); assertSecondHit(searchResponse, hasId("ultimate1")); assertThat(searchResponse.getHits().hits()[0].getScore(), greaterThan(searchResponse.getHits().hits()[1].getScore())); @@ -590,28 +592,33 @@ public class MultiMatchQueryIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("15", "skill") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))).get(); + assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("15", "skill", "first_name") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))).get(); + assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); // Two numeric fields together caused trouble at one point! searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("15", "int-field", "skill") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))).get(); + assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("15", "int-field", "first_name", "skill") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))).get(); + assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("alpha 15", "first_name", "skill") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .lenient(true))).get(); + assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("ultimate1")); /* * Doesn't find theone because "alpha 15" isn't a number and we don't @@ -624,6 +631,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { .setQuery(randomizeType(multiMatchQuery("alpha 15", "int-field", "first_name", "skill") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .lenient(true))).get(); + assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("ultimate1")); } @@ -647,21 +655,21 @@ public class MultiMatchQueryIT extends ESIntegTestCase { } } - public static List fill(List list, String value, int times) { + private static List fill(List list, String value, int times) { for (int i = 0; i < times; i++) { list.add(value); } return list; } - public List fillRandom(List list, int times) { + private static List fillRandom(List list, int times) { for (int i = 0; i < times; i++) { - list.add(randomAsciiOfLengthBetween(1, 5)); + list.add(randomAsciiOfLength(5)); } return list; } - public T randomPickExcept(List fromList, T butNot) { + private static T randomPickExcept(List fromList, T butNot) { while (true) { T t = RandomPicks.randomFrom(random(), fromList); if (t.equals(butNot)) { @@ 
-671,9 +679,9 @@ public class MultiMatchQueryIT extends ESIntegTestCase { } } - public MultiMatchQueryBuilder randomizeType(MultiMatchQueryBuilder builder) { + private static MultiMatchQueryBuilder randomizeType(MultiMatchQueryBuilder builder) { try { - MultiMatchQueryBuilder.Type type = getType(builder); + MultiMatchQueryBuilder.Type type = builder.getType(); if (type == null && randomBoolean()) { return builder; } @@ -715,8 +723,4 @@ public class MultiMatchQueryIT extends ESIntegTestCase { throw new RuntimeException(ex); } } - - private MultiMatchQueryBuilder.Type getType(MultiMatchQueryBuilder builder) throws NoSuchFieldException, IllegalAccessException { - return builder.getType(); - } } diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index 78c77e15f38..6d1c64437d6 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -122,7 +122,6 @@ public class FieldSortIT extends ESIntegTestCase { } } - @LuceneTestCase.BadApple(bugUrl = "simon is working on this") public void testIssue6614() throws ExecutionException, InterruptedException { List builders = new ArrayList<>(); boolean strictTimeBasedIndices = randomBoolean(); diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java index b78496ff18d..f39b3ff92fc 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java @@ -67,8 +67,8 @@ public class GeoDistanceIT extends ESIntegTestCase { return Arrays.asList(InternalSettingsPlugin.class); } - public void testSimpleDistance() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + public void testLegacyGeoDistanceRangeQuery() throws Exception { + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("location").field("type", "geo_point"); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index 9fae823c43b..0901a6201a2 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -21,8 +21,10 @@ package org.elasticsearch.search.suggest; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; +import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; @@ -38,6 +40,7 @@ import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; import 
org.elasticsearch.search.suggest.completion.context.GeoQueryContext; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.ArrayList; @@ -557,7 +560,8 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { } public void testGeoField() throws Exception { - +// Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); +// Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder mapping = jsonBuilder(); mapping.startObject(); mapping.startObject(TYPE); diff --git a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java index 786319e2aee..42977105058 100644 --- a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java +++ b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java @@ -40,7 +40,7 @@ public class MockLogAppender extends AbstractAppender { private List expectations; public MockLogAppender() throws IllegalAccessException { - super("mock", RegexFilter.createFilter(".*(\n.*)*", new String[0], true, null, null), null); + super("mock", RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), null); expectations = new ArrayList<>(); } diff --git a/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java b/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java index f65e3a742f6..2fb63335873 100644 --- a/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java +++ b/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java @@ -69,33 +69,33 @@ public class SimpleTimestampIT extends ESIntegTestCase { long now2 = System.currentTimeMillis(); // non realtime get (stored) - GetResponse getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet(); long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); assertThat(timestamp, greaterThanOrEqualTo(now1)); assertThat(timestamp, lessThanOrEqualTo(now2)); // verify its the same timestamp when going the replica - getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet(); assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp)); logger.info("--> check with custom timestamp (numeric)"); client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimestamp("10").setRefreshPolicy(IMMEDIATE).get(); - getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet(); timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); assertThat(timestamp, equalTo(10L)); // verify its the same timestamp when going the replica - getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet(); + getResponse = client().prepareGet("test", 
"type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet(); assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp)); logger.info("--> check with custom timestamp (string)"); client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimestamp("1970-01-01T00:00:00.020") .setRefreshPolicy(IMMEDIATE).get(); - getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet(); timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); assertThat(timestamp, equalTo(20L)); // verify its the same timestamp when going the replica - getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet(); assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp)); } diff --git a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java index 87bf66d64b1..5716e57c96f 100644 --- a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java +++ b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java @@ -117,7 +117,7 @@ public class SimpleTTLIT extends ESIntegTestCase { // realtime get check long currentTime = System.currentTimeMillis(); - GetResponse getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").get(); + GetResponse getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").get(); long ttl0; if (getResponse.isExists()) { ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); @@ -127,7 +127,7 @@ public class SimpleTTLIT extends ESIntegTestCase { } // verify the ttl is still decreasing when going to the replica currentTime = System.currentTimeMillis(); - getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").get(); + getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").get(); if (getResponse.isExists()) { ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now))); @@ -136,7 +136,7 @@ public class SimpleTTLIT extends ESIntegTestCase { } // non realtime get (stored) currentTime = System.currentTimeMillis(); - getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(false).get(); + getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).get(); if (getResponse.isExists()) { ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now))); @@ -145,7 +145,7 @@ public class SimpleTTLIT extends ESIntegTestCase { } // non realtime get going the replica currentTime = System.currentTimeMillis(); - getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(false).get(); + getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).get(); if (getResponse.isExists()) { ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now))); @@ -154,10 +154,10 @@ 
public class SimpleTTLIT extends ESIntegTestCase { } // no TTL provided so no TTL fetched - getResponse = client().prepareGet("test", "type1", "no_ttl").setFields("_ttl").setRealtime(true).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "no_ttl").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); assertThat(getResponse.getField("_ttl"), nullValue()); // no TTL provided make sure it has default TTL - getResponse = client().prepareGet("test", "type2", "default_ttl").setFields("_ttl").setRealtime(true).execute().actionGet(); + getResponse = client().prepareGet("test", "type2", "default_ttl").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); assertThat(ttl0, greaterThan(0L)); @@ -190,28 +190,28 @@ public class SimpleTTLIT extends ESIntegTestCase { )); // realtime get check - getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(true).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); - getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setFields("_ttl").setRealtime(true).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); // replica realtime get check - getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(true).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); - getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setFields("_ttl").setRealtime(true).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); // Need to run a refresh, in order for the non realtime get to work. 
client().admin().indices().prepareRefresh("test").execute().actionGet(); // non realtime get (stored) check - getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(false).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); - getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setFields("_ttl").setRealtime(false).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(false).execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); // non realtime get going the replica check - getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(false).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); - getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setFields("_ttl").setRealtime(false).execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(false).execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); } @@ -287,7 +287,7 @@ public class SimpleTTLIT extends ESIntegTestCase { } private long getTtl(String type, Object id) { - GetResponse getResponse = client().prepareGet("test", type, id.toString()).setFields("_ttl").execute() + GetResponse getResponse = client().prepareGet("test", type, id.toString()).setStoredFields("_ttl").execute() .actionGet(); return ((Number) getResponse.getField("_ttl").getValue()).longValue(); } diff --git a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java b/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java index e81b4decb2d..ac142fa461d 100644 --- a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java +++ b/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java @@ -123,12 +123,12 @@ public class TimestampTTLBWIT extends ESIntegTestCase { // check TTL is kept after an update without TTL client().prepareIndex("test", "type1", "2").setSource("field", 1).setTTL(86400000L).setRefreshPolicy(IMMEDIATE).get(); - GetResponse getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet(); long ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); assertThat(ttl, greaterThan(0L)); client().prepareUpdate(indexOrAlias(), "type1", "2") .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).execute().actionGet(); - getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet(); ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); assertThat(ttl, greaterThan(0L)); @@ -136,7 +136,7 @@ public class TimestampTTLBWIT extends ESIntegTestCase { client().prepareUpdate(indexOrAlias(), "type1", "2") .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", Collections.singletonMap("_ttl", 
3600000)))).execute().actionGet(); - getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet(); ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); assertThat(ttl, greaterThan(0L)); assertThat(ttl, lessThanOrEqualTo(3600000L)); @@ -147,7 +147,7 @@ public class TimestampTTLBWIT extends ESIntegTestCase { .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", Collections.singletonMap("_timestamp", "2009-11-15T14:12:12")))).execute() .actionGet(); - getResponse = client().prepareGet("test", "type1", "3").setFields("_timestamp").execute().actionGet(); + getResponse = client().prepareGet("test", "type1", "3").setStoredFields("_timestamp").execute().actionGet(); long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); assertThat(timestamp, equalTo(1258294332000L)); } diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index 7ea68dc10f6..fc360effb03 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -469,7 +469,7 @@ public class UpdateIT extends ESIntegTestCase { UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("extra", "foo"))) - .setFields("_source") + .setFetchSource(true) .execute().actionGet(); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -549,7 +549,7 @@ public class UpdateIT extends ESIntegTestCase { UpdateResponse updateResponse = client().prepareUpdate("test", "type1", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("extra", "foo"))) - .setFields("_source") + .setFetchSource(true) .execute().actionGet(); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -624,14 +624,30 @@ public class UpdateIT extends ESIntegTestCase { // check fields parameter client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).setFields("_source", "field") - .execute().actionGet(); + .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)) + .setFields("field") + .setFetchSource(true) + .execute().actionGet(); assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult(), notNullValue()); assertThat(updateResponse.getGetResult().getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult().sourceRef(), notNullValue()); assertThat(updateResponse.getGetResult().field("field").getValue(), notNullValue()); + // check _source parameter + client().prepareIndex("test", "type1", "1").setSource("field1", 1, "field2", 2).execute().actionGet(); + updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + .setScript(new Script("field1", ScriptService.ScriptType.INLINE, "field_inc", null)) + .setFetchSource("field1", "field2") + .get(); + assertThat(updateResponse.getIndex(), 
equalTo("test")); + assertThat(updateResponse.getGetResult(), notNullValue()); + assertThat(updateResponse.getGetResult().getIndex(), equalTo("test")); + assertThat(updateResponse.getGetResult().sourceRef(), notNullValue()); + assertThat(updateResponse.getGetResult().field("field1"), nullValue()); + assertThat(updateResponse.getGetResult().sourceAsMap().size(), equalTo(1)); + assertThat(updateResponse.getGetResult().sourceAsMap().get("field1"), equalTo(2)); + // check updates without script // add new field client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py index 33abbf96345..883a62210c9 100644 --- a/dev-tools/smoke_test_rc.py +++ b/dev-tools/smoke_test_rc.py @@ -65,6 +65,7 @@ DEFAULT_PLUGINS = ["analysis-icu", "analysis-stempel", "discovery-azure-classic", "discovery-ec2", + "discovery-file", "discovery-gce", "ingest-attachment", "ingest-geoip", diff --git a/distribution/rpm/build.gradle b/distribution/rpm/build.gradle index 185f558ff02..a0dc33b9ad4 100644 --- a/distribution/rpm/build.gradle +++ b/distribution/rpm/build.gradle @@ -36,6 +36,17 @@ task buildRpm(type: Rpm) { fileMode 0644 addParentDirs false // TODO ospackage doesn't support icon but we used to have one + + // Declare the folders so that the RPM package manager removes + // them when upgrading or removing the package + directory('/usr/share/elasticsearch/bin', 0755) + directory('/usr/share/elasticsearch/lib', 0755) + directory('/usr/share/elasticsearch/modules', 0755) + modulesFiles.eachFile { FileCopyDetails fcp -> + if (fcp.name == "plugin-descriptor.properties") { + directory('/usr/share/elasticsearch/modules/' + fcp.file.parentFile.name, 0755) + } + } } artifacts { diff --git a/distribution/src/main/packaging/scripts/prerm b/distribution/src/main/packaging/scripts/prerm index 592f2f9948f..bca03b25766 100644 --- a/distribution/src/main/packaging/scripts/prerm +++ b/distribution/src/main/packaging/scripts/prerm @@ -79,12 +79,13 @@ if [ "$REMOVE_SERVICE" = "true" ]; then if command -v update-rc.d >/dev/null; then update-rc.d elasticsearch remove >/dev/null || true fi + + SCRIPTS_DIR="/etc/elasticsearch/scripts" + # delete the scripts directory if and only if empty + if [ -d "$SCRIPTS_DIR" ]; then + rmdir --ignore-fail-on-non-empty "$SCRIPTS_DIR" + fi fi -SCRIPTS_DIR="/etc/elasticsearch/scripts" -# delete the scripts directory if and only if empty -if [ -d "$SCRIPTS_DIR" ]; then - rmdir --ignore-fail-on-non-empty "$SCRIPTS_DIR" -fi ${scripts.footer} diff --git a/distribution/src/main/packaging/systemd/elasticsearch.service b/distribution/src/main/packaging/systemd/elasticsearch.service index 8971bc2bc19..0554371a1f9 100644 --- a/distribution/src/main/packaging/systemd/elasticsearch.service +++ b/distribution/src/main/packaging/systemd/elasticsearch.service @@ -21,10 +21,17 @@ ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec ExecStart=/usr/share/elasticsearch/bin/elasticsearch \ -p ${PID_DIR}/elasticsearch.pid \ + --quiet \ -Edefault.path.logs=${LOG_DIR} \ -Edefault.path.data=${DATA_DIR} \ -Edefault.path.conf=${CONF_DIR} +# StandardOutput is configured to redirect to journalctl since +# some error messages may be logged in standard output before +# the Elasticsearch logging system is initialized. Elasticsearch +# stores its logs in /var/log/elasticsearch and does not use +# journalctl by default. If you also want to enable journalctl +# logging, you can simply remove the "quiet" option from ExecStart.
StandardOutput=journal StandardError=inherit diff --git a/distribution/src/main/resources/bin/elasticsearch-plugin b/distribution/src/main/resources/bin/elasticsearch-plugin index 06f8c5b8c27..098d9124498 100755 --- a/distribution/src/main/resources/bin/elasticsearch-plugin +++ b/distribution/src/main/resources/bin/elasticsearch-plugin @@ -82,9 +82,10 @@ HOSTNAME=`hostname | cut -d. -f1` export HOSTNAME declare -a args=("$@") +path_props=(-Des.path.home="$ES_HOME") if [ -e "$CONF_DIR" ]; then - args=("${args[@]}" -Edefault.path.conf="$CONF_DIR") + path_props=("${path_props[@]}" -Des.path.conf="$CONF_DIR") fi -exec "$JAVA" $ES_JAVA_OPTS -Delasticsearch -Des.path.home="$ES_HOME" -cp "$ES_HOME/lib/*" org.elasticsearch.plugins.PluginCli "${args[@]}" +exec "$JAVA" $ES_JAVA_OPTS -Delasticsearch "${path_props[@]}" -cp "$ES_HOME/lib/*" org.elasticsearch.plugins.PluginCli "${args[@]}" diff --git a/distribution/src/main/resources/bin/elasticsearch-plugin.bat b/distribution/src/main/resources/bin/elasticsearch-plugin.bat index 54c0b86b880..58b749d6b83 100644 --- a/distribution/src/main/resources/bin/elasticsearch-plugin.bat +++ b/distribution/src/main/resources/bin/elasticsearch-plugin.bat @@ -17,9 +17,14 @@ for %%I in ("%SCRIPT_DIR%..") do set ES_HOME=%%~dpfI TITLE Elasticsearch Plugin Manager ${project.version} +SET path_props=-Des.path.home="%ES_HOME%" +IF DEFINED CONF_DIR ( + SET path_props=!path_props! -Des.path.conf="%CONF_DIR%" +) + SET args=%* SET HOSTNAME=%COMPUTERNAME% -"%JAVA%" %ES_JAVA_OPTS% -Des.path.home="%ES_HOME%" -cp "%ES_HOME%/lib/*;" "org.elasticsearch.plugins.PluginCli" !args! +"%JAVA%" %ES_JAVA_OPTS% !path_props! -cp "%ES_HOME%/lib/*;" "org.elasticsearch.plugins.PluginCli" !args! ENDLOCAL diff --git a/distribution/src/main/resources/config/log4j2.properties b/distribution/src/main/resources/config/log4j2.properties index 2cfe038cc84..9a3147f5a23 100644 --- a/distribution/src/main/resources/config/log4j2.properties +++ b/distribution/src/main/resources/config/log4j2.properties @@ -1,19 +1,19 @@ status = error # log action execution errors for easier debugging -logger.action.name = action +logger.action.name = org.elasticsearch.action logger.action.level = debug appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n appender.rolling.type = RollingFile appender.rolling.name = rolling appender.rolling.fileName = ${sys:es.logs}.log appender.rolling.layout.type = PatternLayout -appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n +appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.10000m%n appender.rolling.filePattern = ${sys:es.logs}-%d{yyyy-MM-dd}.log appender.rolling.policies.type = Policies appender.rolling.policies.time.type = TimeBasedTriggeringPolicy @@ -28,7 +28,7 @@ appender.deprecation_rolling.type = RollingFile appender.deprecation_rolling.name = deprecation_rolling appender.deprecation_rolling.fileName = ${sys:es.logs}_deprecation.log appender.deprecation_rolling.layout.type = PatternLayout -appender.deprecation_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n +appender.deprecation_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.10000m%n appender.deprecation_rolling.filePattern = ${sys:es.logs}_deprecation-%i.log.gz appender.deprecation_rolling.policies.type = Policies 
appender.deprecation_rolling.policies.size.type = SizeBasedTriggeringPolicy @@ -36,7 +36,7 @@ appender.deprecation_rolling.policies.size.size = 1GB appender.deprecation_rolling.strategy.type = DefaultRolloverStrategy appender.deprecation_rolling.strategy.max = 4 -logger.deprecation.name = deprecation +logger.deprecation.name = org.elasticsearch.deprecation logger.deprecation.level = warn logger.deprecation.appenderRef.deprecation_rolling.ref = deprecation_rolling logger.deprecation.additivity = false @@ -45,7 +45,7 @@ appender.index_search_slowlog_rolling.type = RollingFile appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling appender.index_search_slowlog_rolling.fileName = ${sys:es.logs}_index_search_slowlog.log appender.index_search_slowlog_rolling.layout.type = PatternLayout -appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n +appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.10000m%n appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs}_index_search_slowlog-%d{yyyy-MM-dd}.log appender.index_search_slowlog_rolling.policies.type = Policies appender.index_search_slowlog_rolling.policies.time.type = TimeBasedTriggeringPolicy @@ -61,7 +61,7 @@ appender.index_indexing_slowlog_rolling.type = RollingFile appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs}_index_indexing_slowlog.log appender.index_indexing_slowlog_rolling.layout.type = PatternLayout -appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n +appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.10000m%n appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs}_index_indexing_slowlog-%d{yyyy-MM-dd}.log appender.index_indexing_slowlog_rolling.policies.type = Policies appender.index_indexing_slowlog_rolling.policies.time.type = TimeBasedTriggeringPolicy diff --git a/docs/java-api/query-dsl/geo-queries.asciidoc b/docs/java-api/query-dsl/geo-queries.asciidoc index 18ed0f37c14..55184bde326 100644 --- a/docs/java-api/query-dsl/geo-queries.asciidoc +++ b/docs/java-api/query-dsl/geo-queries.asciidoc @@ -30,12 +30,6 @@ The queries in this group are: Find documents with geo-points within the specified polygon. -<> query:: - - Find geo-points whose geohash intersects with the geohash of the specified - point. - - include::geo-shape-query.asciidoc[] include::geo-bounding-box-query.asciidoc[] @@ -45,5 +39,3 @@ include::geo-distance-query.asciidoc[] include::geo-distance-range-query.asciidoc[] include::geo-polygon-query.asciidoc[] - -include::geohash-cell-query.asciidoc[] diff --git a/docs/java-api/query-dsl/geohash-cell-query.asciidoc b/docs/java-api/query-dsl/geohash-cell-query.asciidoc deleted file mode 100644 index 7aad54892a5..00000000000 --- a/docs/java-api/query-dsl/geohash-cell-query.asciidoc +++ /dev/null @@ -1,17 +0,0 @@ -[[java-query-dsl-geohash-cell-query]] -==== Geohash Cell Query - -See {ref}/query-dsl-geohash-cell-query.html[Geohash Cell Query] - -[source,java] --------------------------------------------------- -QueryBuilder qb = geoHashCellQuery("pin.location", <1> - new GeoPoint(13.4080, 52.5186)) <2> - .neighbors(true) <3> - .precision(3); <4> --------------------------------------------------- -<1> field -<2> point. 
Can also be a hash like `u30` -<3> The `neighbors` option of the filter offers the possibility to filter cells - next to the given cell. -<4> precision level diff --git a/docs/plugins/discovery-file.asciidoc b/docs/plugins/discovery-file.asciidoc new file mode 100644 index 00000000000..a848cdd6ff1 --- /dev/null +++ b/docs/plugins/discovery-file.asciidoc @@ -0,0 +1,93 @@ +[[discovery-file]] +=== File-Based Discovery Plugin + +The file-based discovery plugin uses a list of hosts/ports in a `unicast_hosts.txt` file +in the `config/discovery-file` directory for unicast discovery. + +[[discovery-file-install]] +[float] +==== Installation + +This plugin can be installed using the plugin manager: + +[source,sh] +---------------------------------------------------------------- +sudo bin/elasticsearch-plugin install discovery-file +---------------------------------------------------------------- + +The plugin must be installed on every node in the cluster, and each node must +be restarted after installation. Note that installing the plugin will add a +`discovery-file` directory to the `config` folder, and a default `unicast_hosts.txt` +file that must be edited with the correct unicast hosts list before starting the node. + +[[discovery-file-remove]] +[float] +==== Removal + +The plugin can be removed with the following command: + +[source,sh] +---------------------------------------------------------------- +sudo bin/elasticsearch-plugin remove discovery-file +---------------------------------------------------------------- + +The node must be stopped before removing the plugin. + +[[discovery-file-usage]] +[float] +==== Using the file-based discovery plugin + +The file-based discovery plugin provides the ability to specify the +unicast hosts list through a simple `unicast_hosts.txt` file that can +be dynamically updated at any time. The discovery type for this plugin +is still the default `zen` discovery, so no changes are required to the +`elasticsearch.yml` config file. This plugin simply provides a facility +to supply the unicast hosts list for zen discovery through an external +file that can be updated at any time by a side process. + +For example, this provides a convenient mechanism for an Elasticsearch instance +running in Docker containers to be dynamically supplied with a list of IP +addresses to connect to for zen discovery when those IP addresses may not be +known at node startup. + +Note that the file-based discovery plugin is meant to augment the unicast +hosts list in `elasticsearch.yml` (if specified), not replace it. Therefore, +if there are valid unicast host entries in `discovery.zen.ping.unicast.hosts`, +they will be used in addition to those supplied in `unicast_hosts.txt`. + +Any time a change is made to the `unicast_hosts.txt` file, even while Elasticsearch +continues to run, the changes will be picked up by the plugin and the +new hosts list will be used for the next pinging round for master election. + +Upon installation of the plugin, a default `unicast_hosts.txt` file will +be found in the `$CONFIG_DIR/discovery-file` directory. This default file +will contain some comments about what the file should contain. All comments +in this file must appear on their own lines, starting with `#` (i.e. comments +cannot start in the middle of a line). + +[[discovery-file-format]] +[float] +==== unicast_hosts.txt file format + +The format of the file is to specify one unicast host entry per line.
+Each unicast host entry consists of the host (host name or IP address) and +an optional transport port number. If the port number is specified, it must +come immediately after the host (on the same line) separated by a `:`. +If the port number is not specified, a default value of 9300 is used. + +For example, here is a sample `unicast_hosts.txt` for a cluster with +four nodes that participate in unicast discovery, some of which are not +running on the default port: + +[source,txt] +---------------------------------------------------------------- +10.10.10.5 +10.10.10.6:9305 +10.10.10.5:10005 +# an IPv6 address +[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:9301 +---------------------------------------------------------------- + +Host names are allowed instead of IP addresses (similar to +`discovery.zen.ping.unicast.hosts`), and IPv6 addresses must be +specified in brackets with the port coming after the brackets. diff --git a/docs/plugins/discovery.asciidoc b/docs/plugins/discovery.asciidoc index 999bf9c0e1f..96a1c1e7b28 100644 --- a/docs/plugins/discovery.asciidoc +++ b/docs/plugins/discovery.asciidoc @@ -21,6 +21,10 @@ The Azure Classic discovery plugin uses the Azure Classic API for unicast discov The Google Compute Engine discovery plugin uses the GCE API for unicast discovery. +<>:: + +The File-based discovery plugin allows providing the unicast hosts list through a dynamically updatable file. + [float] ==== Community contributed discovery plugins @@ -37,3 +41,5 @@ include::discovery-azure-classic.asciidoc[] include::discovery-gce.asciidoc[] +include::discovery-file.asciidoc[] + diff --git a/docs/plugins/ingest-geoip.asciidoc b/docs/plugins/ingest-geoip.asciidoc index 1626be6c8e6..ec70a125b65 100644 --- a/docs/plugins/ingest-geoip.asciidoc +++ b/docs/plugins/ingest-geoip.asciidoc @@ -154,3 +154,48 @@ returns this: } -------------------------------------------------- // TESTRESPONSE + + +Not all IP addresses have geo information in the database. When this +occurs, no `target_field` is inserted into the document. + +Here is an example of how a document is indexed when information for "93.114.45.13" +cannot be found: + +[source,js] +-------------------------------------------------- +PUT _ingest/pipeline/geoip +{ + "description" : "Add geoip info", + "processors" : [ + { + "geoip" : { + "field" : "ip" + } + } + ] +} +PUT my_index/my_type/my_id?pipeline=geoip +{ + "ip": "93.114.45.13" +} +GET my_index/my_type/my_id +-------------------------------------------------- +// CONSOLE + +Which returns: + +[source,js] +-------------------------------------------------- +{ + "found": true, + "_index": "my_index", + "_type": "my_type", + "_id": "my_id", + "_version": 1, + "_source": { + "ip": "93.114.45.13" + } +} +-------------------------------------------------- +// TESTRESPONSE diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index ea2e28a92ec..06c3b9409eb 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -460,10 +460,6 @@ Centimeter:: `cm` or `centimeters` Millimeter:: `mm` or `millimeters` Nautical mile:: `NM`, `nmi` or `nauticalmiles` -The `precision` parameter in the <> accepts -distances with the above units, but if no unit is specified, then the -precision is interpreted as the length of the geohash.
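To make the format rules above concrete, here is a rough sketch of how entries in `unicast_hosts.txt` resolve to `host:port` pairs. This is illustrative only, not the plugin's actual implementation, and the class and method names are invented:

[source,java]
----------------------------------------------------------------
import java.util.ArrayList;
import java.util.List;

// Hypothetical helper, not the plugin's real code: resolves each line of
// unicast_hosts.txt according to the format described above.
public class UnicastHostsFileSketch {
    private static final int DEFAULT_PORT = 9300; // used when no port is given

    public static List<String> resolve(List<String> lines) {
        List<String> hosts = new ArrayList<>();
        for (String raw : lines) {
            String entry = raw.trim();
            if (entry.isEmpty() || entry.startsWith("#")) {
                continue; // comments must occupy the whole line
            }
            if (entry.startsWith("[")) {
                // bracketed IPv6: an optional port follows the closing bracket
                hosts.add(entry.contains("]:") ? entry : entry + ":" + DEFAULT_PORT);
            } else if (entry.contains(":")) {
                hosts.add(entry); // host name or IPv4 address with an explicit port
            } else {
                hosts.add(entry + ":" + DEFAULT_PORT);
            }
        }
        return hosts;
    }
}
----------------------------------------------------------------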
- [[fuzziness]] [float] === Fuzziness diff --git a/docs/reference/cat/indices.asciidoc b/docs/reference/cat/indices.asciidoc index 3e8938faab7..22be4067f8a 100644 --- a/docs/reference/cat/indices.asciidoc +++ b/docs/reference/cat/indices.asciidoc @@ -33,7 +33,7 @@ Which indices are yellow? [source,sh] -------------------------------------------------- -% curl localhost:9200/_cat/indices | grep ^yell +% curl localhost:9200/_cat/indices?health=yellow yellow open wiki 2 1 6401 1115 151.4mb 151.4mb yellow open twitter 5 1 11434 0 32mb 32mb -------------------------------------------------- @@ -52,7 +52,7 @@ How many merge operations have the shards for the `wiki` completed? [source,sh] -------------------------------------------------- -% curl 'localhost:9200/_cat/indices/wiki?pri&v&h=health,index,prirep,docs.count,mt' +% curl 'localhost:9200/_cat/indices/wiki?pri&v&h=health,index,pri,rep,docs.count,mt' health index docs.count mt pri.mt green wiki 9646 16 16 -------------------------------------------------- diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index b139e761490..104653b2481 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -154,7 +154,7 @@ times an update should be retried in the case of a version conflict. The `update` action payload supports the following options: `doc` (partial document), `upsert`, `doc_as_upsert`, `script`, `params` (for -script), `lang` (for script) and `fields`. See update documentation for details on +script), `lang` (for script) and `_source`. See update documentation for details on the options. Curl example with update actions: [source,js] @@ -165,10 +165,10 @@ the options. Curl example with update actions: { "script" : { "inline": "ctx._source.counter += params.param1", "lang" : "painless", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}} { "update" : {"_id" : "2", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} } { "doc" : {"field" : "value"}, "doc_as_upsert" : true } -{ "update" : {"_id" : "3", "_type" : "type1", "_index" : "index1", "fields" : ["_source"]} } +{ "update" : {"_id" : "3", "_type" : "type1", "_index" : "index1", "_source" : true} } { "doc" : {"field" : "value"} } { "update" : {"_id" : "4", "_type" : "type1", "_index" : "index1"} } -{ "doc" : {"field" : "value"}, "fields": ["_source"]} +{ "doc" : {"field" : "value"}, "_source": true} -------------------------------------------------- [float] diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index 4f504f21320..ea398b46a53 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -52,10 +52,6 @@ call in-place to make the document visible. This will also make other documents changed since the last refresh visible. In order to disable realtime GET, one can set the `realtime` parameter to `false`. -When getting a document, one can specify `fields` to fetch from it. They -will, when possible, be fetched as stored fields (fields mapped as -<> in the mapping). - [float] [[type]] === Optional Type @@ -69,7 +65,7 @@ to fetch the first document matching the id across all types. === Source filtering By default, the get operation returns the contents of the `_source` field unless -you have used the `fields` parameter or if the `_source` field is disabled. +you have used the `stored_fields` parameter or the `_source` field is disabled.
You can turn off `_source` retrieval by using the `_source` parameter: [source,js] @@ -96,25 +92,122 @@ curl -XGET 'http://localhost:9200/twitter/tweet/1?_source=*.id,retweeted' [float] -[[get-fields]] -=== Fields +[[get-stored-fields]] +=== Stored Fields The get operation allows specifying a set of stored fields that will be -returned by passing the `fields` parameter. For example: +returned by passing the `stored_fields` parameter. +If the requested fields are not stored, they will be ignored. +Consider for instance the following mapping: [source,js] -------------------------------------------------- -curl -XGET 'http://localhost:9200/twitter/tweet/1?fields=title,content' +PUT twitter +{ + "mappings": { + "tweet": { + "properties": { + "counter": { + "type": "integer", + "store": false + }, + "tags": { + "type": "keyword", + "store": true + } + } + } + } +} -------------------------------------------------- +// CONSOLE -For backward compatibility, if the requested fields are not stored, they will be fetched -from the `_source` (parsed and extracted). This functionality has been replaced by the -<> parameter. +Now we can add a document: -Field values fetched from the document it self are always returned as an array. Metadata fields like `_routing` and -`_parent` fields are never returned as an array. +[source,js] +-------------------------------------------------- +PUT twitter/tweet/1 +{ + "counter" : 1, + "tags" : ["red"] +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] -Also only leaf fields can be returned via the `field` option. So object fields can't be returned and such requests +... and try to retrieve it: + +[source,js] +-------------------------------------------------- +GET twitter/tweet/1?stored_fields=tags,counter +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +The result of the above get operation is: + +[source,js] +-------------------------------------------------- +{ + "_index": "twitter", + "_type": "tweet", + "_id": "1", + "_version": 1, + "found": true, + "fields": { + "tags": [ + "red" + ] + } +} +-------------------------------------------------- +// TESTRESPONSE + + +Field values fetched from the document itself are always returned as an array. +Since the `counter` field is not stored, the get request simply ignores it when trying to get the `stored_fields`. + +It is also possible to retrieve metadata fields like `_routing` and `_parent`: + +[source,js] +-------------------------------------------------- +PUT twitter/tweet/2?routing=user1 +{ + "counter" : 1, + "tags" : ["white"] +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +[source,js] +-------------------------------------------------- +GET twitter/tweet/2?routing=user1&stored_fields=tags,counter +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +The result of the above get operation is: + +[source,js] +-------------------------------------------------- +{ + "_index": "twitter", + "_type": "tweet", + "_id": "2", + "_version": 1, + "_routing": "user1", + "found": true, + "fields": { + "tags": [ + "white" + ] + } +} +-------------------------------------------------- +// TESTRESPONSE + +Also only leaf fields can be returned via the `stored_fields` option. So object fields can't be returned and such requests will fail.
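The Java API offers the same stored-fields lookup; the `setStoredFields` calls in the migrated tests earlier in this change use exactly this pattern. A minimal sketch, assuming a connected `Client` and the `twitter` index from the examples above (the wrapper class is hypothetical):

[source,java]
----------------------------------------------------------------
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;

// Illustrative only: fetch stored fields for a document via the Java client.
public class StoredFieldsGetSketch {
    public static void printTags(Client client) {
        GetResponse getResponse = client.prepareGet("twitter", "tweet", "1")
                .setStoredFields("tags", "counter") // "counter" is not stored, so it is ignored
                .get();
        // stored field values always come back as arrays
        System.out.println(getResponse.getField("tags").getValues());
    }
}
----------------------------------------------------------------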
[float] diff --git a/docs/reference/docs/multi-get.asciidoc b/docs/reference/docs/multi-get.asciidoc index 15dec9aea25..21693cc5145 100644 --- a/docs/reference/docs/multi-get.asciidoc +++ b/docs/reference/docs/multi-get.asciidoc @@ -155,7 +155,7 @@ curl 'localhost:9200/_mget' -d '{ [[mget-fields]] === Fields -Specific stored fields can be specified to be retrieved per document to get, similar to the <> parameter of the Get API. +Specific stored fields can be retrieved for each document, similar to the <> parameter of the Get API. For example: [source,js] -------------------------------------------------- curl 'localhost:9200/_mget' -d '{ "docs" : [ { "_index" : "test", "_type" : "type", "_id" : "1", - "fields" : ["field1", "field2"] + "stored_fields" : ["field1", "field2"] }, { "_index" : "test", "_type" : "type", "_id" : "2", - "fields" : ["field3", "field4"] + "stored_fields" : ["field3", "field4"] } ] }' -------------------------------------------------- -Alternatively, you can specify the `fields` parameter in the query string +Alternatively, you can specify the `stored_fields` parameter in the query string as a default to be applied to all documents. [source,js] -------------------------------------------------- -curl 'localhost:9200/test/type/_mget?fields=field1,field2' -d '{ +curl 'localhost:9200/test/type/_mget?stored_fields=field1,field2' -d '{ "docs" : [ { "_id" : "1" <1> }, { "_id" : "2", - "fields" : ["field3", "field4"] <2> + "stored_fields" : ["field3", "field4"] <2> } ] }' @@ -201,7 +201,7 @@ curl 'localhost:9200/test/type/_mget?fields=field1,field2' -d '{ [float] === Generated fields -See <> for fields are generated only when indexing. +See <> for fields generated only when indexing. [float] [[mget-routing]] diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 0cf942121af..ff4c4c657d7 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -17,11 +17,13 @@ For example, let's index a simple doc: [source,js] -------------------------------------------------- -curl -XPUT localhost:9200/test/type1/1 -d '{ +PUT test/type1/1 +{ "counter" : 1, "tags" : ["red"] -}' +} -------------------------------------------------- +// CONSOLE [float] === Scripted updates Now, we can execute a script that would increment the counter: [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ +POST test/type1/1/_update +{ "script" : { "inline": "ctx._source.counter += params.count", "lang": "painless", @@ -38,15 +41,18 @@ curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ "count" : 4 } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] We can add a tag to the list of tags (note, if the tag exists, it will still add it, since it's a list): [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ +POST test/type1/1/_update +{ "script" : { "inline": "ctx._source.tags.add(params.tag)", "lang": "painless", @@ -54,8 +60,10 @@ curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ "tag" : "blue" } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] In addition to `_source`, the following variables are available through the `ctx` map: `_index`, `_type`, `_id`, `_version`, `_routing`, @@ -65,36 +73,45 @@ We can also add a new field to the document: [source,js] -------------------------------------------------- -curl -XPOST
'localhost:9200/test/type1/1/_update' -d '{ - "script" : "ctx._source.name_of_new_field = \"value_of_new_field\"" -}' +POST test/type1/1/_update +{ + "script" : "ctx._source.new_field = \"value_of_new_field\"" +} -------------------------------------------------- +// CONSOLE +// TEST[continued] Or remove a field from the document: [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ - "script" : "ctx._source.remove(\"name_of_field\")" -}' +POST test/type1/1/_update +{ + "script" : "ctx._source.remove(\"new_field\")" +} -------------------------------------------------- +// CONSOLE +// TEST[continued] And, we can even change the operation that is executed. This example deletes -the doc if the `tags` field contain `blue`, otherwise it does nothing +the doc if the `tags` field contains `green`, otherwise it does nothing (`noop`): [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ +POST test/type1/1/_update +{ "script" : { - "inline": "ctx._source.tags.contains(params.tag) ? ctx.op = \"delete\" : ctx.op = \"none\"", + "inline": "if (ctx._source.tags.contains(params.tag)) { ctx.op = \"delete\" } else { ctx.op = \"none\" }", "lang": "painless", "params" : { - "tag" : "blue" + "tag" : "green" } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] [float] === Updates with a partial document @@ -106,31 +123,36 @@ example: [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ +POST test/type1/1/_update +{ "doc" : { "name" : "new_name" } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] If both `doc` and `script` are specified, then `doc` is ignored. Best is to put your field pairs of the partial document in the script itself. [float] === Detecting noop updates -If `doc` is specified its value is merged with the existing `_source`. By -default the document is only reindexed if the new `_source` field differs from -the old. Setting `detect_noop` to `false` will cause Elasticsearch to always -update the document even if it hasn't changed. For example: + +If `doc` is specified, its value is merged with the existing `_source`. +By default, updates that don't change anything are detected and return "result": "noop", like this: + [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ +POST test/type1/1/_update +{ "doc" : { "name" : "new_name" - }, - "detect_noop": false -}' + } +} -------------------------------------------------- +// CONSOLE +// TEST[continued] If `name` was `new_name` before the request was sent then the entire update request is ignored. The `result` element in the response returns `noop` if @@ -139,13 +161,34 @@ the request was ignored.
[source,js] -------------------------------------------------- { + "_shards": { + "total": 0, + "successful": 0, + "failed": 0 + }, "_index": "test", "_type": "type1", "_id": "1", - "_version": 1, + "_version": 6, "result": noop } -------------------------------------------------- +// TESTRESPONSE + +You can disable this behavior by setting "detect_noop": false like this: + +[source,js] +-------------------------------------------------- +POST test/type1/1/_update +{ + "doc" : { + "name" : "new_name" + }, + "detect_noop": false +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] [[upserts]] [float] @@ -157,7 +200,8 @@ will be inserted as a new document. If the document does exist, then the [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ +POST test/type1/1/_update +{ "script" : { "inline": "ctx._source.counter += params.count", "lang": "painless", @@ -168,8 +212,10 @@ curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ "upsert" : { "counter" : 1 } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] [float] ==== `scripted_upsert` @@ -180,7 +226,8 @@ or not -- i.e. the script handles initializing the document instead of the [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/sessions/session/dh3sgudg8gsrgl/_update' -d '{ +POST sessions/session/dh3sgudg8gsrgl/_update +{ "scripted_upsert":true, "script" : { "id": "my_web_session_summariser", @@ -193,7 +240,7 @@ curl -XPOST 'localhost:9200/sessions/session/dh3sgudg8gsrgl/_update' -d '{ } }, "upsert" : {} -}' +} -------------------------------------------------- [float] @@ -205,13 +252,16 @@ value: [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ +POST test/type1/1/_update +{ "doc" : { "name" : "new_name" }, "doc_as_upsert" : true -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] [float] @@ -255,10 +305,12 @@ See <> for details. Control when the changes made by this request are visible to search. See <>. -`fields`:: +`_source`:: + +Allows you to control whether and how the updated source is returned in the response. +By default the updated source is not returned. +See <> for details. -Return the relevant fields from the updated document. Specify `_source` to -return the full updated source.
`version` & `version_type`:: diff --git a/docs/reference/mapping/params.asciidoc b/docs/reference/mapping/params.asciidoc index 5134ab6733a..e7d2d7ac0c8 100644 --- a/docs/reference/mapping/params.asciidoc +++ b/docs/reference/mapping/params.asciidoc @@ -15,15 +15,11 @@ The following mapping parameters are common to some or all field datatypes: * <> * <> * <> -* <> -* <> -* <> * <> * <> * <> * <> * <> -* <> * <> * <> * <> @@ -54,12 +50,6 @@ include::params/fielddata.asciidoc[] include::params/format.asciidoc[] -include::params/geohash.asciidoc[] - -include::params/geohash-precision.asciidoc[] - -include::params/geohash-prefix.asciidoc[] - include::params/ignore-above.asciidoc[] include::params/ignore-malformed.asciidoc[] @@ -70,8 +60,6 @@ include::params/index.asciidoc[] include::params/index-options.asciidoc[] -include::params/lat-lon.asciidoc[] - include::params/multi-fields.asciidoc[] include::params/norms.asciidoc[] diff --git a/docs/reference/mapping/params/geohash-precision.asciidoc b/docs/reference/mapping/params/geohash-precision.asciidoc deleted file mode 100644 index 35020feea5f..00000000000 --- a/docs/reference/mapping/params/geohash-precision.asciidoc +++ /dev/null @@ -1,72 +0,0 @@ -[[geohash-precision]] -=== `geohash_precision` - -deprecated[5.0.0, Will be removed in the next major version.] - -Geohashes are a form of lat/lon encoding which divides the earth up into -a grid. Each cell in this grid is represented by a geohash string. Each -cell in turn can be further subdivided into smaller cells which are -represented by a longer string. So the longer the geohash, the smaller -(and thus more accurate) the cell is. - -The `geohash_precision` setting controls the length of the geohash that is -indexed when the <> option is enabled, and the maximum -geohash length when the <> option is enabled. - -It accepts: - -* a number between 1 and 12 (default), which represents the length of the geohash. -* a <>, e.g. `1km`. - -If a distance is specified, it will be translated to the smallest -geohash-length that will provide the requested resolution. 
- -For example, using this mapping: - -[source,js] --------------------------------------------------- -PUT my_index -{ - "mappings": { - "my_type": { - "properties": { - "location": { - "type": "geo_point", - "geohash_prefix": true, - "geohash_precision": 6 <1> - } - } - } - } -} --------------------------------------------------- -// CONSOLE -// TEST[warning:geo_point geohash_precision parameter is deprecated and will be removed in the next major release] -// TEST[warning:geo_point geohash_prefix parameter is deprecated and will be removed in the next major release] -// TEST[warning:geo_point geohash parameter is deprecated and will be removed in the next major release] - -You can issue this index and query: -[source,js] --------------------------------------------------- -PUT my_index/my_type/1?refresh -{ - "location": { - "lat": 41.12, - "lon": -71.34 - } -} - -GET my_index/_search?fielddata_fields=location.geohash -{ - "query": { - "term": { - "location.geohash": "drm3bt" - } - } -} - --------------------------------------------------- -// CONSOLE -// TEST[continued] - -<1> A `geohash_precision` of 6 equates to geohash cells of approximately 1.26km x 0.6km diff --git a/docs/reference/mapping/params/geohash-prefix.asciidoc b/docs/reference/mapping/params/geohash-prefix.asciidoc deleted file mode 100644 index 51dfc829947..00000000000 --- a/docs/reference/mapping/params/geohash-prefix.asciidoc +++ /dev/null @@ -1,74 +0,0 @@ -[[geohash-prefix]] -=== `geohash_prefix` - -deprecated[5.0.0, Will be removed in the next major version.] - - -Geohashes are a form of lat/lon encoding which divides the earth up into -a grid. Each cell in this grid is represented by a geohash string. Each -cell in turn can be further subdivided into smaller cells which are -represented by a longer string. So the longer the geohash, the smaller -(and thus more accurate) the cell is. - -While the <> option enables indexing the geohash that -corresponds to the lat/lon point, at the specified -<>, the `geohash_prefix` option will also -index all the enclosing cells as well. - -For instance, a geohash of `drm3btev3e86` will index all of the following -terms: [ `d`, `dr`, `drm`, `drm3`, `drm3b`, `drm3bt`, `drm3bte`, `drm3btev`, -`drm3btev3`, `drm3btev3e`, `drm3btev3e8`, `drm3btev3e86` ]. 
- -The geohash prefixes can be used with the -<> to find points within a -particular geohash, or its neighbours: - - -[source,js] --------------------------------------------------- -PUT my_index -{ - "mappings": { - "my_type": { - "properties": { - "location": { - "type": "geo_point", - "geohash_prefix": true, - "geohash_precision": 6 - } - } - } - } -} --------------------------------------------------- -// CONSOLE -// TEST[warning:geo_point geohash_precision parameter is deprecated and will be removed in the next major release] -// TEST[warning:geo_point geohash_prefix parameter is deprecated and will be removed in the next major release] -// TEST[warning:geo_point geohash parameter is deprecated and will be removed in the next major release] - -[source,js] --------------------------------------------------- -PUT my_index/my_type/1 -{ - "location": { - "lat": 41.12, - "lon": -71.34 - } -} - -GET my_index/_search?fielddata_fields=location.geohash -{ - "query": { - "geohash_cell": { - "location": { - "lat": 41.02, - "lon": -71.48 - }, - "precision": 4, <1> - "neighbors": true <1> - } - } -} --------------------------------------------------- -// CONSOLE -// TEST[continued] diff --git a/docs/reference/mapping/params/geohash.asciidoc b/docs/reference/mapping/params/geohash.asciidoc deleted file mode 100644 index 95f91fc6e86..00000000000 --- a/docs/reference/mapping/params/geohash.asciidoc +++ /dev/null @@ -1,78 +0,0 @@ -[[geohash]] -=== `geohash` - -deprecated[5.0.0, Will be removed in the next major version.] - -Geohashes are a form of lat/lon encoding which divides the earth up into -a grid. Each cell in this grid is represented by a geohash string. Each -cell in turn can be further subdivided into smaller cells which are -represented by a longer string. So the longer the geohash, the smaller -(and thus more accurate) the cell is. - -Because geohashes are just strings, they can be stored in an inverted -index like any other string, which makes querying them very efficient. - -If you enable the `geohash` option, a `geohash` ``sub-field'' will be indexed -as, eg `.geohash`. The length of the geohash is controlled by the -<> parameter. - -If the <> option is enabled, the `geohash` -option will be enabled automatically. - -For example, with this mapping: - -[source,js] --------------------------------------------------- -PUT my_index -{ - "mappings": { - "my_type": { - "properties": { - "location": { - "type": "geo_point", <1> - "geohash": true - } - } - } - } -} --------------------------------------------------- -// CONSOLE -// TEST[warning:geo_point geohash parameter is deprecated and will be removed in the next major release] - -You can index a document and use this query: - -[source,js] --------------------------------------------------- -PUT my_index/my_type/1?refresh -{ - "location": { - "lat": 41.12, - "lon": -71.34 - } -} - -GET my_index/_search?fielddata_fields=location.geohash <2> -{ - "query": { - "prefix": { - "location.geohash": "drm3b" <3> - } - } -} --------------------------------------------------- -// CONSOLE -// TEST[continued] - -<1> A `location.geohash` field will be indexed for each geo-point. -<2> The geohash can be retrieved with <>. -<3> A <> query can find all geohashes which start with a particular prefix. - -[WARNING] -============================================ - -A `prefix` query on geohashes is expensive. Instead, consider using the -<> to pay the expense once at index time -instead of on every query. 
- -============================================ diff --git a/docs/reference/mapping/params/lat-lon.asciidoc b/docs/reference/mapping/params/lat-lon.asciidoc deleted file mode 100644 index 234c652c932..00000000000 --- a/docs/reference/mapping/params/lat-lon.asciidoc +++ /dev/null @@ -1,80 +0,0 @@ -[[lat-lon]] -=== `lat_lon` - -deprecated[5.0.0, ????????] -// https://github.com/elastic/elasticsearch/issues/19792 - -<> are usually performed by plugging the value of -each <> field into a formula to determine whether it -falls into the required area or not. Unlike most queries, the inverted index -is not involved. - -Setting `lat_lon` to `true` causes the latitude and longitude values to be -indexed as numeric fields (called `.lat` and `.lon`). These fields can be used -by the <> and -<> queries instead of -performing in-memory calculations. So this mapping: - -[source,js] --------------------------------------------------- -PUT my_index -{ - "mappings": { - "my_type": { - "properties": { - "location": { - "type": "geo_point", - "lat_lon": true <1> - } - } - } - } -} --------------------------------------------------- -// CONSOLE -// TEST[warning:geo_point lat_lon parameter is deprecated and will be removed in the next major release] -<1> Setting `lat_lon` to true indexes the geo-point in the `location.lat` and `location.lon` fields. - -With this document: - -[source,js] --------------------------------------------------- -PUT my_index/my_type/1?refresh -{ - "location": { - "lat": 41.12, - "lon": -71.34 - } -} --------------------------------------------------- -// CONSOLE -// TEST[continued] - -Allows this query: - -[source,js] --------------------------------------------------- -GET my_index/_search -{ - "query": { - "geo_distance": { - "location": { - "lat": 41, - "lon": -71 - }, - "distance": "50km", - "optimize_bbox": "indexed" <1> - } - } -} --------------------------------------------------- -// CONSOLE -// TEST[continued] -// TEST[warning:Deprecated field [optimize_bbox] used, replaced by [no replacement: `optimize_bbox` is no longer supported due to recent improvements]] -<1> The `indexed` option tells the geo-distance query to use the inverted index instead of the in-memory calculation. - -Whether the in-memory or indexed operation performs better depends both on -your dataset and on the types of queries that you are running. - -NOTE: The `lat_lon` option only makes sense for single-value `geo_point` -fields. It will not work with arrays of geo-points. diff --git a/docs/reference/mapping/types/geo-point.asciidoc b/docs/reference/mapping/types/geo-point.asciidoc index 35001e5675f..a839117c7c5 100644 --- a/docs/reference/mapping/types/geo-point.asciidoc +++ b/docs/reference/mapping/types/geo-point.asciidoc @@ -5,8 +5,7 @@ Fields of type `geo_point` accept latitude-longitude pairs, which can be used: * to find geo-points within a <>, within a certain <> of a central point, - within a <>, or within a - <> cell. + or within a <>. * to aggregate documents by <> or by <> from a central point. * to integrate distance into a document's <>. @@ -101,32 +100,11 @@ The following parameters are accepted by `geo_point` fields: [horizontal] -<>:: - - Should the geo-point also be indexed as a geohash in the `.geohash` - sub-field? Defaults to `false`, unless `geohash_prefix` is `true`. - deprecated[2.4] - -<>:: - - The maximum length of the geohash to use for the `geohash` and - `geohash_prefix` options. 
deprecated[2.4]
-
-<>::
-
-    Should the geo-point also be indexed as a geohash plus all its prefixes?
-    Defaults to `false`. deprecated[2.4]
-
 <>::

     If `true`, malformed geo-points are ignored. If `false` (default),
     malformed geo-points throw an exception and reject the whole document.

-<>::
-
-    Should the geo-point also be indexed as `.lat` and `.lon` sub-fields?
-    Accepts `true` and `false` (default). deprecated[2.3]
-
 ==== Using geo-points in scripts

 When accessing the value of a geo-point in a script, the value is returned as
diff --git a/docs/reference/modules/cluster/misc.asciidoc b/docs/reference/modules/cluster/misc.asciidoc
index 57b6e8ea5c9..5bff40a8165 100644
--- a/docs/reference/modules/cluster/misc.asciidoc
+++ b/docs/reference/modules/cluster/misc.asciidoc
@@ -46,7 +46,7 @@ The settings which control logging can be updated dynamically with the
 PUT /_cluster/settings
 {
   "transient": {
-    "logger.indices.recovery": "DEBUG"
+    "logger.org.elasticsearch.indices.recovery": "DEBUG"
   }
 }
-------------------------------
diff --git a/docs/reference/modules/transport.asciidoc b/docs/reference/modules/transport.asciidoc
index 1c92eb24e4c..bf9b6e8f9d4 100644
--- a/docs/reference/modules/transport.asciidoc
+++ b/docs/reference/modules/transport.asciidoc
@@ -91,13 +91,13 @@ the JVM. It is automatically enabled when using
 === Transport Tracer

 The transport module has a dedicated tracer logger which, when activated, logs incoming and out going requests. The log can be dynamically activated
-by settings the level of the `transport.tracer` logger to `TRACE`:
+by setting the level of the `org.elasticsearch.transport.TransportService.tracer` logger to `TRACE`:

 [source,js]
--------------------------------------------------
 curl -XPUT localhost:9200/_cluster/settings -d '{
     "transient" : {
-        "logger.transport.tracer" : "TRACE"
+        "logger.org.elasticsearch.transport.TransportService.tracer" : "TRACE"
     }
 }'
--------------------------------------------------
diff --git a/docs/reference/query-dsl/geo-queries.asciidoc b/docs/reference/query-dsl/geo-queries.asciidoc
index 9aaf286526d..0bdf70f5b8d 100644
--- a/docs/reference/query-dsl/geo-queries.asciidoc
+++ b/docs/reference/query-dsl/geo-queries.asciidoc
@@ -31,11 +31,6 @@ The queries in this group are:

 Find documents with geo-points within the specified polygon.

-<> query::
-
-    Find geo-points whose geohash intersects with the geohash of the specified
-    point.
-
 include::geo-shape-query.asciidoc[]

@@ -46,5 +41,3 @@ include::geo-distance-query.asciidoc[]
 include::geo-distance-range-query.asciidoc[]

 include::geo-polygon-query.asciidoc[]
-
-include::geohash-cell-query.asciidoc[]
diff --git a/docs/reference/query-dsl/geohash-cell-query.asciidoc b/docs/reference/query-dsl/geohash-cell-query.asciidoc
deleted file mode 100644
index e6bcfea0288..00000000000
--- a/docs/reference/query-dsl/geohash-cell-query.asciidoc
+++ /dev/null
@@ -1,81 +0,0 @@
-[[query-dsl-geohash-cell-query]]
-=== Geohash Cell Query
-
-The `geohash_cell` query provides access to a hierarchy of geohashes.
-By defining a geohash cell, only <>
-within this cell will match this filter.
-
-To get this filter work all prefixes of a geohash need to be indexed. In
-example a geohash `u30` needs to be decomposed into three terms: `u30`,
-`u3` and `u`.
This decomposition must be enabled in the mapping of the
-<> field that's going to be filtered by
-setting the `geohash_prefix` option:
-
-[source,js]
---------------------------------------------------
-PUT /my_index
-{
-    "mappings" : {
-        "location": {
-            "properties": {
-                "pin": {
-                    "type": "geo_point",
-                    "geohash": true,
-                    "geohash_prefix": true,
-                    "geohash_precision": 10
-                }
-            }
-        }
-    }
-}
---------------------------------------------------
-// CONSOLE
-// TEST[warning:geo_point geohash parameter is deprecated and will be removed in the next major release]
-// TEST[warning:geo_point geohash_precision parameter is deprecated and will be removed in the next major release]
-// TEST[warning:geo_point geohash_prefix parameter is deprecated and will be removed in the next major release]
-
-The geohash cell can defined by all formats of `geo_points`. If such a cell is
-defined by a latitude and longitude pair the size of the cell needs to be
-setup. This can be done by the `precision` parameter of the filter. This
-parameter can be set to an integer value which sets the length of the geohash
-prefix. Instead of setting a geohash length directly it is also possible to
-define the precision as distance, in example `"precision": "50m"`. (See
-<>.)
-
-The `neighbor` option of the filter offers the possibility to filter cells
-next to the given cell.
-
-[source,js]
---------------------------------------------------
-GET /_search
-{
-    "query": {
-        "bool" : {
-            "must" : {
-                "match_all" : {}
-            },
-            "filter" : {
-                "geohash_cell": {
-                    "pin": {
-                        "lat": 13.4080,
-                        "lon": 52.5186
-                    },
-                    "precision": 3,
-                    "neighbors": true
-                }
-            }
-        }
-    }
-}
---------------------------------------------------
-// CONSOLE
-// TEST[continued]
-
-[float]
-==== Ignore Unmapped
-
-When set to `true` the `ignore_unmapped` option will ignore an unmapped field
-and will not match any documents for this query. This can be useful when
-querying multiple indexes which might have different mappings. When set to
-`false` (the default value) the query will throw an exception if the field
-is not mapped.
diff --git a/docs/reference/query-dsl/span-field-masking-query.asciidoc b/docs/reference/query-dsl/span-field-masking-query.asciidoc
new file mode 100644
index 00000000000..d9e96635a29
--- /dev/null
+++ b/docs/reference/query-dsl/span-field-masking-query.asciidoc
@@ -0,0 +1,43 @@
+[[query-dsl-span-field-masking-query]]
+=== Span Field Masking Query
+
+Wrapper to allow span queries to participate in composite single-field span queries by 'lying' about their search field. The span field masking query maps to Lucene's `SpanFieldMaskingQuery`.
+
+This can be used to support queries like `span-near` or `span-or` across different fields, which is not ordinarily permitted.
+
+The span field masking query is invaluable in conjunction with *multi-fields* when the same content is indexed with multiple analyzers. For instance, we could index a field with the `standard` analyzer, which breaks text up into words, and again with the `english` analyzer, which stems words into their root form.
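+
+One possible multi-field mapping behind the example below (a sketch only: this
+commit does not define a mapping, and the index, type, and field names are
+illustrative) would index the same content with both analyzers:
+
+[source,js]
+--------------------------------------------------
+PUT /my_index
+{
+  "mappings": {
+    "my_type": {
+      "properties": {
+        "text": {
+          "type": "text",
+          "analyzer": "standard",
+          "fields": {
+            "stems": {
+              "type": "text",
+              "analyzer": "english"
+            }
+          }
+        }
+      }
+    }
+  }
+}
+--------------------------------------------------
+// CONSOLE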
+ +Example: + +[source,js] +-------------------------------------------------- +GET /_search +{ + "query": { + "span_near": { + "clauses": [ + { + "span_term": { + "text": "quick brown" + } + }, + { + "field_masking_span": { + "query": { + "span_term": { + "text.stems": "fox" + } + }, + "field": "text" + } + } + ], + "slop": 5, + "in_order": false + } + } +} +-------------------------------------------------- +// CONSOLE + +Note: as span field masking query returns the masked field, scoring will be done using the norms of the field name supplied. This may lead to unexpected scoring behaviour. \ No newline at end of file diff --git a/docs/reference/query-dsl/span-queries.asciidoc b/docs/reference/query-dsl/span-queries.asciidoc index 63aad48d987..4a1a019574e 100644 --- a/docs/reference/query-dsl/span-queries.asciidoc +++ b/docs/reference/query-dsl/span-queries.asciidoc @@ -47,6 +47,9 @@ Accepts a list of span queries, but only returns those spans which also match a The result from a single span query is returned as long is its span falls within the spans returned by a list of other span queries. +<>:: + +Allows queries like `span-near` or `span-or` across different fields. include::span-term-query.asciidoc[] @@ -63,3 +66,5 @@ include::span-not-query.asciidoc[] include::span-containing-query.asciidoc[] include::span-within-query.asciidoc[] + +include::span-field-masking-query.asciidoc[] \ No newline at end of file diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 29a9986bc8f..78a8d310aaa 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -131,13 +131,6 @@ The `geo_shape` filter has been replaced by the <>. It behaves as a query in ``query context'' and as a filter in ``filter context'' (see <>). -[role="exclude",id="query-dsl-geohash-cell-filter"] -=== Geohash Cell Filter - -The `geohash_cell` filter has been replaced by the <>. -It behaves as a query in ``query context'' and as a filter in ``filter -context'' (see <>). - [role="exclude",id="query-dsl-has-child-filter"] === Has Child Filter @@ -255,6 +248,12 @@ The `fuzzy_like_this_field` or `flt_field` query has been removed. Instead use the <> parameter with the <> or the <>. +[role="exclude",id="query-dsl-geohash-cell-query"] +=== Geohash Cell Query + +The `geohash_cell` query has been removed. Instead use the +<>. 
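+
+As a migration sketch (not part of this commit; the field name and the
+coordinates are illustrative), an equivalent bounding-box search could look
+like:
+
+[source,js]
+--------------------------------------------------
+GET /_search
+{
+  "query": {
+    "geo_bounding_box": {
+      "pin.location": {
+        "top_left": { "lat": 40.73, "lon": -74.1 },
+        "bottom_right": { "lat": 40.01, "lon": -71.12 }
+      }
+    }
+  }
+}
+--------------------------------------------------
+// CONSOLE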
+ [role="exclude",id="search-more-like-this"] === More Like This API diff --git a/docs/reference/search/explain.asciidoc b/docs/reference/search/explain.asciidoc index 125f3124bff..558071eedcf 100644 --- a/docs/reference/search/explain.asciidoc +++ b/docs/reference/search/explain.asciidoc @@ -15,35 +15,70 @@ Full query example: [source,js] -------------------------------------------------- -curl -XGET 'localhost:9200/twitter/tweet/1/_explain' -d '{ +GET /twitter/tweet/0/_explain +{ "query" : { - "term" : { "message" : "search" } + "match" : { "message" : "elasticsearch" } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] This will yield the following result: [source,js] -------------------------------------------------- { - "matches" : true, + "_index" : "twitter", + "_type" : "tweet", + "_id" : "0", + "matched" : true, "explanation" : { - "value" : 0.15342641, - "description" : "fieldWeight(message:search in 0), product of:", + "value" : 1.55077, + "description" : "sum of:", "details" : [ { - "value" : 1.0, - "description" : "tf(termFreq(message:search)=1)" + "value" : 1.55077, + "description" : "weight(message:elasticsearch in 0) [PerFieldSimilarity], result of:", + "details" : [ { + "value" : 1.55077, + "description" : "score(doc=0,freq=1.0 = termFreq=1.0\n), product of:", + "details" : [ { + "value" : 1.3862944, + "description" : "idf(docFreq=1, docCount=5)", + "details" : [ ] + }, { + "value" : 1.1186441, + "description" : "tfNorm, computed from:", + "details" : [ + { "value" : 1.0, "description" : "termFreq=1.0", "details" : [ ] }, + { "value" : 1.2, "description" : "parameter k1", "details" : [ ] }, + { "value" : 0.75, "description" : "parameter b", "details" : [ ] }, + { "value" : 5.4, "description" : "avgFieldLength", "details" : [ ] }, + { "value" : 4.0, "description" : "fieldLength", "details" : [ ] } + ] + } ] + } ] }, { - "value" : 0.30685282, - "description" : "idf(docFreq=1, maxDocs=1)" - }, { - "value" : 0.5, - "description" : "fieldNorm(field=message, doc=0)" + "value" : 0.0, + "description" : "match on required clause, product of:", + "details" : [ { + "value" : 0.0, + "description" : "# clause", + "details" : [ ] + }, { + "value" : 1.0, + "description" : "_type:tweet, product of:", + "details" : [ + { "value" : 1.0, "description" : "boost", "details" : [ ] }, + { "value" : 1.0, "description" : "queryNorm", "details" : [ ] } + ] + } ] } ] } } -------------------------------------------------- +// TESTRESPONSE There is also a simpler way of specifying the query via the `q` parameter. The specified `q` parameter value is then parsed as if the @@ -52,8 +87,10 @@ explain api: [source,js] -------------------------------------------------- -curl -XGET 'localhost:9200/twitter/tweet/1/_explain?q=message:search' +GET /twitter/tweet/0/_explain?q=message:search -------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] This will yield the same result as the previous request. @@ -66,7 +103,7 @@ This will yield the same result as the previous request. Set to `true` to retrieve the `_source` of the document explained. You can also retrieve part of the document by using `_source_include` & `_source_exclude` (see <> for more details) -`fields`:: +`stored_fields`:: Allows to control which stored fields to return as part of the document explained. 
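A sketch combining the parameters above with the `q` shorthand (it reuses the
`twitter` example from this commit; the exact parameter combination is
illustrative):

[source,js]
--------------------------------------------------
GET /twitter/tweet/0/_explain?q=message:search&_source=true
--------------------------------------------------
// CONSOLE
// TEST[setup:twitter]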
diff --git a/docs/reference/search/multi-search.asciidoc b/docs/reference/search/multi-search.asciidoc
index d32cd45d87d..293fb530cfa 100644
--- a/docs/reference/search/multi-search.asciidoc
+++ b/docs/reference/search/multi-search.asciidoc
@@ -80,3 +80,36 @@ This default is based on the number of data nodes and the default search thread
 === Security

 See <>
+
+[float]
+[[template-msearch]]
+=== Template support
+
+Much like the `_search` resource described in <>, the `_msearch`
+resource also supports templates. Inline templates can be submitted as follows:
+
+[source,js]
+-----------------------------------------------
+$ cat requests
+{"index" : "main"}
+{ "inline" : "{ \"query\": { \"match_{{template}}\": {} } }", "params": { "template": "all" } }
+{"index" : "main"}
+{ "inline" : "{ \"query\": { \"match_{{template}}\": {} } }", "params": { "template": "all" } }
+
+$ curl -XGET localhost:9200/_msearch/template --data-binary @requests; echo
+-----------------------------------------------
+
+Alternatively, stored templates can be referenced by id:
+
+[source,js]
+-----------------------------------------------
+$ cat requests
+{"index" : "main"}
+{ "template": { "id": "template1" }, "params": { "q": "foo" } }
+{"index" : "main"}
+{ "template": { "id": "template2" }, "params": { "q": "bar" } }
+
+$ curl -XGET localhost:9200/_msearch/template --data-binary @requests; echo
+-----------------------------------------------
+
+
diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc
index a9adc157bd3..852710df70b 100644
--- a/docs/reference/search/request-body.asciidoc
+++ b/docs/reference/search/request-body.asciidoc
@@ -7,41 +7,49 @@ example:

 [source,js]
--------------------------------------------------
-$ curl -XGET 'http://localhost:9200/twitter/tweet/_search' -d '{
+GET /twitter/tweet/_search
+{
     "query" : {
         "term" : { "user" : "kimchy" }
     }
 }
-'
--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]

 And here is a sample response:

 [source,js]
--------------------------------------------------
 {
+    "took": 1,
+    "timed_out": false,
     "_shards":{
-        "total" : 5,
-        "successful" : 5,
+        "total" : 1,
+        "successful" : 1,
         "failed" : 0
     },
     "hits":{
         "total" : 1,
+        "max_score": 1.3862944,
         "hits" : [
             {
                 "_index" : "twitter",
                 "_type" : "tweet",
-                "_id" : "1",
+                "_id" : "0",
+                "_score": 1.3862944,
                 "_source" : {
                     "user" : "kimchy",
-                    "postDate" : "2009-11-15T14:12:12",
-                    "message" : "trying out Elasticsearch"
+                    "message": "trying out Elasticsearch",
+                    "date" : "2009-11-15T14:12:12",
+                    "likes" : 0
                 }
             }
         ]
     }
 }
--------------------------------------------------
+// TESTRESPONSE[s/"took": 1/"took": $body.took/]

 [float]
 === Parameters

@@ -105,8 +113,10 @@ matching document was found (per shard).

 [source,js]
--------------------------------------------------
-$ curl -XGET 'http://localhost:9200/_search?q=tag:wow&size=0&terminate_after=1'
+GET /_search?q=message:elasticsearch&size=0&terminate_after=1
--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]

 The response will not contain any hits as the `size` was set to `0`. The
 `hits.total` will be either equal to `0`, indicating that there were no
@@ -128,12 +138,12 @@ be set to `true` in the response.
    },
    "hits": {
        "total": 1,
-       "max_score": 0,
+       "max_score": 0.0,
        "hits": []
    }
}
--------------------------------------------------
-
+// TESTRESPONSE[s/"took": 3/"took": $body.took/]

include::request/query.asciidoc[]
diff --git a/docs/reference/search/request/rescore.asciidoc b/docs/reference/search/request/rescore.asciidoc
index 488884ef8da..179587daaaf 100644
--- a/docs/reference/search/request/rescore.asciidoc
+++ b/docs/reference/search/request/rescore.asciidoc
@@ -12,9 +12,11 @@ A `rescore` request is executed on each shard before it returns its
 results to be sorted by the node handling the overall search request.

 Currently the rescore API has only one implementation: the query
-rescorer, which uses a query to tweak the scoring. In the future,
+rescorer, which uses a query to tweak the scoring. In the future,
 alternative rescorers may be made available, for example, a pair-wise rescorer.

+NOTE: The `rescore` phase is not executed when <> is used.
+
 NOTE: when exposing pagination to your users, you should not change
 `window_size` as you step through each page (by passing different
 `from` values) since that can alter the top hits causing results to
diff --git a/docs/reference/search/request/source-filtering.asciidoc b/docs/reference/search/request/source-filtering.asciidoc
index 03c02538a0e..5f42b5422bc 100644
--- a/docs/reference/search/request/source-filtering.asciidoc
+++ b/docs/reference/search/request/source-filtering.asciidoc
@@ -5,7 +5,7 @@ Allows to control how the `_source` field is returned with every hit.

 By default operations return the contents of the `_source` field unless
-you have used the `fields` parameter or if the `_source` field is disabled.
+you have used the `stored_fields` parameter or if the `_source` field is disabled.

 You can turn off `_source` retrieval by using the `_source` parameter:
diff --git a/docs/reference/search/request/stored-fields.asciidoc b/docs/reference/search/request/stored-fields.asciidoc
index b9723b40a05..da9a0830621 100644
--- a/docs/reference/search/request/stored-fields.asciidoc
+++ b/docs/reference/search/request/stored-fields.asciidoc
@@ -38,10 +38,7 @@ GET /_search
--------------------------------------------------
 // CONSOLE

-
-For backwards compatibility, if the fields parameter specifies fields which are not stored (`store` mapping set to
-`false`), it will load the `_source` and extract it from it. This functionality has been replaced by the
-<> parameter.
+If the requested fields are not stored (`store` mapping set to `false`), they will be ignored.

 Field values fetched from the document itself are always returned as an array. Metadata fields like `_routing` and
 `_parent` fields are never returned as an array.
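To make the new behaviour concrete, here is a sketch (the `user` field is
illustrative and assumed to be mapped with `"store": true`; values of
non-stored fields should be requested through `_source` filtering instead):

[source,js]
--------------------------------------------------
GET /_search
{
  "stored_fields" : ["user"],
  "_source" : ["message"],
  "query" : {
    "match_all" : {}
  }
}
--------------------------------------------------
// CONSOLE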
diff --git a/docs/reference/search/search-shards.asciidoc b/docs/reference/search/search-shards.asciidoc index 718d47a986f..c07c3755a26 100644 --- a/docs/reference/search/search-shards.asciidoc +++ b/docs/reference/search/search-shards.asciidoc @@ -14,49 +14,27 @@ Full example: [source,js] -------------------------------------------------- -curl -XGET 'localhost:9200/twitter/_search_shards' +GET /twitter/_search_shards -------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT twitter\n/] This will yield the following result: [source,js] -------------------------------------------------- { - "nodes": { - "JklnKbD7Tyqi9TP3_Q_tBg": { - "name": "Rl'nnd", - "transport_address": "inet[/192.168.1.113:9300]" - } - }, + "nodes": ..., "shards": [ [ { "index": "twitter", "node": "JklnKbD7Tyqi9TP3_Q_tBg", "primary": true, - "relocating_node": null, - "shard": 3, - "state": "STARTED" - } - ], - [ - { - "index": "twitter", - "node": "JklnKbD7Tyqi9TP3_Q_tBg", - "primary": true, - "relocating_node": null, - "shard": 4, - "state": "STARTED" - } - ], - [ - { - "index": "twitter", - "node": "JklnKbD7Tyqi9TP3_Q_tBg", - "primary": true, - "relocating_node": null, "shard": 0, - "state": "STARTED" + "state": "STARTED", + "allocation_id": {"id":"0TvkCyF7TAmM1wHP4a42-A"}, + "relocating_node": null } ], [ @@ -64,52 +42,81 @@ This will yield the following result: "index": "twitter", "node": "JklnKbD7Tyqi9TP3_Q_tBg", "primary": true, - "relocating_node": null, - "shard": 2, - "state": "STARTED" - } - ], - [ - { - "index": "twitter", - "node": "JklnKbD7Tyqi9TP3_Q_tBg", - "primary": true, - "relocating_node": null, "shard": 1, - "state": "STARTED" + "state": "STARTED", + "allocation_id": {"id":"fMju3hd1QHWmWrIgFnI4Ww"}, + "relocating_node": null + } + ], + [ + { + "index": "twitter", + "node": "JklnKbD7Tyqi9TP3_Q_tBg", + "primary": true, + "shard": 2, + "state": "STARTED", + "allocation_id": {"id":"Nwl0wbMBTHCWjEEbGYGapg"}, + "relocating_node": null + } + ], + [ + { + "index": "twitter", + "node": "JklnKbD7Tyqi9TP3_Q_tBg", + "primary": true, + "shard": 3, + "state": "STARTED", + "allocation_id": {"id":"bU_KLGJISbW0RejwnwDPKw"}, + "relocating_node": null + } + ], + [ + { + "index": "twitter", + "node": "JklnKbD7Tyqi9TP3_Q_tBg", + "primary": true, + "shard": 4, + "state": "STARTED", + "allocation_id": {"id":"DMs7_giNSwmdqVukF7UydA"}, + "relocating_node": null } ] ] } -------------------------------------------------- +// TESTRESPONSE[s/"nodes": ...,/"nodes": $body.nodes,/] +// TESTRESPONSE[s/JklnKbD7Tyqi9TP3_Q_tBg/$body.shards.0.0.node/] +// TESTRESPONSE[s/0TvkCyF7TAmM1wHP4a42-A/$body.shards.0.0.allocation_id.id/] +// TESTRESPONSE[s/fMju3hd1QHWmWrIgFnI4Ww/$body.shards.1.0.allocation_id.id/] +// TESTRESPONSE[s/Nwl0wbMBTHCWjEEbGYGapg/$body.shards.2.0.allocation_id.id/] +// TESTRESPONSE[s/bU_KLGJISbW0RejwnwDPKw/$body.shards.3.0.allocation_id.id/] +// TESTRESPONSE[s/DMs7_giNSwmdqVukF7UydA/$body.shards.4.0.allocation_id.id/] And specifying the same request, this time with a routing value: [source,js] -------------------------------------------------- -curl -XGET 'localhost:9200/twitter/_search_shards?routing=foo,baz' +GET /twitter/_search_shards?routing=foo,baz -------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT twitter\n/] This will yield the following result: [source,js] -------------------------------------------------- { - "nodes": { - "JklnKbD7Tyqi9TP3_Q_tBg": { - "name": "Rl'nnd", - "transport_address": "inet[/192.168.1.113:9300]" - } - }, + "nodes": ..., "shards": [ [ { 
"index": "twitter", "node": "JklnKbD7Tyqi9TP3_Q_tBg", "primary": true, - "relocating_node": null, - "shard": 2, - "state": "STARTED" + "shard": 0, + "state": "STARTED", + "allocation_id": {"id":"0TvkCyF7TAmM1wHP4a42-A"}, + "relocating_node": null } ], [ @@ -117,14 +124,19 @@ This will yield the following result: "index": "twitter", "node": "JklnKbD7Tyqi9TP3_Q_tBg", "primary": true, - "relocating_node": null, - "shard": 4, - "state": "STARTED" + "shard": 1, + "state": "STARTED", + "allocation_id": {"id":"fMju3hd1QHWmWrIgFnI4Ww"}, + "relocating_node": null } ] ] } -------------------------------------------------- +// TESTRESPONSE[s/"nodes": ...,/"nodes": $body.nodes,/] +// TESTRESPONSE[s/JklnKbD7Tyqi9TP3_Q_tBg/$body.shards.0.0.node/] +// TESTRESPONSE[s/0TvkCyF7TAmM1wHP4a42-A/$body.shards.0.0.allocation_id.id/] +// TESTRESPONSE[s/fMju3hd1QHWmWrIgFnI4Ww/$body.shards.1.0.allocation_id.id/] This time the search will only be executed against two of the shards, because routing values have been specified. diff --git a/docs/reference/search/search.asciidoc b/docs/reference/search/search.asciidoc index 40118e76931..eccba57dee1 100644 --- a/docs/reference/search/search.asciidoc +++ b/docs/reference/search/search.asciidoc @@ -17,38 +17,48 @@ twitter index: [source,js] -------------------------------------------------- -$ curl -XGET 'http://localhost:9200/twitter/_search?q=user:kimchy' +GET /twitter/_search?q=user:kimchy -------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] We can also search within specific types: [source,js] -------------------------------------------------- -$ curl -XGET 'http://localhost:9200/twitter/tweet,user/_search?q=user:kimchy' +GET /twitter/tweet,user/_search?q=user:kimchy -------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] We can also search all tweets with a certain tag across several indices (for example, when each user has his own index): [source,js] -------------------------------------------------- -$ curl -XGET 'http://localhost:9200/kimchy,elasticsearch/tweet/_search?q=tag:wow' +GET /kimchy,elasticsearch/tweet/_search?q=tag:wow -------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT kimchy\nPUT elasticsearch\n/] Or we can search all tweets across all available indices using `_all` placeholder: [source,js] -------------------------------------------------- -$ curl -XGET 'http://localhost:9200/_all/tweet/_search?q=tag:wow' +GET /_all/tweet/_search?q=tag:wow -------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] Or even search across all indices and all types: [source,js] -------------------------------------------------- -$ curl -XGET 'http://localhost:9200/_search?q=tag:wow' +GET /_search?q=tag:wow -------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] By default elasticsearch rejects search requests that would query more than 1000 shards. The reason is that such large numbers of shards make the job of diff --git a/docs/reference/search/suggesters/context-suggest.asciidoc b/docs/reference/search/suggesters/context-suggest.asciidoc index 293e8bb573c..66e19eaadb5 100644 --- a/docs/reference/search/suggesters/context-suggest.asciidoc +++ b/docs/reference/search/suggesters/context-suggest.asciidoc @@ -220,7 +220,6 @@ at index time. At query time, suggestions can be filtered and boosted if they ar a certain distance of a specified geo location. 
Internally, geo points are encoded as geohashes with the specified precision.
-See <> for more background details.

[float]
===== Geo Mapping
diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc
index 518fb24a8bd..8db4f6bae59 100644
--- a/docs/reference/setup/configuration.asciidoc
+++ b/docs/reference/setup/configuration.asciidoc
@@ -148,10 +148,10 @@ will be compressed as they are rolled.
 Multiple configuration files can be loaded (in which case they will get merged)
 as long as they are named `log4j2.properties` and have the Elasticsearch config
 directory as an ancestor; this is useful for plugins that expose additional
-loggers. The logger section contains the java packages and their corresponding
-log level, where it is possible to omit the `org.elasticsearch` prefix. The
-appender section contains the destinations for the logs. Extensive information
-on how to customize logging and all the supported appenders can be found on the
+loggers. The logger section contains the java packages and their corresponding
+log level. The appender section contains the destinations for the logs.
+Extensive information on how to customize logging and all the supported
+appenders can be found on the
 http://logging.apache.org/log4j/2.x/manual/configuration.html[Log4j
 documentation].
diff --git a/docs/reference/setup/install/systemd.asciidoc b/docs/reference/setup/install/systemd.asciidoc
index 035932a83f8..bf94e95fb63 100644
--- a/docs/reference/setup/install/systemd.asciidoc
+++ b/docs/reference/setup/install/systemd.asciidoc
@@ -18,13 +18,36 @@ sudo systemctl stop elasticsearch.service
--------------------------------------------

 These commands provide no feedback as to whether Elasticsearch was started
-successfully or not. Instead, this information will be written to the
-`systemd` journal, which can be tailed as follows:
+successfully or not. Instead, this information will be written to the log
+files located in `/var/log/elasticsearch/`.
+
+By default, the Elasticsearch service doesn't log information in the `systemd`
+journal. To enable `journalctl` logging, the `--quiet` option must be removed
+from the `ExecStart` command line in the `elasticsearch.service` file.
+
+When `systemd` logging is enabled, the logging information is available using
+the `journalctl` commands:
+
+To tail the journal:

 [source,sh]
--------------------------------------------
 sudo journalctl -f
--------------------------------------------

-Log files can be found in `/var/log/elasticsearch/`.
+To list journal entries for the elasticsearch service:
+
+[source,sh]
+--------------------------------------------
+sudo journalctl --unit elasticsearch
+--------------------------------------------
+
+To list journal entries for the elasticsearch service starting from a given time:
+
+[source,sh]
+--------------------------------------------
+sudo journalctl --unit elasticsearch --since "2016-10-30 18:17:16"
+--------------------------------------------
+
+Check `man journalctl` or https://www.freedesktop.org/software/systemd/man/journalctl.html for
+more command-line options.
diff --git a/docs/reference/setup/install/zip-targz.asciidoc b/docs/reference/setup/install/zip-targz.asciidoc index 5f85447bdcd..14421795e05 100644 --- a/docs/reference/setup/install/zip-targz.asciidoc +++ b/docs/reference/setup/install/zip-targz.asciidoc @@ -52,11 +52,14 @@ Elasticsearch can be started from the command line as follows: ./bin/elasticsearch -------------------------------------------- -By default, Elasticsearch runs in the foreground, prints its logs to `STDOUT`, -and can be stopped by pressing `Ctrl-C`. +By default, Elasticsearch runs in the foreground, prints its logs to the +standard output (`stdout`), and can be stopped by pressing `Ctrl-C`. include::check-running.asciidoc[] +Log printing to `stdout` can be disabled using the `-q` or `--quiet` +option on the command line. + [[setup-installation-daemon]] ==== Running as a daemon diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequest.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequest.java index 4509ab3cb50..98aaa891640 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequest.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequest.java @@ -242,8 +242,8 @@ public class PercolateRequest extends ActionRequest implements if (source == null && getRequest == null) { validationException = addValidationError("source or get is missing", validationException); } - if (getRequest != null && getRequest.fields() != null) { - validationException = addValidationError("get fields option isn't supported via percolate request", validationException); + if (getRequest != null && getRequest.storedFields() != null) { + validationException = addValidationError("get stored fields option isn't supported via percolate request", validationException); } return validationException; } diff --git a/plugins/discovery-file/build.gradle b/plugins/discovery-file/build.gradle new file mode 100644 index 00000000000..36fd87e37a9 --- /dev/null +++ b/plugins/discovery-file/build.gradle @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +esplugin { + description 'Discovery file plugin enables unicast discovery from hosts stored in a file.' 
+ classname 'org.elasticsearch.discovery.file.FileBasedDiscoveryPlugin'
+}
+
+bundlePlugin {
+  from('config/discovery-file') {
+    into 'config'
+  }
+}
diff --git a/plugins/discovery-file/config/discovery-file/unicast_hosts.txt b/plugins/discovery-file/config/discovery-file/unicast_hosts.txt
new file mode 100644
index 00000000000..5e265e0f295
--- /dev/null
+++ b/plugins/discovery-file/config/discovery-file/unicast_hosts.txt
@@ -0,0 +1,20 @@
+# The unicast_hosts.txt file contains the list of unicast hosts to connect to
+# for pinging during the discovery process, when using the file-based discovery
+# mechanism. This file should contain one entry per line, where an entry is a
+# host/port combination. The host and port should be separated by a `:`. If
+# the port is left off, a default port of 9300 is assumed. For example, if the
+# cluster has three nodes that participate in the discovery process:
+# (1) 66.77.88.99 running on port 9300, (2) 66.77.88.100 running on port 9305,
+# and (3) 66.77.88.99 running on port 10005, then this file should contain the
+# following text:
+#
+#66.77.88.99
+#66.77.88.100:9305
+#66.77.88.99:10005
+#
+# For IPv6 addresses, make sure to put brackets around the host part of the address,
+# for example: [2001:cdba:0000:0000:0000:0000:3257:9652]:9301 (where 9301 is the port).
+#
+# NOTE: all lines starting with a `#` are comments, and comments must exist
+# on lines of their own (i.e. comments cannot begin in the middle of a line)
+#
\ No newline at end of file
diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java
new file mode 100644
index 00000000000..f781a3b7fe9
--- /dev/null
+++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.file;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.discovery.DiscoveryModule;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.plugins.DiscoveryPlugin;
+import org.elasticsearch.plugins.Plugin;
+
+/**
+ * Plugin for providing file-based unicast hosts discovery. The list of unicast hosts
+ * is obtained by reading the {@link FileBasedUnicastHostsProvider#UNICAST_HOSTS_FILE} in
+ * the {@link Environment#configFile()}/discovery-file directory.
+ */ +public class FileBasedDiscoveryPlugin extends Plugin implements DiscoveryPlugin { + + private static final Logger logger = Loggers.getLogger(FileBasedDiscoveryPlugin.class); + + private final Settings settings; + + public FileBasedDiscoveryPlugin(Settings settings) { + this.settings = settings; + logger.trace("starting file-based discovery plugin..."); + } + + public void onModule(DiscoveryModule discoveryModule) { + logger.trace("registering file-based unicast hosts provider"); + // using zen discovery for the discovery type and we're just adding a unicast host provider for it + discoveryModule.addUnicastHostProvider("zen", FileBasedUnicastHostsProvider.class); + } +} diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java new file mode 100644 index 00000000000..78393d34001 --- /dev/null +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.discovery.file; + +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.zen.ping.unicast.UnicastHostsProvider; +import org.elasticsearch.env.Environment; +import org.elasticsearch.transport.TransportService; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing.resolveDiscoveryNodes; + +/** + * An implementation of {@link UnicastHostsProvider} that reads hosts/ports + * from {@link #UNICAST_HOSTS_FILE}. + * + * Each unicast host/port that is part of the discovery process must be listed on + * a separate line. If the port is left off an entry, a default port of 9300 is + * assumed. 
An example unicast hosts file could read:
+ *
+ * 67.81.244.10
+ * 67.81.244.11:9305
+ * 67.81.244.15:9400
+ */
+public class FileBasedUnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider {
+
+    static final String UNICAST_HOSTS_FILE = "unicast_hosts.txt";
+    static final String UNICAST_HOST_PREFIX = "#zen_file_unicast_host_";
+
+    private final TransportService transportService;
+
+    private final Path unicastHostsFilePath;
+
+    private final AtomicLong nodeIdGenerator = new AtomicLong(); // generates unique ids for the node
+
+    @Inject
+    public FileBasedUnicastHostsProvider(Settings settings, TransportService transportService) {
+        super(settings);
+        this.transportService = transportService;
+        this.unicastHostsFilePath = new Environment(settings).configFile().resolve("discovery-file").resolve(UNICAST_HOSTS_FILE);
+    }
+
+    @Override
+    public List<DiscoveryNode> buildDynamicNodes() {
+        List<String> hostsList;
+        try (Stream<String> lines = Files.lines(unicastHostsFilePath)) {
+            hostsList = lines.filter(line -> line.startsWith("#") == false) // lines starting with `#` are comments
+                            .collect(Collectors.toList());
+        } catch (FileNotFoundException | NoSuchFileException e) {
+            logger.warn((Supplier<?>) () -> new ParameterizedMessage("[discovery-file] Failed to find unicast hosts file [{}]",
+                unicastHostsFilePath), e);
+            hostsList = Collections.emptyList();
+        } catch (IOException e) {
+            logger.warn((Supplier<?>) () -> new ParameterizedMessage("[discovery-file] Error reading unicast hosts file [{}]",
+                unicastHostsFilePath), e);
+            hostsList = Collections.emptyList();
+        }
+
+        final List<DiscoveryNode> discoNodes = new ArrayList<>();
+        for (final String host : hostsList) {
+            try {
+                discoNodes.addAll(resolveDiscoveryNodes(host, 1, transportService,
+                    () -> UNICAST_HOST_PREFIX + nodeIdGenerator.incrementAndGet() + "#"));
+            } catch (IllegalArgumentException e) {
+                logger.warn((Supplier<?>) () -> new ParameterizedMessage("[discovery-file] Failed to parse transport address from [{}]",
+                    host), e);
+                continue;
+            }
+        }
+
+        logger.debug("[discovery-file] Using dynamic discovery nodes {}", discoNodes);
+
+        return discoNodes;
+    }
+
+}
diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryClientYamlTestSuiteIT.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryClientYamlTestSuiteIT.java
new file mode 100644
index 00000000000..45905a152ce
--- /dev/null
+++ b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryClientYamlTestSuiteIT.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.elasticsearch.discovery.file; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; + +import java.io.IOException; + +/** + * Integration tests to make sure the file-based discovery plugin works in a cluster. + */ +public class FileBasedDiscoveryClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + + public FileBasedDiscoveryClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, ClientYamlTestParseException { + return ESClientYamlSuiteTestCase.createParameters(0, 1); + } +} diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java new file mode 100644 index 00000000000..abd91c8c07f --- /dev/null +++ b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java @@ -0,0 +1,139 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.discovery.file; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.env.Environment; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.MockTcpTransport; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.io.BufferedWriter; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.discovery.file.FileBasedUnicastHostsProvider.UNICAST_HOSTS_FILE; +import static org.elasticsearch.discovery.file.FileBasedUnicastHostsProvider.UNICAST_HOST_PREFIX; + +/** + * Tests for {@link FileBasedUnicastHostsProvider}. 
+ */
+public class FileBasedUnicastHostsProviderTests extends ESTestCase {
+
+    private static ThreadPool threadPool;
+    private MockTransportService transportService;
+
+    @BeforeClass
+    public static void createThreadPool() {
+        threadPool = new TestThreadPool(FileBasedUnicastHostsProviderTests.class.getName());
+    }
+
+    @AfterClass
+    public static void stopThreadPool() throws InterruptedException {
+        terminate(threadPool);
+    }
+
+    @Before
+    public void createTransportSvc() {
+        MockTcpTransport transport =
+            new MockTcpTransport(Settings.EMPTY,
+                                    threadPool,
+                                    BigArrays.NON_RECYCLING_INSTANCE,
+                                    new NoneCircuitBreakerService(),
+                                    new NamedWriteableRegistry(Collections.emptyList()),
+                                    new NetworkService(Settings.EMPTY, Collections.emptyList()));
+        transportService = new MockTransportService(Settings.EMPTY, transport, threadPool);
+    }
+
+    public void testBuildDynamicNodes() throws Exception {
+        final List<String> hostEntries = Arrays.asList("#comment, should be ignored", "192.168.0.1", "192.168.0.2:9305", "255.255.23.15");
+        final List<DiscoveryNode> nodes = setupAndRunHostProvider(hostEntries);
+        assertEquals(hostEntries.size() - 1, nodes.size()); // minus 1 because we are ignoring the first line that's a comment
+        assertEquals("192.168.0.1", nodes.get(0).getAddress().getHost());
+        assertEquals(9300, nodes.get(0).getAddress().getPort());
+        assertEquals(UNICAST_HOST_PREFIX + "1#", nodes.get(0).getId());
+        assertEquals("192.168.0.2", nodes.get(1).getAddress().getHost());
+        assertEquals(9305, nodes.get(1).getAddress().getPort());
+        assertEquals(UNICAST_HOST_PREFIX + "2#", nodes.get(1).getId());
+        assertEquals("255.255.23.15", nodes.get(2).getAddress().getHost());
+        assertEquals(9300, nodes.get(2).getAddress().getPort());
+        assertEquals(UNICAST_HOST_PREFIX + "3#", nodes.get(2).getId());
+    }
+
+    public void testEmptyUnicastHostsFile() throws Exception {
+        final List<String> hostEntries = Collections.emptyList();
+        final List<DiscoveryNode> nodes = setupAndRunHostProvider(hostEntries);
+        assertEquals(0, nodes.size());
+    }
+
+    public void testUnicastHostsDoesNotExist() throws Exception {
+        final Settings settings = Settings.builder()
+                                      .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+                                      .build();
+        final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(settings, transportService);
+        final List<DiscoveryNode> nodes = provider.buildDynamicNodes();
+        assertEquals(0, nodes.size());
+    }
+
+    public void testInvalidHostEntries() throws Exception {
+        List<String> hostEntries = Arrays.asList("192.168.0.1:9300:9300");
+        List<DiscoveryNode> nodes = setupAndRunHostProvider(hostEntries);
+        assertEquals(0, nodes.size());
+    }
+
+    public void testSomeInvalidHostEntries() throws Exception {
+        List<String> hostEntries = Arrays.asList("192.168.0.1:9300:9300", "192.168.0.1:9301");
+        List<DiscoveryNode> nodes = setupAndRunHostProvider(hostEntries);
+        assertEquals(1, nodes.size()); // only one of the two is valid and will be used
+        assertEquals("192.168.0.1", nodes.get(0).getAddress().getHost());
+        assertEquals(9301, nodes.get(0).getAddress().getPort());
+    }
+
+    // sets up the config dir, writes to the unicast hosts file in the config dir,
+    // and then runs the file-based unicast host provider to get the list of discovery nodes
+    private List<DiscoveryNode> setupAndRunHostProvider(final List<String> hostEntries) throws IOException {
+        final Path homeDir = createTempDir();
+        final Settings settings = Settings.builder()
+                                      .put(Environment.PATH_HOME_SETTING.getKey(), homeDir)
+                                      .build();
+        final Path configDir = homeDir.resolve("config").resolve("discovery-file");
+        Files.createDirectories(configDir);
+        final Path
unicastHostsPath = configDir.resolve(UNICAST_HOSTS_FILE); + try (BufferedWriter writer = Files.newBufferedWriter(unicastHostsPath)) { + writer.write(String.join("\n", hostEntries)); + } + + return new FileBasedUnicastHostsProvider(settings, transportService).buildDynamicNodes(); + } +} diff --git a/plugins/discovery-file/src/test/resources/rest-api-spec/test/discovery_file/10_basic.yaml b/plugins/discovery-file/src/test/resources/rest-api-spec/test/discovery_file/10_basic.yaml new file mode 100644 index 00000000000..74ba6b54e3c --- /dev/null +++ b/plugins/discovery-file/src/test/resources/rest-api-spec/test/discovery_file/10_basic.yaml @@ -0,0 +1,13 @@ +# Integration tests for file-based discovery +# +"Discovery File loaded": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - match: { nodes.$master.plugins.0.name: discovery-file } diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 5923e3b690e..95a0b85dba3 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -97,7 +97,9 @@ public final class GeoIpProcessor extends AbstractProcessor { throw new ElasticsearchParseException("Unsupported database type [" + dbReader.getMetadata().getDatabaseType() + "]", new IllegalStateException()); } - ingestDocument.setFieldValue(targetField, geoData); + if (geoData.isEmpty() == false) { + ingestDocument.setFieldValue(targetField, geoData); + } } @Override @@ -149,28 +151,50 @@ public final class GeoIpProcessor extends AbstractProcessor { geoData.put("ip", NetworkAddress.format(ipAddress)); break; case COUNTRY_ISO_CODE: - geoData.put("country_iso_code", country.getIsoCode()); + String countryIsoCode = country.getIsoCode(); + if (countryIsoCode != null) { + geoData.put("country_iso_code", countryIsoCode); + } break; case COUNTRY_NAME: - geoData.put("country_name", country.getName()); + String countryName = country.getName(); + if (countryName != null) { + geoData.put("country_name", countryName); + } break; case CONTINENT_NAME: - geoData.put("continent_name", continent.getName()); + String continentName = continent.getName(); + if (continentName != null) { + geoData.put("continent_name", continentName); + } break; case REGION_NAME: - geoData.put("region_name", subdivision.getName()); + String subdivisionName = subdivision.getName(); + if (subdivisionName != null) { + geoData.put("region_name", subdivisionName); + } break; case CITY_NAME: - geoData.put("city_name", city.getName()); + String cityName = city.getName(); + if (cityName != null) { + geoData.put("city_name", cityName); + } break; case TIMEZONE: - geoData.put("timezone", location.getTimeZone()); + String locationTimeZone = location.getTimeZone(); + if (locationTimeZone != null) { + geoData.put("timezone", locationTimeZone); + } break; case LOCATION: - Map locationObject = new HashMap<>(); - locationObject.put("lat", location.getLatitude()); - locationObject.put("lon", location.getLongitude()); - geoData.put("location", locationObject); + Double latitude = location.getLatitude(); + Double longitude = location.getLongitude(); + if (latitude != null && longitude != null) { + Map locationObject = new HashMap<>(); + locationObject.put("lat", latitude); + locationObject.put("lon", longitude); + 
geoData.put("location", locationObject); + } break; } } @@ -202,13 +226,22 @@ public final class GeoIpProcessor extends AbstractProcessor { geoData.put("ip", NetworkAddress.format(ipAddress)); break; case COUNTRY_ISO_CODE: - geoData.put("country_iso_code", country.getIsoCode()); + String countryIsoCode = country.getIsoCode(); + if (countryIsoCode != null) { + geoData.put("country_iso_code", countryIsoCode); + } break; case COUNTRY_NAME: - geoData.put("country_name", country.getName()); + String countryName = country.getName(); + if (countryName != null) { + geoData.put("country_name", countryName); + } break; case CONTINENT_NAME: - geoData.put("continent_name", continent.getName()); + String continentName = continent.getName(); + if (continentName != null) { + geoData.put("continent_name", continentName); + } break; } } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index f3141c735d2..71bb4f65f85 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -33,6 +33,8 @@ import java.util.zip.GZIPInputStream; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; public class GeoIpProcessorTests extends ESTestCase { @@ -63,6 +65,23 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(geoData.get("location"), equalTo(location)); } + public void testCityWithMissingLocation() throws Exception { + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class)); + + Map document = new HashMap<>(); + document.put("source_field", "93.114.45.13"); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + processor.execute(ingestDocument); + + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("93.114.45.13")); + @SuppressWarnings("unchecked") + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + assertThat(geoData.size(), equalTo(1)); + assertThat(geoData.get("ip"), equalTo("93.114.45.13")); + } + public void testCountry() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb.gz"); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", @@ -83,6 +102,23 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(geoData.get("continent_name"), equalTo("Europe")); } + public void testCountryWithMissingLocation() throws Exception { + InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb.gz"); + GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class)); + + Map document = new HashMap<>(); + document.put("source_field", "93.114.45.13"); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + processor.execute(ingestDocument); + + 
assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo("93.114.45.13")); + @SuppressWarnings("unchecked") + Map<String, Object> geoData = (Map<String, Object>) ingestDocument.getSourceAndMetadata().get("target_field"); + assertThat(geoData.size(), equalTo(1)); + assertThat(geoData.get("ip"), equalTo("93.114.45.13")); + } + public void testAddressIsNotInTheDatabase() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", @@ -92,9 +128,7 @@ public class GeoIpProcessorTests extends ESTestCase { document.put("source_field", "127.0.0.1"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); - @SuppressWarnings("unchecked") - Map<String, Object> geoData = (Map<String, Object>) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData.size(), equalTo(0)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("target_field"), is(false)); } /** Don't silently do DNS lookups or anything trappy on bogus data */ diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml index 33e9ec1ca46..f662f34ab57 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml @@ -122,3 +122,76 @@ - length: { _source.geoip: 2 } - match: { _source.geoip.country_iso_code: "US" } - match: { _source.geoip.continent_name: "North America" } + +--- +"Test geoip processor with geopoint mapping (both missing and including location)": + - do: + indices.create: + index: test + body: > + { + "mappings" : { + "test" : { + "properties" : { + "geoip.location" : { + "type": "geo_point" + } + } + } + } + } + - match: { acknowledged: true } + + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "geoip" : { + "field" : "field1" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: { field1: "93.114.45.13" } + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.field1: "93.114.45.13" } + - is_false: _source.geoip + + - do: + index: + index: test + type: test + id: 2 + pipeline: "my_pipeline" + body: { field1: "128.101.101.101" } + + - do: + get: + index: test + type: test + id: 2 + - match: { _source.field1: "128.101.101.101" } + - length: { _source.geoip: 5 } + - match: { _source.geoip.city_name: "Minneapolis" } + - match: { _source.geoip.country_iso_code: "US" } + - match: { _source.geoip.location.lon: -93.2166 } + - match: { _source.geoip.location.lat: 44.9759 } + - match: { _source.geoip.region_name: "Minnesota" } + - match: { _source.geoip.continent_name: "North America" } diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java index 9e3799a5d5a..c4de761c8e7 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java @@ -108,7 +108,7 @@ public class SizeMappingIT extends ESIntegTestCase { final String source = 
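The GeoIpProcessor hunks above all reduce to one guard per property, plus the isEmpty() check that keeps an empty geoData map off the document. A minimal Java sketch of the shared pattern, with a hypothetical helper name (the diff inlines these checks rather than extracting them):

private static void putIfNotNull(Map<String, Object> geoData, String key, Object value) {
    // GeoLite2 records can be sparse; copy a property into geoData only when
    // the database returned a value, so no null entries reach the document.
    if (value != null) {
        geoData.put(key, value);
    }
}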
"{\"f\":10}"; indexRandom(true, client().prepareIndex("test", "type", "1").setSource(source)); - GetResponse getResponse = client().prepareGet("test", "type", "1").setFields("_size").get(); + GetResponse getResponse = client().prepareGet("test", "type", "1").setStoredFields("_size").get(); assertNotNull(getResponse.getField("_size")); assertEquals(source.length(), getResponse.getField("_size").getValue()); } diff --git a/plugins/mapper-size/src/test/resources/rest-api-spec/test/mapper_size/10_basic.yaml b/plugins/mapper-size/src/test/resources/rest-api-spec/test/mapper_size/10_basic.yaml index 9e76645e26e..f9a41e52150 100644 --- a/plugins/mapper-size/src/test/resources/rest-api-spec/test/mapper_size/10_basic.yaml +++ b/plugins/mapper-size/src/test/resources/rest-api-spec/test/mapper_size/10_basic.yaml @@ -26,7 +26,7 @@ index: test type: type1 id: 1 - fields: "_size" + stored_fields: "_size" - match: { _size: 13 } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java index 8bd2451da57..23da3a99df5 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java @@ -38,7 +38,7 @@ public class EvilElasticsearchCliTests extends ESElasticsearchCliTestCase { ExitCodes.OK, true, output -> {}, - (foreground, pidFile, esSettings) -> { + (foreground, pidFile, quiet, esSettings) -> { assertThat(esSettings.size(), equalTo(1)); assertThat(esSettings, hasEntry("path.home", value)); }); @@ -49,7 +49,7 @@ public class EvilElasticsearchCliTests extends ESElasticsearchCliTestCase { ExitCodes.OK, true, output -> {}, - (foreground, pidFile, esSettings) -> { + (foreground, pidFile, quiet, esSettings) -> { assertThat(esSettings.size(), equalTo(1)); assertThat(esSettings, hasEntry("path.home", commandLineValue)); }, diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java index cfa54b8c001..40759f29f68 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java @@ -87,7 +87,7 @@ public class EvilLoggerConfigurationTests extends ESTestCase { public void testDefaults() throws IOException { final Path configDir = getDataPath("config"); - final String level = randomFrom(Level.values()).toString(); + final String level = randomFrom(Level.TRACE, Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR).toString(); final Settings settings = Settings.builder() .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) @@ -96,7 +96,7 @@ public class EvilLoggerConfigurationTests extends ESTestCase { final Environment environment = new Environment(settings); LogConfigurator.configure(environment, true); - final String loggerName = Loggers.commonPrefix + "test"; + final String loggerName = "test"; final Logger logger = ESLoggerFactory.getLogger(loggerName); assertThat(logger.getLevel().toString(), equalTo(level)); } @@ -113,9 +113,28 @@ public class EvilLoggerConfigurationTests extends ESTestCase { LogConfigurator.configure(environment, true); // args should overwrite whatever is in the config - 
final String loggerName = Loggers.commonPrefix + "test_resolve_order"; + final String loggerName = "test_resolve_order"; final Logger logger = ESLoggerFactory.getLogger(loggerName); assertTrue(logger.isTraceEnabled()); } + public void testHierarchy() throws Exception { + final Path configDir = getDataPath("hierarchy"); + final Settings settings = Settings.builder() + .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + final Environment environment = new Environment(settings); + LogConfigurator.configure(environment, true); + + assertThat(ESLoggerFactory.getLogger("x").getLevel(), equalTo(Level.TRACE)); + assertThat(ESLoggerFactory.getLogger("x.y").getLevel(), equalTo(Level.DEBUG)); + + final Level level = randomFrom(Level.TRACE, Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR); + Loggers.setLevel(ESLoggerFactory.getLogger("x"), level); + + assertThat(ESLoggerFactory.getLogger("x").getLevel(), equalTo(level)); + assertThat(ESLoggerFactory.getLogger("x.y").getLevel(), equalTo(level)); + } + } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index 73fef7432f9..4d7d450bb18 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -27,6 +27,7 @@ import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.ConsoleAppender; import org.apache.logging.log4j.core.appender.CountingNoOpAppender; import org.apache.logging.log4j.core.config.Configurator; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -34,8 +35,9 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.hamcrest.RegexMatcher; import javax.management.MBeanServerPermission; - import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; import java.nio.file.Files; import java.nio.file.Path; import java.security.AccessControlException; @@ -46,6 +48,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; public class EvilLoggerTests extends ESTestCase { @@ -108,6 +111,35 @@ public class EvilLoggerTests extends ESTestCase { assertThat(countingNoOpAppender.getName(), equalTo("counting_no_op")); } + public void testPrefixLogger() throws IOException, IllegalAccessException { + setupLogging("prefix"); + + final String prefix = randomBoolean() ? 
null : randomAsciiOfLength(16); + final Logger logger = Loggers.getLogger("prefix", prefix); + logger.info("test"); + logger.info("{}", "test"); + final Exception e = new Exception("exception"); + logger.info(new ParameterizedMessage("{}", "test"), e); + + final String path = System.getProperty("es.logs") + ".log"; + final List<String> events = Files.readAllLines(PathUtils.get(path)); + + final StringWriter sw = new StringWriter(); + final PrintWriter pw = new PrintWriter(sw); + e.printStackTrace(pw); + final int stackTraceLength = sw.toString().split(System.getProperty("line.separator")).length; + final int expectedLogLines = 3; + assertThat(events.size(), equalTo(expectedLogLines + stackTraceLength)); + for (int i = 0; i < expectedLogLines; i++) { + if (prefix == null) { + assertThat(events.get(i), startsWith("test")); + } else { + assertThat(events.get(i), startsWith("[" + prefix + "] test")); + } + } + } + + public void testLog4jShutdownHack() { final AtomicBoolean denied = new AtomicBoolean(); final SecurityManager sm = System.getSecurityManager(); @@ -115,26 +147,24 @@ System.setSecurityManager(new SecurityManager() { @Override public void checkPermission(Permission perm) { - if (perm instanceof RuntimePermission && "setSecurityManager".equals(perm.getName())) { - // so we can restore the security manager at the end of the test - return; - } + // just grant all permissions to Log4j, except we deny MBeanServerPermission + // "createMBeanServer" as this will trigger the Log4j bug if (perm instanceof MBeanServerPermission && "createMBeanServer".equals(perm.getName())) { // without the hack in place, Log4j will try to get an MBean server which we will deny // with the hack in place, this permission should never be requested by Log4j denied.set(true); throw new AccessControlException("denied"); } - super.checkPermission(perm); } @Override public void checkPropertyAccess(String key) { - // so that Log4j can check if its usage of JMX is disabled or not - if ("log4j2.disable.jmx".equals(key)) { - return; - } - super.checkPropertyAccess(key); + /* + * grant access to all properties; this is so that Log4j can check if its usage + * of JMX is disabled or not by reading log4j2.disable.jmx but there are other + * properties that Log4j will try to read as well and it's simpler to just grant + * them all + */ } }); diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties index ec99b136237..aca53f81c1b 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties @@ -3,13 +3,13 @@ status = error appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = [%p][%l] %m%n +appender.file.layout.pattern = [%p][%l] %marker%m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console @@ -25,7 +25,7 @@ appender.deprecation_file.type = File appender.deprecation_file.name = deprecation_file appender.deprecation_file.fileName = 
${sys:es.logs}_deprecation.log appender.deprecation_file.layout.type = PatternLayout -appender.deprecation_file.layout.pattern = [%p][%l] %m%n +appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n logger.deprecation.name = deprecation logger.deprecation.level = warn diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/second/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/second/log4j2.properties index 2ade4c896c3..9d59e79c08a 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/second/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/second/log4j2.properties @@ -1,7 +1,7 @@ appender.console2.type = Console appender.console2.name = console2 appender.console2.layout.type = PatternLayout -appender.console2.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n +appender.console2.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n logger.second.name = second logger.second.level = debug diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties index 8699f574d53..ed794cb7c3b 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties @@ -1,7 +1,7 @@ appender.console3.type = Console appender.console3.name = console3 appender.console3.layout.type = PatternLayout -appender.console3.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n +appender.console3.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n logger.third.name = third logger.third.level = debug diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties index 5abca54e392..744e554ff91 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties @@ -3,13 +3,13 @@ status = error appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = [%p][%l] %m%n +appender.file.layout.pattern = [%p][%l] %marker%m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console @@ -19,7 +19,7 @@ appender.deprecation_file.type = File appender.deprecation_file.name = deprecation_file appender.deprecation_file.fileName = ${sys:es.logs}_deprecation.log appender.deprecation_file.layout.type = PatternLayout -appender.deprecation_file.layout.pattern = [%p][%l] %m%n +appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n logger.deprecation.name = deprecation logger.deprecation.level = warn diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties index 5b50bda0c3c..80a28771620 --- 
a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties @@ -3,7 +3,7 @@ status = error appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n logger.has_console_appender.name = has_console_appender logger.has_console_appender.level = trace diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/hierarchy/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/hierarchy/log4j2.properties new file mode 100644 index 00000000000..622f632f4b1 --- /dev/null +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/hierarchy/log4j2.properties @@ -0,0 +1,20 @@ +status = error + +appender.console.type = Console +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %m%n + +rootLogger.level = info +rootLogger.appenderRef.console.ref = console +rootLogger.appenderRef.file.ref = file + +logger.x.name = x +logger.x.level = trace +logger.x.appenderRef.console.ref = console +logger.x.additivity = false + +logger.x_y.name = x.y +logger.x_y.level = debug +logger.x_y.appenderRef.console.ref = console +logger.x_y.additivity = false diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties index fe42489470b..d1a2c534b83 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties @@ -3,13 +3,13 @@ status = error appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = [%p][%l] %m%n +appender.file.layout.pattern = [%p][%l] %marker%m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/prefix/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/prefix/log4j2.properties new file mode 100644 index 00000000000..1f18b38d91e --- /dev/null +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/prefix/log4j2.properties @@ -0,0 +1,20 @@ +appender.console.type = Console +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n + +appender.file.type = File +appender.file.name = file +appender.file.fileName = ${sys:es.logs}.log +appender.file.layout.type = PatternLayout +appender.file.layout.pattern = %marker%m%n + +rootLogger.level = info +rootLogger.appenderRef.console.ref = console +rootLogger.appenderRef.file.ref = file + +logger.prefix.name = prefix +logger.prefix.level = info +logger.prefix.appenderRef.console.ref = console +logger.prefix.appenderRef.file.ref = 
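Together with the %marker pattern fixes above, the new prefix config makes the PrefixLogger marker visible in rendered output. A minimal sketch of what testPrefixLogger exercises; the prefix value "index-1" is illustrative:

// Under a "%marker%m%n" layout this renders as "[index-1] test"; a logger
// created with a null prefix has an empty marker and renders just "test".
final Logger logger = Loggers.getLogger("prefix", "index-1");
logger.info("test");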
file +logger.prefix.additivity = false diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 746f3291547..fb7ee1d8d3c 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -126,7 +126,16 @@ Set<String> getVersions() { new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s -> xml = new XmlParser().parse(s) } - return new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /2\.\d\.\d/ }) + + // List all N-1 releases from maven central + int major = Integer.parseInt(project.version.substring(0, project.version.indexOf('.'))) - 1 + Set<String> versions = new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /$major\.\d\.\d/ }) + if (versions.isEmpty() == false) { + return versions; + } + + // If no version is found, we run the tests with the current version + return Collections.singleton(project.version); } task updatePackagingTestUpgradeFromVersions { diff --git a/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats b/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats index 2fdeed4d13f..52f3de34a97 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/30_deb_package.bats @@ -127,10 +127,18 @@ setup() { # see postrm file assert_file_not_exist "/var/log/elasticsearch" assert_file_not_exist "/usr/share/elasticsearch/plugins" + assert_file_not_exist "/usr/share/elasticsearch/modules" assert_file_not_exist "/var/run/elasticsearch" + # Those directories are removed by the package manager + assert_file_not_exist "/usr/share/elasticsearch/bin" + assert_file_not_exist "/usr/share/elasticsearch/lib" + assert_file_not_exist "/usr/share/elasticsearch/modules" + assert_file_not_exist "/usr/share/elasticsearch/modules/lang-painless" + # The configuration files are still here assert_file_exist "/etc/elasticsearch" + assert_file_exist "/etc/elasticsearch/scripts" assert_file_exist "/etc/elasticsearch/elasticsearch.yml" assert_file_exist "/etc/elasticsearch/jvm.options" assert_file_exist "/etc/elasticsearch/log4j2.properties" @@ -152,6 +160,7 @@ setup() { @test "[DEB] verify package purge" { # all remaining files are deleted by the purge assert_file_not_exist "/etc/elasticsearch" + assert_file_not_exist "/etc/elasticsearch/scripts" assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml" assert_file_not_exist "/etc/elasticsearch/jvm.options" assert_file_not_exist "/etc/elasticsearch/log4j2.properties" diff --git a/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats b/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats index a7aa860370c..50c6849e92e 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats @@ -116,9 +116,17 @@ setup() { # see postrm file assert_file_not_exist "/var/log/elasticsearch" assert_file_not_exist "/usr/share/elasticsearch/plugins" + assert_file_not_exist "/usr/share/elasticsearch/modules" assert_file_not_exist "/var/run/elasticsearch" + # Those directories are removed by the package manager + assert_file_not_exist "/usr/share/elasticsearch/bin" + assert_file_not_exist "/usr/share/elasticsearch/lib" + assert_file_not_exist "/usr/share/elasticsearch/modules" + assert_file_not_exist "/usr/share/elasticsearch/modules/lang-painless" + assert_file_not_exist "/etc/elasticsearch" + assert_file_not_exist 
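For the getVersions() change above, the heart of the new logic is deriving the previous major release line from the current project version. The same computation in Java, with the version string taken from qa/vagrant/versions as an illustrative input:

// "6.0.0-alpha1-SNAPSHOT" yields previous major 5, so only 5.x.y releases
// from maven central are kept; if none exist yet, the packaging tests fall
// back to upgrading from the current version itself.
String projectVersion = "6.0.0-alpha1-SNAPSHOT";
int previousMajor = Integer.parseInt(projectVersion.substring(0, projectVersion.indexOf('.'))) - 1;
String releaseRegex = previousMajor + "\\.\\d\\.\\d";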
"/etc/elasticsearch/scripts" assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml" assert_file_not_exist "/etc/elasticsearch/jvm.options" assert_file_not_exist "/etc/elasticsearch/log4j2.properties" @@ -157,8 +165,14 @@ setup() { # see postrm file assert_file_not_exist "/var/log/elasticsearch" assert_file_not_exist "/usr/share/elasticsearch/plugins" + assert_file_not_exist "/usr/share/elasticsearch/modules" assert_file_not_exist "/var/run/elasticsearch" + assert_file_not_exist "/usr/share/elasticsearch/bin" + assert_file_not_exist "/usr/share/elasticsearch/lib" + assert_file_not_exist "/usr/share/elasticsearch/modules" + assert_file_not_exist "/usr/share/elasticsearch/modules/lang-painless" + assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml" assert_file_exist "/etc/elasticsearch/elasticsearch.yml.rpmsave" assert_file_not_exist "/etc/elasticsearch/jvm.options" diff --git a/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats b/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats index da7b6a180f1..857a0ba3978 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/60_systemd.bats @@ -68,9 +68,24 @@ setup() { # starting Elasticsearch so we don't have to wait for elasticsearch to scan for # them. install_elasticsearch_test_scripts + + # Capture the current epoch in millis + run date +%s + epoch="$output" + systemctl start elasticsearch.service wait_for_elasticsearch_status assert_file_exist "/var/run/elasticsearch/elasticsearch.pid" + assert_file_exist "/var/log/elasticsearch/elasticsearch.log" + + # Converts the epoch back in a human readable format + run date --date=@$epoch "+%Y-%m-%d %H:%M:%S" + since="$output" + + # Verifies that no new entries in journald have been added + # since the last start + result="$(journalctl _SYSTEMD_UNIT=elasticsearch.service --since "$since" --output cat | wc -l)" + [ "$result" -eq "0" ] } @test "[SYSTEMD] start (running)" { diff --git a/qa/vagrant/src/test/resources/packaging/scripts/80_upgrade.bats b/qa/vagrant/src/test/resources/packaging/scripts/80_upgrade.bats index 48bf2aca4e3..feca52c7bbc 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/80_upgrade.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/80_upgrade.bats @@ -37,6 +37,11 @@ load os_package # Cleans everything for the 1st execution setup() { skip_not_dpkg_or_rpm + + sameVersion="false" + if [ "$(cat upgrade_from_version)" == "$(cat version)" ]; then + sameVersion="true" + fi } @test "[UPGRADE] install old version" { @@ -49,11 +54,7 @@ setup() { } @test "[UPGRADE] check elasticsearch version is old version" { - curl -s localhost:9200 | grep \"number\"\ :\ \"$(cat upgrade_from_version)\" || { - echo "Installed an unexpected version:" - curl -s localhost:9200 - false - } + check_elasticsearch_version "$(cat upgrade_from_version)" } @test "[UPGRADE] index some documents into a few indexes" { @@ -79,7 +80,11 @@ setup() { } @test "[UPGRADE] install version under test" { - install_package -u + if [ "$sameVersion" == "true" ]; then + install_package -f + else + install_package -u + fi } @test "[UPGRADE] start version under test" { @@ -88,12 +93,7 @@ setup() { } @test "[UPGRADE] check elasticsearch version is version under test" { - local versionToCheck=$(cat version | sed -e 's/-SNAPSHOT//') - curl -s localhost:9200 | grep \"number\"\ :\ \"$versionToCheck\" || { - echo "Installed an unexpected version:" - curl -s localhost:9200 - false - } + check_elasticsearch_version 
"$(cat version)" } @test "[UPGRADE] verify that the documents are there after restart" { diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index c55d28e971c..54e0ea1570d 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -217,6 +217,10 @@ fi install_and_check_plugin discovery ec2 aws-java-sdk-core-*.jar } +@test "[$GROUP] install discovery-file plugin" { + install_and_check_plugin discovery file +} + @test "[$GROUP] install ingest-attachment plugin" { # we specify the version on the poi-3.15-beta1.jar so that the test does # not spuriously pass if the jar is missing but the other poi jars @@ -353,6 +357,10 @@ fi remove_plugin discovery-ec2 } +@test "[$GROUP] remove discovery-file plugin" { + remove_plugin discovery-file +} + @test "[$GROUP] remove ingest-attachment plugin" { remove_plugin ingest-attachment } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash b/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash index d46d7fe9a5d..1060aa78849 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash @@ -44,12 +44,16 @@ export_elasticsearch_paths() { install_package() { local version=$(cat version) local rpmCommand='-i' - while getopts ":uv:" opt; do + while getopts ":fuv:" opt; do case $opt in u) rpmCommand='-U' dpkgCommand='--force-confnew' ;; + f) + rpmCommand='-U --force' + dpkgCommand='--force-conflicts' + ;; v) version=$OPTARG ;; diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash index fbda05d5f30..137f87045de 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -452,6 +452,22 @@ wait_for_elasticsearch_status() { } } +# Checks the current elasticsearch version using the Info REST endpoint +# $1 - expected version +check_elasticsearch_version() { + local version=$1 + local versionToCheck=$(echo $version | sed -e 's/-SNAPSHOT//') + + run curl -s localhost:9200 + [ "$status" -eq 0 ] + + echo $output | grep \"number\"\ :\ \"$versionToCheck\" || { + echo "Installed an unexpected version:" + curl -s localhost:9200 + false + } +} + install_elasticsearch_test_scripts() { install_script is_guide.groovy install_script is_guide.mustache diff --git a/qa/vagrant/versions b/qa/vagrant/versions index c7aef6cb99a..654a95a3a25 100644 --- a/qa/vagrant/versions +++ b/qa/vagrant/versions @@ -1,16 +1 @@ -2.0.0 -2.0.1 -2.0.2 -2.1.0 -2.1.1 -2.1.2 -2.2.0 -2.2.1 -2.2.2 -2.3.0 -2.3.1 -2.3.2 -2.3.3 -2.3.4 -2.3.5 -2.4.0 +6.0.0-alpha1-SNAPSHOT diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 2a7de2c726d..cd49b9f1925 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -1,8 +1,7 @@ -apply plugin: 'java' -apply plugin: 'com.bmuschko.nexus' +apply plugin: 'elasticsearch.build' +apply plugin: 'nebula.maven-base-publish' +apply plugin: 'nebula.maven-scm' -extraArchive { - sources = false - javadoc = false - tests = false -} +test.enabled = false +jarHell.enabled = false +licenseHeaders.enabled = false diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json index 27d951fa41b..2ff171bf528 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json @@ -39,7 +39,19 @@ }, "fields": { "type": "list", - "description" : "Default comma-separated list of fields to return in the response for updates" + "description" : "Default comma-separated list of fields to return in the response for updates, can be overridden on each sub-request" + }, + "_source": { + "type" : "list", + "description" : "True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub-request" + }, + "_source_exclude": { + "type" : "list", + "description" : "Default list of fields to exclude from the returned _source field, can be overridden on each sub-request" + }, + "_source_include": { + "type" : "list", + "description" : "Default list of fields to extract and return from the _source field, can be overridden on each sub-request" }, "pipeline" : { "type" : "string", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json index 7858fc2ee9e..ae5ef206aa4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json @@ -33,6 +33,10 @@ "type": "list", "description" : "Comma-separated list of column names to display" }, + "health": { + "type" : "string", + "description" : "A health status (\"green\", \"yellow\", or \"red\") to filter only indices matching the specified health status" + }, "help": { "type": "boolean", "description": "Return help information", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json index f8743e1d1ba..a734f7b1bac 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -40,13 +40,13 @@ "type" : "boolean", "description" : "Specify whether to return detailed information about score computation as part of a hit" }, - "fields": { + "stored_fields": { "type" : "list", - "description" : "A comma-separated list of fields to return as part of a hit" + "description" : "A comma-separated list of stored fields to return as part of a hit" }, - "fielddata_fields": { + "docvalue_fields": { "type" : "list", - "description" : "A comma-separated list of fields to return as the field data representation of a field for each hit" + "description" : "A comma-separated list of fields to return as the docvalue representation of a field for each hit" }, "from": { "type" : "number", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json index 30b5deff1d3..328794ffdd0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json @@ -41,9 +41,9 @@ "type" : "string", "description" : "The default field for query string query (default: _all)" }, - "fields": { + "stored_fields": { "type": "list", - "description" : "A comma-separated list of fields to return in the response" + "description" : "A comma-separated list of stored fields to return in the response" }, "lenient": 
{ "type" : "boolean", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get.json index e0e5170f9c3..8aba39e7710 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get.json @@ -23,9 +23,9 @@ } }, "params": { - "fields": { + "stored_fields": { "type": "list", - "description" : "A comma-separated list of fields to return in the response" + "description" : "A comma-separated list of stored fields to return in the response" }, "parent": { "type" : "string", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json index b46e3fcf116..677219addee 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json @@ -39,7 +39,7 @@ "refresh": { "type" : "enum", "options": ["true", "false", "wait_for"], - "description" : "If `true` then refresh the effected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes." + "description" : "If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes." }, "routing": { "type" : "string", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.upgrade.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.upgrade.json index 0e5e4ffd244..66111456719 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.upgrade.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.upgrade.json @@ -29,7 +29,7 @@ "wait_for_completion": { "type" : "boolean", "description" : "Specify whether the request should block until the all segments are upgraded (default: false)" - }, + }, "only_ancient_segments": { "type" : "boolean", "description" : "If true, only ancient (an older Lucene major release) segments will be upgraded" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json b/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json index 1639f3619b3..1f1f5adf75e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json @@ -16,9 +16,9 @@ } }, "params": { - "fields": { + "stored_fields": { "type": "list", - "description" : "A comma-separated list of fields to return in the response" + "description" : "A comma-separated list of stored fields to return in the response" }, "preference": { "type" : "string", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json index 39aa53b2572..57e0e189668 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json @@ -1,6 +1,6 @@ { "msearch_template": { - "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html", + "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html", "methods": ["GET", "POST"], "url": { "path": "/_msearch/template", diff --git 
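The fields-to-stored_fields renames in the get and mget specs above mirror the Java-client change already visible in SizeMappingIT further up: setFields() becomes setStoredFields(), and it only addresses fields that are actually stored (including metadata fields such as _size). A minimal sketch, with index, type, and id values illustrative:

// Fetch a stored metadata field; filtered _source is handled separately by
// the _source/_source_include/_source_exclude parameters added in bulk.json.
GetResponse response = client().prepareGet("test", "type", "1")
        .setStoredFields("_size")
        .get();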
a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json index a18d081f9b7..d87e4c5e7f5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json @@ -31,6 +31,18 @@ "type": "list", "description": "A comma-separated list of fields to return in the response" }, + "_source": { + "type" : "list", + "description" : "True or false to return the _source field or not, or a list of fields to return" + }, + "_source_exclude": { + "type" : "list", + "description" : "A list of fields to exclude from the returned _source field" + }, + "_source_include": { + "type" : "list", + "description" : "A list of fields to extract and return from the _source field" + }, "lang": { "type": "string", "description": "The script language (default: groovy)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json index 72149adc663..b7f608b8b4f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json @@ -40,13 +40,17 @@ "type" : "boolean", "description" : "Specify whether to return detailed information about score computation as part of a hit" }, - "fields": { + "stored_fields": { "type" : "list", - "description" : "A comma-separated list of fields to return as part of a hit" + "description" : "A comma-separated list of stored fields to return as part of a hit" + }, + "docvalue_fields": { + "type" : "list", + "description" : "A comma-separated list of fields to return as the docvalue representation of a field for each hit" }, "fielddata_fields": { "type" : "list", - "description" : "A comma-separated list of fields to return as the field data representation of a field for each hit" + "description" : "A comma-separated list of fields to return as the docvalue representation of a field for each hit" }, "from": { "type" : "number", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_fields.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_fields.yaml deleted file mode 100644 index 3aa9d522633..00000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_fields.yaml +++ /dev/null @@ -1,49 +0,0 @@ ---- -"Fields": - - do: - index: - refresh: true - index: test_index - type: test_type - id: test_id_1 - body: { "foo": "bar" } - - - do: - index: - refresh: true - index: test_index - type: test_type - id: test_id_2 - body: { "foo": "qux" } - - - do: - index: - refresh: true - index: test_index - type: test_type - id: test_id_3 - body: { "foo": "corge" } - - - - do: - bulk: - refresh: true - body: | - { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "fields": ["_source"] } } - { "doc": { "foo": "baz" } } - { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } } - { "fields": ["_source"], "doc": { "foo": "quux" } } - - - match: { items.0.update.get._source.foo: baz } - - match: { items.1.update.get._source.foo: quux } - - - do: - bulk: - index: test_index - type: test_type - fields: _source - body: | - { "update": { "_id": "test_id_3" } } - { "doc": { "foo": "garply" } } - - - match: { items.0.update.get._source.foo: garply } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yaml 
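The _source parameters added to update.json above move the update API from per-field fields filtering to source filtering on the response. A hedged sketch against the Java client, assuming the 5.x UpdateRequest fetchSource variants; index, type, id, and field names are illustrative:

// Echo back a filtered _source in the update response; the REST equivalent
// is _source_include=foo, as exercised by the YAML tests that follow.
UpdateRequest request = new UpdateRequest("test_1", "test", "1")
        .doc("foo", "baz")
        .fetchSource("foo", null); // include "foo", exclude nothing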
b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yaml new file mode 100644 index 00000000000..c852c376cc0 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yaml @@ -0,0 +1,76 @@ +--- +"Source filtering": + - do: + index: + refresh: true + index: test_index + type: test_type + id: test_id_1 + body: { "foo": "bar", "bar": "foo" } + + - do: + index: + refresh: true + index: test_index + type: test_type + id: test_id_2 + body: { "foo": "qux", "bar": "pux" } + + - do: + index: + refresh: true + index: test_index + type: test_type + id: test_id_3 + body: { "foo": "corge", "bar": "forge" } + + + - do: + bulk: + refresh: true + body: | + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": true } } + { "doc": { "foo": "baz" } } + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } } + { "_source": true, "doc": { "foo": "quux" } } + + - match: { items.0.update.get._source.foo: baz } + - match: { items.1.update.get._source.foo: quux } + + - do: + bulk: + index: test_index + type: test_type + _source: true + body: | + { "update": { "_id": "test_id_3" } } + { "doc": { "foo": "garply" } } + + - match: { items.0.update.get._source.foo: garply } + + - do: + bulk: + refresh: true + body: | + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": {"includes": "bar"} } } + { "doc": { "foo": "baz" } } + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } } + { "_source": {"includes": "foo"}, "doc": { "foo": "quux" } } + + - match: { items.0.update.get._source.bar: foo } + - is_false: items.0.update.get._source.foo + - match: { items.1.update.get._source.foo: quux } + - is_false: items.1.update.get._source.bar + + - do: + bulk: + index: test_index + type: test_type + _source_include: foo + body: | + { "update": { "_id": "test_id_3" } } + { "doc": { "foo": "garply" } } + + - match: { items.0.update.get._source.foo: garply } + - is_false: items.0.update.get._source.bar + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml index b07b5dadd98..24619e53353 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml @@ -70,6 +70,47 @@ ) $/ +--- +"Test cat indices using health status": + + - do: + cluster.health: {} + + - set: { number_of_data_nodes: count } + + - do: + indices.create: + index: foo + body: + settings: + number_of_shards: "1" + number_of_replicas: "0" + - do: + indices.create: + index: bar + body: + settings: + number_of_shards: "1" + number_of_replicas: $count + + - do: + cat.indices: + health: green + h: index + + - match: + $body: | + /^(foo)$/ + + - do: + cat.indices: + health: yellow + h: index + + - match: + $body: | + /^(bar)$/ + --- "Test cat indices using wildcards": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yaml index 02f011d75b0..44447ebee8c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yaml @@ -28,7 +28,7 @@ type: test id: 1 routing: 5 - fields: [_routing] + stored_fields: [_routing] - match: { _id: "1"} - match: { _routing: "5"} diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/create/50_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/50_parent.yaml index cac5387370d..cb0dfcfe78c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/50_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/50_parent.yaml @@ -31,7 +31,7 @@ type: test id: 1 parent: 5 - fields: [_parent, _routing] + stored_fields: [_parent, _routing] - match: { _id: "1"} - match: { _parent: "5"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/55_parent_with_routing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/55_parent_with_routing.yaml index 55b840526e5..5f352ac90c3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/55_parent_with_routing.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/55_parent_with_routing.yaml @@ -32,7 +32,7 @@ id: 1 parent: 5 routing: 4 - fields: [_parent, _routing] + stored_fields: [_parent, _routing] - match: { _id: "1"} - match: { _parent: "5"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_fields.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yaml similarity index 89% rename from rest-api-spec/src/main/resources/rest-api-spec/test/get/20_fields.yaml rename to rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yaml index 3de8ab1d91d..fbffb9e0ea8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_fields.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yaml @@ -1,5 +1,5 @@ --- -"Fields": +"Stored fields": - do: indices.create: @@ -26,7 +26,7 @@ index: test_1 type: test id: 1 - fields: foo + stored_fields: foo - match: { _index: test_1 } - match: { _type: test } @@ -39,7 +39,7 @@ index: test_1 type: test id: 1 - fields: [foo, count] + stored_fields: [foo, count] - match: { fields.foo: [bar] } - match: { fields.count: [1] } @@ -50,7 +50,7 @@ index: test_1 type: test id: 1 - fields: [foo, count, _source] + stored_fields: [foo, count, _source] - match: { fields.foo: [bar] } - match: { fields.count: [1] } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/30_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/30_parent.yaml index 4a842cee2e0..353dce8fab7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/30_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/30_parent.yaml @@ -25,7 +25,7 @@ setup: type: test id: 1 parent: 中文 - fields: [_parent, _routing] + stored_fields: [_parent, _routing] - match: { _id: "1"} - match: { _parent: 中文 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yaml index 11fe04884b3..94a40c0437a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yaml @@ -28,7 +28,7 @@ type: test id: 1 routing: 5 - fields: [_routing] + stored_fields: [_routing] - match: { _id: "1"} - match: { _routing: "5"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/55_parent_with_routing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/55_parent_with_routing.yaml index a7ed2df0dd7..db71d164230 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/55_parent_with_routing.yaml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/get/55_parent_with_routing.yaml @@ -32,7 +32,7 @@ id: 1 parent: 5 routing: 4 - fields: [_parent, _routing] + stored_fields: [_parent, _routing] - match: { _id: "1"} - match: { _parent: "5"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yaml index 2c85a2d54ab..c858886ca3d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yaml @@ -58,7 +58,7 @@ index: test_1 type: test id: 1 - fields: count + stored_fields: count _source: true - match: { _index: test_1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yaml index bb248a458ae..7b3c21df4e0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yaml @@ -28,7 +28,7 @@ type: test id: 1 routing: 5 - fields: [_routing] + stored_fields: [_routing] - match: { _id: "1"} - match: { _routing: "5"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/50_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/50_parent.yaml index 916e1ac3520..89077dac4ae 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/50_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/50_parent.yaml @@ -31,7 +31,7 @@ type: test id: 1 parent: 5 - fields: [_parent, _routing] + stored_fields: [_parent, _routing] - match: { _id: "1"} - match: { _parent: "5"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/55_parent_with_routing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/55_parent_with_routing.yaml index a7ed2df0dd7..db71d164230 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/55_parent_with_routing.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/55_parent_with_routing.yaml @@ -32,7 +32,7 @@ id: 1 parent: 5 routing: 4 - fields: [_parent, _routing] + stored_fields: [_parent, _routing] - match: { _id: "1"} - match: { _parent: "5"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_fields.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yaml similarity index 73% rename from rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_fields.yaml rename to rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yaml index 2216c6540b8..33cab111a8b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_fields.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yaml @@ -1,5 +1,5 @@ --- -"Fields": +"Stored fields": - do: indices.create: @@ -29,9 +29,9 @@ body: docs: - { _id: 1 } - - { _id: 1, fields: foo } - - { _id: 1, fields: [foo] } - - { _id: 1, fields: [foo, _source] } + - { _id: 1, stored_fields: foo } + - { _id: 1, stored_fields: [foo] } + - { _id: 1, stored_fields: [foo, _source] } - is_false: docs.0.fields - match: { docs.0._source: { foo: bar }} @@ -49,13 +49,13 @@ mget: index: test_1 type: test - fields: foo + stored_fields: foo body: docs: - { _id: 1 } - - { _id: 1, fields: foo } - - { _id: 1, fields: [foo] } - - { _id: 1, fields: [foo, _source] } + - { _id: 1, stored_fields: foo } + - { 
_id: 1, stored_fields: [foo] } + - { _id: 1, stored_fields: [foo, _source] } - match: { docs.0.fields.foo: [bar] } - is_false: docs.0._source @@ -73,13 +73,13 @@ mget: index: test_1 type: test - fields: [foo] + stored_fields: [foo] body: docs: - { _id: 1 } - - { _id: 1, fields: foo } - - { _id: 1, fields: [foo] } - - { _id: 1, fields: [foo, _source] } + - { _id: 1, stored_fields: foo } + - { _id: 1, stored_fields: [foo] } + - { _id: 1, stored_fields: [foo, _source] } - match: { docs.0.fields.foo: [bar] } - is_false: docs.0._source @@ -97,13 +97,13 @@ mget: index: test_1 type: test - fields: [foo, _source] + stored_fields: [foo, _source] body: docs: - { _id: 1 } - - { _id: 1, fields: foo } - - { _id: 1, fields: [foo] } - - { _id: 1, fields: [foo, _source] } + - { _id: 1, stored_fields: foo } + - { _id: 1, stored_fields: [foo] } + - { _id: 1, stored_fields: [foo, _source] } - match: { docs.0.fields.foo: [bar] } - match: { docs.0._source: { foo: bar }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/30_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/30_parent.yaml index e1cf8df4fc3..ad064df69ce 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/30_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/30_parent.yaml @@ -34,9 +34,9 @@ body: docs: - { _id: 1 } - - { _id: 1, parent: 5, fields: [ _parent, _routing ] } - - { _id: 1, parent: 4, fields: [ _parent, _routing ] } - - { _id: 2, parent: 5, fields: [ _parent, _routing ] } + - { _id: 1, parent: 5, stored_fields: [ _parent, _routing ] } + - { _id: 1, parent: 4, stored_fields: [ _parent, _routing ] } + - { _id: 2, parent: 5, stored_fields: [ _parent, _routing ] } - is_false: docs.0.found - is_false: docs.1.found diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yaml index 71ac0feabbe..7196412ebf3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yaml @@ -26,7 +26,7 @@ mget: index: test_1 type: test - fields: [_routing] + stored_fields: [_routing] body: docs: - { _id: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/55_parent_with_routing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/55_parent_with_routing.yaml index 19b597675cf..1fa1ce2cddb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/55_parent_with_routing.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/55_parent_with_routing.yaml @@ -29,7 +29,7 @@ mget: index: test_1 type: test - fields: [ _routing , _parent] + stored_fields: [ _routing , _parent] body: docs: - { _id: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yaml index e96c175489e..097f49007e5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yaml @@ -30,7 +30,7 @@ type: test id: 1 routing: 5 - fields: _routing + stored_fields: _routing - match: { _routing: "5"} @@ -49,9 +49,9 @@ type: test id: 1 routing: 5 - fields: foo + _source: foo body: doc: { foo: baz } - - match: { get.fields.foo: [baz] } + - match: { get._source.foo: baz } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/50_parent.yaml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/update/50_parent.yaml index b25662dbf14..82508f951e0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/50_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/50_parent.yaml @@ -36,7 +36,7 @@ setup: type: test id: 1 parent: 5 - fields: [_parent, _routing] + stored_fields: [_parent, _routing] - match: { _parent: "5"} - match: { _routing: "5"} @@ -47,11 +47,11 @@ setup: type: test id: 1 parent: 5 - fields: foo + _source: foo body: doc: { foo: baz } - - match: { get.fields.foo: [baz] } + - match: { get._source.foo: baz } --- "Parent omitted": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/55_parent_with_routing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/55_parent_with_routing.yaml index 89dc83198c6..e75eddff9a8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/55_parent_with_routing.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/55_parent_with_routing.yaml @@ -34,7 +34,7 @@ id: 1 routing: 4 parent: 5 - fields: [_parent, _routing] + stored_fields: [_parent, _routing] - match: { _parent: "5"} - match: { _routing: "4"} @@ -56,9 +56,9 @@ id: 1 parent: 5 routing: 4 - fields: foo + _source: foo body: doc: { foo: baz } - - match: { get.fields.foo: [baz] } + - match: { get._source.foo: baz } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_fields.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yaml similarity index 68% rename from rest-api-spec/src/main/resources/rest-api-spec/test/update/80_fields.yaml rename to rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yaml index 86d6afa069d..4bb22e6b801 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_fields.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yaml @@ -1,19 +1,18 @@ --- -"Fields": +"Source filtering": - do: update: index: test_1 type: test id: 1 - fields: foo,bar,_source + _source: [foo, bar] body: doc: { foo: baz } upsert: { foo: bar } - match: { get._source.foo: bar } - - match: { get.fields.foo: [bar] } - - is_false: get.fields.bar + - is_false: get._source.bar # TODO: # diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yaml index 5478d84e2a3..7a6a58e12c0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yaml @@ -33,6 +33,6 @@ type: test id: 1 parent: 5 - fields: [ _parent, _routing ] + stored_fields: [ _parent, _routing ] diff --git a/settings.gradle b/settings.gradle index 42e965a3288..13cd992a6fc 100644 --- a/settings.gradle +++ b/settings.gradle @@ -39,6 +39,7 @@ List projects = [ 'plugins:analysis-stempel', 'plugins:discovery-azure-classic', 'plugins:discovery-ec2', + 'plugins:discovery-file', 'plugins:discovery-gce', 'plugins:ingest-geoip', 'plugins:ingest-attachment', diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 96921c3c90d..ed8725fa008 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ 
b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -25,7 +25,6 @@ import org.elasticsearch.SecureSM; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.plugins.PluginInfo; import org.junit.Assert; diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java index 2a04a5be97f..b3bbd5a9a43 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java @@ -32,7 +32,7 @@ import static org.hamcrest.CoreMatchers.equalTo; abstract class ESElasticsearchCliTestCase extends ESTestCase { interface InitConsumer { - void accept(final boolean foreground, final Path pidFile, final Map<String, String> esSettings); + void accept(final boolean foreground, final Path pidFile, final boolean quiet, final Map<String, String> esSettings); } void runTest( @@ -46,9 +46,9 @@ abstract class ESElasticsearchCliTestCase extends ESTestCase { final AtomicBoolean init = new AtomicBoolean(); final int status = Elasticsearch.main(args, new Elasticsearch() { @Override - void init(final boolean daemonize, final Path pidFile, final Map<String, String> esSettings) { + void init(final boolean daemonize, final Path pidFile, final boolean quiet, final Map<String, String> esSettings) { init.set(true); - initConsumer.accept(!daemonize, pidFile, esSettings); + initConsumer.accept(!daemonize, pidFile, quiet, esSettings); } }, terminal); assertThat(status, equalTo(expectedStatus)); diff --git a/test/framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java b/test/framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java index e576d177066..5b00f9f2e86 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/common/io/PathUtilsForTesting.java @@ -30,7 +30,7 @@ public class PathUtilsForTesting { /** Sets a new default filesystem for testing */ public static void setup() { - installMock(LuceneTestCase.getBaseTempDirForTestClass().getFileSystem()); + installMock(LuceneTestCase.createTempDir().getFileSystem()); } /** Installs a mock filesystem for testing */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 0560ec2a910..c76d4b03cff 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -39,7 +39,6 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -47,10 +46,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; -import 
org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.inject.Module; -import org.elasticsearch.common.inject.ModulesBuilder; -import org.elasticsearch.common.inject.util.Providers; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -74,6 +69,7 @@ import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; +import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -85,7 +81,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.analysis.AnalysisModule; -import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.mapper.MapperRegistry; @@ -100,7 +95,6 @@ import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.threadpool.ThreadPool; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.After; @@ -143,7 +137,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> protected static final String DATE_FIELD_NAME = "mapped_date"; protected static final String OBJECT_FIELD_NAME = "mapped_object"; protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point"; - protected static final String GEO_POINT_FIELD_MAPPING = "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true"; + protected static final String LEGACY_GEO_POINT_FIELD_MAPPING = "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true"; protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_SHAPE_FIELD_NAME}; @@ -159,6 +153,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> private static String[] currentTypes; private static String[] randomTypes; + protected static Index getIndex() { return index; } @@ -1014,7 +1009,6 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> private static class ServiceHolder implements Closeable { - private final Injector injector; private final IndicesQueriesRegistry indicesQueriesRegistry; private final IndexFieldDataService indexFieldDataService; private final SearchModule searchModule; @@ -1025,18 +1019,14 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> private final MapperService mapperService; private final BitsetFilterCache bitsetFilterCache; private final ScriptService scriptService; + private final Client client; ServiceHolder(Settings nodeSettings, Settings indexSettings, Collection<Class<? extends Plugin>> plugins, AbstractQueryTestCase<?> testCase) throws IOException { - final ThreadPool threadPool = new ThreadPool(nodeSettings); - ClusterService clusterService = 
ClusterServiceUtils.createClusterService(threadPool); - ClusterServiceUtils.setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData( - new MetaData.Builder().put(new IndexMetaData.Builder( - index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); Environment env = InternalSettingsPreparer.prepareEnvironment(nodeSettings, null); PluginsService pluginsService = new PluginsService(nodeSettings, env.modulesFile(), env.pluginsFile(), plugins); - final Client proxy = (Client) Proxy.newProxyInstance( + client = (Client) Proxy.newProxyInstance( Client.class.getClassLoader(), new Class<?>[]{Client.class}, clientInvocationHandler); @@ -1046,47 +1036,23 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED); SettingsModule settingsModule = new SettingsModule(nodeSettings, scriptSettings, pluginsService.getPluginSettingsFilter()); searchModule = new SearchModule(nodeSettings, false, pluginsService.filterPlugins(SearchPlugin.class)); - IndicesModule indicesModule = new IndicesModule(pluginsService.filterPlugins(MapperPlugin.class)) { - @Override - public void configure() { - // skip services - bindMapperExtension(); - } - }; + IndicesModule indicesModule = new IndicesModule(pluginsService.filterPlugins(MapperPlugin.class)); List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); entries.addAll(indicesModule.getNamedWriteables()); entries.addAll(searchModule.getNamedWriteables()); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(entries); - ModulesBuilder modulesBuilder = new ModulesBuilder(); - for (Module pluginModule : pluginsService.createGuiceModules()) { - modulesBuilder.add(pluginModule); - } - modulesBuilder.add( - b -> { - b.bind(PluginsService.class).toInstance(pluginsService); - b.bind(Environment.class).toInstance(new Environment(nodeSettings)); - b.bind(ThreadPool.class).toInstance(threadPool); - b.bind(Client.class).toInstance(proxy); - b.bind(ClusterService.class).toProvider(Providers.of(clusterService)); - b.bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class); - b.bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); - }, - settingsModule, indicesModule, searchModule, new IndexSettingsModule(index, indexSettings) - ); - pluginsService.processModules(modulesBuilder); - injector = modulesBuilder.createInjector(); - IndexScopedSettings indexScopedSettings = injector.getInstance(IndexScopedSettings.class); + IndexScopedSettings indexScopedSettings = settingsModule.getIndexScopedSettings(); idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings, indexScopedSettings); AnalysisModule analysisModule = new AnalysisModule(new Environment(nodeSettings), emptyList()); AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings); scriptService = scriptModule.getScriptService(); similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); - MapperRegistry mapperRegistry = injector.getInstance(MapperRegistry.class); + MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, this::createShardContext); IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(nodeSettings, new IndexFieldDataCache.Listener() { }); indexFieldDataService = new IndexFieldDataService(idxSettings, indicesFieldDataCache, - injector.getInstance(CircuitBreakerService.class), mapperService); + new 
NoneCircuitBreakerService(), mapperService); bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() { @Override public void onCache(ShardId shardId, Accountable accountable) { @@ -1098,7 +1064,10 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> } }); - indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); + indicesQueriesRegistry = searchModule.getQueryParserRegistry(); + + String geoFieldMapping = (idxSettings.getIndexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) ? + LEGACY_GEO_POINT_FIELD_MAPPING : "type=geo_point"; for (String type : currentTypes) { mapperService.merge(type, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(type, @@ -1109,7 +1078,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> BOOLEAN_FIELD_NAME, "type=boolean", DATE_FIELD_NAME, "type=date", OBJECT_FIELD_NAME, "type=object", - GEO_POINT_FIELD_NAME, GEO_POINT_FIELD_MAPPING, + GEO_POINT_FIELD_NAME, geoFieldMapping, GEO_SHAPE_FIELD_NAME, "type=geo_shape" ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); // also add mappings for two inner fields in the object field @@ -1119,24 +1088,17 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> MapperService.MergeReason.MAPPING_UPDATE, false); } testCase.initializeAdditionalMappings(mapperService); - this.namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class); + this.namedWriteableRegistry = namedWriteableRegistry; } @Override public void close() throws IOException { - injector.getInstance(ClusterService.class).close(); - try { - terminate(injector.getInstance(ThreadPool.class)); - } catch (InterruptedException e) { - IOUtils.reThrow(e); - } } QueryShardContext createShardContext() { ClusterState state = ClusterState.builder(new ClusterName("_name")).build(); - Client client = injector.getInstance(Client.class); return new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, - scriptService, indicesQueriesRegistry, client, null, state); + scriptService, indicesQueriesRegistry, this.client, null, state); } ScriptModule createScriptModule(List<ScriptPlugin> scriptPlugins) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java index 76f68a7c098..944ddb9b05f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java @@ -21,12 +21,16 @@ package org.elasticsearch.test.disruption; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.test.InternalTestCluster; -import java.util.HashSet; +import java.util.Arrays; import java.util.Random; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Pattern; +import java.util.stream.Collectors; /** * Suspends all threads on the specified node in order to simulate a long gc. 
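The next hunk swaps the overly broad "Logger" pattern for the log4j package: a thread suspended while inside logging code can hold JVM-wide log4j locks and freeze every other node sharing the test JVM, so stopNodeThreads (further below) inspects each suspended thread's stack and lets "unsafe" threads run on, to be retried later. A minimal sketch of that safety check, with a hypothetical helper name (the real logic is inline in stopNodeThreads):

    // a thread is safe to keep suspended only if no stack frame matches an
    // unsafe pattern such as "logging\.log4j"
    static boolean safeToSuspend(Thread thread, Pattern[] unsafePatterns) {
        for (StackTraceElement frame : thread.getStackTrace()) {
            for (Pattern unsafe : unsafePatterns) {
                if (unsafe.matcher(frame.getClassName()).find()) {
                    return false; // e.g. mid-call into log4j: resume and retry later
                }
            }
        }
        return true;
    }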
@@ -34,8 +38,8 @@ import java.util.regex.Pattern; public class LongGCDisruption extends SingleNodeDisruption { private static final Pattern[] unsafeClasses = new Pattern[]{ - // logging has shared JVM locks - we may suspend a thread and block other nodes from doing their thing - Pattern.compile("Logger") + // logging has shared JVM locks - we may suspend a thread and block other nodes from doing their thing + Pattern.compile("logging\\.log4j") }; protected final String disruptedNode; @@ -49,13 +53,67 @@ public class LongGCDisruption extends SingleNodeDisruption { @Override public synchronized void startDisrupting() { if (suspendedThreads == null) { - suspendedThreads = new HashSet<>(); - stopNodeThreads(disruptedNode, suspendedThreads); + boolean success = false; + try { + suspendedThreads = ConcurrentHashMap.newKeySet(); + + final String currentThreadName = Thread.currentThread().getName(); + assert currentThreadName.contains("[" + disruptedNode + "]") == false : + "current thread matches the disrupted node pattern. thread name: " + currentThreadName + ", node: " + disruptedNode; + // we spawn a background thread to protect against deadlock which can happen + // if there are shared resources between the caller thread and the suspended threads + // see unsafeClasses for how to avoid that + final AtomicReference<Exception> stoppingError = new AtomicReference<>(); + final Thread stoppingThread = new Thread(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + stoppingError.set(e); + } + + @Override + protected void doRun() throws Exception { + // keep trying to stop threads until no new threads are discovered + while (stopNodeThreads(disruptedNode, suspendedThreads)) { + if (Thread.interrupted()) { + return; + } + } + } + }); + stoppingThread.setName(currentThreadName + "[LongGCDisruption][threadStopper]"); + stoppingThread.start(); + try { + stoppingThread.join(getStoppingTimeoutInMillis()); + } catch (InterruptedException e) { + stoppingThread.interrupt(); // best effort to signal stopping + throw new RuntimeException(e); + } + if (stoppingError.get() != null) { + throw new RuntimeException("unknown error while stopping threads", stoppingError.get()); + } + if (stoppingThread.isAlive()) { + logger.warn("failed to stop node [{}]'s threads within [{}] millis. Stopping thread stack trace:\n {}", disruptedNode, getStoppingTimeoutInMillis(), stackTrace(stoppingThread)); + stoppingThread.interrupt(); // best effort + throw new RuntimeException("stopping node threads took too long"); + } + success = true; + } finally { + if (success == false) { + // resume any already-suspended threads if stopping failed + resumeThreads(suspendedThreads); + suspendedThreads = null; + } + } } else { throw new IllegalStateException("can't disrupt twice, call stopDisrupting() first"); } } + private String stackTrace(Thread thread) { + return Arrays.stream(thread.getStackTrace()).map(Object::toString).collect(Collectors.joining("\n")); + } + @Override public synchronized void stopDisrupting() { if (suspendedThreads != null) { @@ -75,6 +133,13 @@ public class LongGCDisruption extends SingleNodeDisruption { return TimeValue.timeValueMillis(0); } + /** + * Resolves all threads belonging to the given node and suspends them if their current stack trace + * is "safe". Threads are added to nodeThreads if suspended. + * + * Returns true if some live threads were found. The caller is expected to call this method + * until no more live threads are found. 
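+ * (Threads already present in nodeThreads are skipped, so repeated calls converge once every live thread has been suspended.) 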
+ */ @SuppressWarnings("deprecation") // stops/resumes threads intentionally @SuppressForbidden(reason = "stops/resumes threads intentionally") protected boolean stopNodeThreads(String node, Set<Thread> nodeThreads) { @@ -86,7 +151,7 @@ public class LongGCDisruption extends SingleNodeDisruption { allThreads = null; } } - boolean stopped = false; + boolean liveThreadsFound = false; final String nodeThreadNamePart = "[" + node + "]"; for (Thread thread : allThreads) { if (thread == null) { @@ -95,7 +160,7 @@ String name = thread.getName(); if (name.contains(nodeThreadNamePart)) { if (thread.isAlive() && nodeThreads.add(thread)) { - stopped = true; + liveThreadsFound = true; logger.trace("stopping thread [{}]", name); thread.suspend(); // double check the thread is not in a shared resource like logging. If so, let it go and come back later. @@ -103,7 +168,7 @@ safe: for (StackTraceElement stackElement : thread.getStackTrace()) { String className = stackElement.getClassName(); - for (Pattern unsafePattern : unsafeClasses) { + for (Pattern unsafePattern : getUnsafeClasses()) { if (unsafePattern.matcher(className).find()) { safe = false; break safe; @@ -118,7 +183,17 @@ } } } - return stopped; + return liveThreadsFound; + } + + // for testing + protected Pattern[] getUnsafeClasses() { + return unsafeClasses; + } + + // for testing + protected long getStoppingTimeoutInMillis() { + return TimeValue.timeValueSeconds(30).getMillis(); } @SuppressWarnings("deprecation") // stops/resumes threads intentionally diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruptionTest.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruptionTest.java new file mode 100644 index 00000000000..38190444758 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruptionTest.java @@ -0,0 +1,151 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.test.disruption; + +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.ReentrantLock; +import java.util.regex.Pattern; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + +public class LongGCDisruptionTest extends ESTestCase { + + static class LockedExecutor { + ReentrantLock lock = new ReentrantLock(); + + public void executeLocked(Runnable r) { + lock.lock(); + try { + r.run(); + } finally { + lock.unlock(); + } + } + } + + public void testBlockingTimeout() throws Exception { + final String nodeName = "test_node"; + LongGCDisruption disruption = new LongGCDisruption(random(), nodeName) { + @Override + protected Pattern[] getUnsafeClasses() { + return new Pattern[]{ + Pattern.compile(LockedExecutor.class.getSimpleName()) + }; + } + + @Override + protected long getStoppingTimeoutInMillis() { + return 100; + } + }; + final AtomicBoolean stop = new AtomicBoolean(); + final CountDownLatch underLock = new CountDownLatch(1); + final CountDownLatch pauseUnderLock = new CountDownLatch(1); + final LockedExecutor lockedExecutor = new LockedExecutor(); + final AtomicLong ops = new AtomicLong(); + try { + Thread[] threads = new Thread[10]; + for (int i = 0; i < 10; i++) { + // at least one locked and one non-locked thread + final boolean lockedExec = (i < 9 && randomBoolean()) || i == 0; + threads[i] = new Thread(() -> { + while (stop.get() == false) { + if (lockedExec) { + lockedExecutor.executeLocked(() -> { + try { + underLock.countDown(); + ops.incrementAndGet(); + pauseUnderLock.await(); + } catch (InterruptedException e) { + // ignored - the test exits via the stop flag + } + }); + } else { + ops.incrementAndGet(); + } + } + }); + threads[i].setName("[" + nodeName + "][" + i + "]"); + threads[i].start(); + } + // make sure some threads are under lock + underLock.await(); + RuntimeException e = expectThrows(RuntimeException.class, disruption::startDisrupting); + assertThat(e.getMessage(), containsString("stopping node threads took too long")); + } finally { + stop.set(true); + pauseUnderLock.countDown(); + } + } + + /** + * Checks that a GC disruption never blocks threads while they are doing something "unsafe" + * but does keep retrying until all threads can be safely paused. + */ + public void testNotBlockingUnsafeStackTraces() throws Exception { + final String nodeName = "test_node"; + LongGCDisruption disruption = new LongGCDisruption(random(), nodeName) { + @Override + protected Pattern[] getUnsafeClasses() { + return new Pattern[]{ + Pattern.compile(LockedExecutor.class.getSimpleName()) + }; + } + }; + final AtomicBoolean stop = new AtomicBoolean(); + final LockedExecutor lockedExecutor = new LockedExecutor(); + final AtomicLong ops = new AtomicLong(); + try { + Thread[] threads = new Thread[10]; + for (int i = 0; i < 10; i++) { + threads[i] = new Thread(() -> { + for (int iter = 0; stop.get() == false; iter++) { + if (iter % 2 == 0) { + lockedExecutor.executeLocked(() -> { + Thread.yield(); // give some chance to catch this stack trace + ops.incrementAndGet(); + }); + } else { + Thread.yield(); // give some chance to catch this stack trace + ops.incrementAndGet(); + } + } + }); + threads[i].setName("[" + nodeName + "][" + i + "]"); + threads[i].start(); + } + // suspend all node threads, retrying while any is caught inside the locked executor + disruption.startDisrupting(); + 
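+ // every node thread is now suspended, so the ops counter sampled below must stay frozen until stopDisrupting() resumes them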
long first = ops.get(); + assertThat(lockedExecutor.lock.isLocked(), equalTo(false)); // no threads should own the lock + Thread.sleep(100); + assertThat(ops.get(), equalTo(first)); + disruption.stopDisrupting(); + assertBusy(() -> assertThat(ops.get(), greaterThan(first))); + } finally { + stop.set(true); + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java b/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java index e09cc7534e6..d0e799d63fe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/annotations/TestLogging.java @@ -32,7 +32,6 @@ import static java.lang.annotation.ElementType.TYPE; * It supports multiple logger:level comma separated key value pairs * Use the _root keyword to set the root logger level * e.g. @TestLogging("_root:DEBUG,org.elasticsearch.cluster.metadata:TRACE") - * or just @TestLogging("_root:DEBUG,cluster.metadata:TRACE") since we start the test with -Des.logger.prefix= */ @Retention(RetentionPolicy.RUNTIME) @Target({PACKAGE, TYPE, METHOD}) diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 9c4612b22b3..90ac8ed78ca 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -642,6 +642,4 @@ public class MockTransportService extends TransportService { } } } - - } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index c65e885a9bc..ba831dde092 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -224,8 +224,6 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } catch (Exception e) { assertThat(e.getMessage(), false, equalTo(true)); } - - serviceA.removeHandler("sayHello"); } public void testThreadContext() throws ExecutionException, InterruptedException { @@ -281,8 +279,6 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { assertEquals("ping_user", threadPool.getThreadContext().getHeader("test.ping.user")); assertSame(context, threadPool.getThreadContext().getTransient("my_private_context")); assertNull("this header is only visible in the handler context", threadPool.getThreadContext().getHeader("some.temp.header")); - - serviceA.removeHandler("sayHello"); } public void testLocalNodeConnection() throws InterruptedException { @@ -375,8 +371,6 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } catch (Exception e) { assertThat(e.getMessage(), false, equalTo(true)); } - - serviceA.removeHandler("sayHello"); } public void testHelloWorldCompressed() { @@ -426,8 +420,6 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } catch (Exception e) { assertThat(e.getMessage(), false, equalTo(true)); } - - serviceA.removeHandler("sayHello"); } public void testErrorMessage() { @@ -469,8 +461,6 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } catch (Exception e) { 
assertThat(e.getCause().getMessage(), equalTo("runtime_exception: bad message !!!")); } - - serviceA.removeHandler("sayHelloException"); } public void testDisconnectListener() throws Exception { @@ -635,7 +625,6 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } catch (TransportException ex) { } - serviceA.removeHandler("sayHelloTimeoutDelayedResponse"); } public void testTimeoutSendExceptionWithNeverSendingBackResponse() throws Exception { @@ -678,8 +667,6 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } catch (Exception e) { assertThat(e, instanceOf(ReceiveTimeoutTransportException.class)); } - - serviceA.removeHandler("sayHelloTimeoutNoResponse"); } public void testTimeoutSendExceptionWithDelayedResponse() throws Exception { @@ -785,13 +772,12 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { for (Runnable runnable : assertions) { runnable.run(); } - serviceA.removeHandler("sayHelloTimeoutDelayedResponse"); waitForever.countDown(); doneWaitingForever.await(); assertTrue(inFlight.tryAcquire(Integer.MAX_VALUE, 10, TimeUnit.SECONDS)); } - @TestLogging(value = "test.transport.tracer:TRACE") + @TestLogging(value = "org.elasticsearch.test.transport.tracer:TRACE") public void testTracerLog() throws InterruptedException { TransportRequestHandler handler = new TransportRequestHandler() { @Override @@ -1325,8 +1311,6 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } catch (ConnectTransportException e) { // all is well } - - serviceA.removeHandler("sayHello"); } public void testMockUnresponsiveRule() { @@ -1385,8 +1369,6 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } catch (ConnectTransportException e) { // all is well } - - serviceA.removeHandler("sayHello"); } @@ -1721,4 +1703,16 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { serviceC.close(); } + + public void testRegisterHandlerTwice() { + serviceB.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), + (request, message) -> {throw new AssertionError("boom");}); + expectThrows(IllegalArgumentException.class, () -> + serviceB.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), + (request, message) -> {throw new AssertionError("boom");}) + ); + + serviceA.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), + (request, message) -> {throw new AssertionError("boom");}); + } } diff --git a/test/framework/src/main/resources/log4j2-test.properties b/test/framework/src/main/resources/log4j2-test.properties index 9cfe3e326aa..f5ab7ae8a2b 100644 --- a/test/framework/src/main/resources/log4j2-test.properties +++ b/test/framework/src/main/resources/log4j2-test.properties @@ -3,7 +3,7 @@ status = error appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console
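The %marker change at the end makes the console appender print each event's Log4j Marker (blank when no marker is attached) directly before the message, presumably so test log lines can carry a per-node tag. A minimal sketch of the effect using the plain Log4j 2 API (class and marker names here are hypothetical):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.apache.logging.log4j.Marker;
    import org.apache.logging.log4j.MarkerManager;

    public class MarkerPatternExample {
        private static final Logger logger = LogManager.getLogger(MarkerPatternExample.class);

        public static void main(String[] args) {
            // without a marker, %marker expands to nothing; renders roughly as:
            // [2016-09-13T10:00:00,000][INFO ][MarkerPatternExample     ] started
            logger.info("started");
            // with a marker, its name is printed right before the message:
            // [2016-09-13T10:00:00,000][INFO ][MarkerPatternExample     ] [node_t0] started
            Marker nodeMarker = MarkerManager.getMarker("[node_t0] ");
            logger.info(nodeMarker, "started");
        }
    }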