Merge branch 'master' into index-lifecycle
This commit is contained in:
commit
2820dba2d7
@@ -1,5 +1,5 @@
elasticsearch = 7.0.0-alpha1
lucene = 7.4.0-snapshot-1cbadda4d3
lucene = 7.4.0-snapshot-0a7c3f462f

# optional dependencies
spatial4j = 0.7

@@ -20,6 +20,7 @@
package org.elasticsearch.client;

import org.apache.http.Header;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;

@@ -38,6 +39,8 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;

@@ -134,11 +137,34 @@ public final class IndicesClient {
     * Put Mapping API on elastic.co</a>
     */
    public void putMappingAsync(PutMappingRequest putMappingRequest, ActionListener<PutMappingResponse> listener,
                                Header... headers) {
        restHighLevelClient.performRequestAsyncAndParseEntity(putMappingRequest, RequestConverters::putMapping,
            PutMappingResponse::fromXContent, listener, emptySet(), headers);
    }

    /**
     * Retrieves the mappings on an index or indices using the Get Mapping API
     * <p>
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-mapping.html">
     * Get Mapping API on elastic.co</a>
     */
    public GetMappingsResponse getMappings(GetMappingsRequest getMappingsRequest, Header... headers) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(getMappingsRequest, RequestConverters::getMappings,
            GetMappingsResponse::fromXContent, emptySet(), headers);
    }

    /**
     * Asynchronously retrieves the mappings on an index or indices using the Get Mapping API
     * <p>
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-mapping.html">
     * Get Mapping API on elastic.co</a>
     */
    public void getMappingsAsync(GetMappingsRequest getMappingsRequest, ActionListener<GetMappingsResponse> listener,
                                 Header... headers) {
        restHighLevelClient.performRequestAsyncAndParseEntity(getMappingsRequest, RequestConverters::getMappings,
            GetMappingsResponse::fromXContent, listener, emptySet(), headers);
    }

    /**
     * Updates aliases using the Index Aliases API
     * <p>

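The two new methods above follow the client's existing blocking/asynchronous pattern. A minimal hedged usage sketch; it assumes an existing `RestHighLevelClient` named `client`, and the index and type names are illustrative:

// mappings() is keyed by index name, then by type name
GetMappingsRequest request = new GetMappingsRequest().indices("my-index").types("_doc");
GetMappingsResponse response = client.indices().getMappings(request);
Map<String, Object> mapping = response.mappings().get("my-index").get("_doc").sourceAsMap();
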
@@ -45,6 +45,7 @@ import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;

@@ -195,6 +196,19 @@ final class RequestConverters {
        return request;
    }

    static Request getMappings(GetMappingsRequest getMappingsRequest) throws IOException {
        String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices();
        String[] types = getMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.types();

        Request request = new Request(HttpGet.METHOD_NAME, endpoint(indices, "_mapping", types));

        Params parameters = new Params(request);
        parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout());
        parameters.withIndicesOptions(getMappingsRequest.indicesOptions());
        parameters.withLocal(getMappingsRequest.local());
        return request;
    }

    static Request refresh(RefreshRequest refreshRequest) {
        String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_refresh"));

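The endpoint assembled by `getMappings` has the shape `/{indices}/_mapping/{types}`, where an empty index or type list is simply omitted. A self-contained sketch of that layout; the helper below only re-creates the behavior of `endpoint(...)` for illustration and is not the client's actual helper:

import java.util.StringJoiner;

public class GetMappingsEndpointSketch {
    // Mirrors the /{indices}/_mapping/{types} layout used above.
    static String endpoint(String[] indices, String[] types) {
        StringJoiner joiner = new StringJoiner("/", "/", "");
        if (indices.length > 0) {
            joiner.add(String.join(",", indices)); // e.g. "index1,index2"
        }
        joiner.add("_mapping");
        if (types.length > 0) {
            joiner.add(String.join(",", types));
        }
        return joiner.toString();
    }

    public static void main(String[] args) {
        // prints "/index1,index2/_mapping/_doc"
        System.out.println(endpoint(new String[] {"index1", "index2"}, new String[] {"_doc"}));
        // prints "/_mapping" when no indices or types are given
        System.out.println(endpoint(new String[0], new String[0]));
    }
}
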
@@ -42,6 +42,8 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;

@@ -79,6 +81,7 @@ import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;

@@ -328,6 +331,42 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
        }
    }

    public void testGetMapping() throws IOException {
        String indexName = "test";
        createIndex(indexName, Settings.EMPTY);

        PutMappingRequest putMappingRequest = new PutMappingRequest(indexName);
        putMappingRequest.type("_doc");
        XContentBuilder mappingBuilder = JsonXContent.contentBuilder();
        mappingBuilder.startObject().startObject("properties").startObject("field");
        mappingBuilder.field("type", "text");
        mappingBuilder.endObject().endObject().endObject();
        putMappingRequest.source(mappingBuilder);

        PutMappingResponse putMappingResponse =
            execute(putMappingRequest, highLevelClient().indices()::putMapping, highLevelClient().indices()::putMappingAsync);
        assertTrue(putMappingResponse.isAcknowledged());

        Map<String, Object> getIndexResponse = getAsMap(indexName);
        assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings._doc.properties.field.type", getIndexResponse));

        GetMappingsRequest request = new GetMappingsRequest()
            .indices(indexName)
            .types("_doc");

        GetMappingsResponse getMappingsResponse =
            execute(request, highLevelClient().indices()::getMappings, highLevelClient().indices()::getMappingsAsync);

        Map<String, Object> mappings = getMappingsResponse.getMappings().get(indexName).get("_doc").sourceAsMap();
        Map<String, String> type = new HashMap<>();
        type.put("type", "text");
        Map<String, Object> field = new HashMap<>();
        field.put("field", type);
        Map<String, Object> expected = new HashMap<>();
        expected.put("properties", field);
        assertThat(mappings, equalTo(expected));
    }

    public void testDeleteIndex() throws IOException {
        {
            // Delete index if exists

@@ -47,6 +47,7 @@ import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;

@@ -403,6 +404,47 @@ public class RequestConvertersTests extends ESTestCase {
        assertToXContentBody(putMappingRequest, request.getEntity());
    }

    public void testGetMapping() throws IOException {
        GetMappingsRequest getMappingRequest = new GetMappingsRequest();

        String[] indices = Strings.EMPTY_ARRAY;
        if (randomBoolean()) {
            indices = randomIndicesNames(0, 5);
            getMappingRequest.indices(indices);
        } else if (randomBoolean()) {
            getMappingRequest.indices((String[]) null);
        }

        String type = null;
        if (randomBoolean()) {
            type = randomAlphaOfLengthBetween(3, 10);
            getMappingRequest.types(type);
        } else if (randomBoolean()) {
            getMappingRequest.types((String[]) null);
        }

        Map<String, String> expectedParams = new HashMap<>();

        setRandomIndicesOptions(getMappingRequest::indicesOptions, getMappingRequest::indicesOptions, expectedParams);
        setRandomMasterTimeout(getMappingRequest, expectedParams);
        setRandomLocal(getMappingRequest, expectedParams);

        Request request = RequestConverters.getMappings(getMappingRequest);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        String index = String.join(",", indices);
        if (Strings.hasLength(index)) {
            endpoint.add(index);
        }
        endpoint.add("_mapping");
        if (type != null) {
            endpoint.add(type);
        }
        assertThat(endpoint.toString(), equalTo(request.getEndpoint()));

        assertThat(expectedParams, equalTo(request.getParameters()));
        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
    }

    public void testDeleteIndex() {
        String[] indices = randomIndicesNames(0, 5);
        DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indices);

@@ -41,6 +41,8 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;

@@ -64,6 +66,8 @@ import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.SyncedFlushResponse;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;

@@ -81,6 +85,8 @@ import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.hamcrest.Matchers.equalTo;

/**
 * This class is used to generate the Java Indices API documentation.
 * You need to wrap your code between two tags like:

@@ -532,17 +538,17 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase

        // tag::put-mapping-execute-listener
        ActionListener<PutMappingResponse> listener =
            new ActionListener<PutMappingResponse>() {
                @Override
                public void onResponse(PutMappingResponse putMappingResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::put-mapping-execute-listener

        // Replace the empty listener by a blocking listener in test

@@ -557,6 +563,133 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
        }
    }

    public void testGetMapping() throws IOException {
        RestHighLevelClient client = highLevelClient();

        {
            CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter"));
            assertTrue(createIndexResponse.isAcknowledged());
            PutMappingRequest request = new PutMappingRequest("twitter");
            request.type("tweet");
            request.source(
                "{\n" +
                "  \"properties\": {\n" +
                "    \"message\": {\n" +
                "      \"type\": \"text\"\n" +
                "    }\n" +
                "  }\n" +
                "}", // <1>
                XContentType.JSON);
            PutMappingResponse putMappingResponse = client.indices().putMapping(request);
            assertTrue(putMappingResponse.isAcknowledged());
        }

        {
            // tag::get-mapping-request
            GetMappingsRequest request = new GetMappingsRequest(); // <1>
            request.indices("twitter"); // <2>
            request.types("tweet"); // <3>
            // end::get-mapping-request

            // tag::get-mapping-request-masterTimeout
            request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
            request.masterNodeTimeout("1m"); // <2>
            // end::get-mapping-request-masterTimeout

            // tag::get-mapping-request-indicesOptions
            request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1>
            // end::get-mapping-request-indicesOptions

            // tag::get-mapping-execute
            GetMappingsResponse getMappingResponse = client.indices().getMappings(request);
            // end::get-mapping-execute

            // tag::get-mapping-response
            ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> allMappings = getMappingResponse.mappings(); // <1>
            MappingMetaData typeMapping = allMappings.get("twitter").get("tweet"); // <2>
            Map<String, Object> tweetMapping = typeMapping.sourceAsMap(); // <3>
            // end::get-mapping-response

            Map<String, String> type = new HashMap<>();
            type.put("type", "text");
            Map<String, Object> field = new HashMap<>();
            field.put("message", type);
            Map<String, Object> expected = new HashMap<>();
            expected.put("properties", field);
            assertThat(tweetMapping, equalTo(expected));
        }
    }

    public void testGetMappingAsync() throws Exception {
        final RestHighLevelClient client = highLevelClient();

        {
            CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter"));
            assertTrue(createIndexResponse.isAcknowledged());
            PutMappingRequest request = new PutMappingRequest("twitter");
            request.type("tweet");
            request.source(
                "{\n" +
                "  \"properties\": {\n" +
                "    \"message\": {\n" +
                "      \"type\": \"text\"\n" +
                "    }\n" +
                "  }\n" +
                "}", // <1>
                XContentType.JSON);
            PutMappingResponse putMappingResponse = client.indices().putMapping(request);
            assertTrue(putMappingResponse.isAcknowledged());
        }

        {
            GetMappingsRequest request = new GetMappingsRequest();
            request.indices("twitter");
            request.types("tweet");

            // tag::get-mapping-execute-listener
            ActionListener<GetMappingsResponse> listener =
                new ActionListener<GetMappingsResponse>() {
                    @Override
                    public void onResponse(GetMappingsResponse getMappingsResponse) {
                        // <1>
                    }

                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::get-mapping-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            final ActionListener<GetMappingsResponse> latchListener = new LatchedActionListener<>(listener, latch);
            listener = ActionListener.wrap(r -> {
                ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> allMappings = r.mappings();
                MappingMetaData typeMapping = allMappings.get("twitter").get("tweet");
                Map<String, Object> tweetMapping = typeMapping.sourceAsMap();

                Map<String, String> type = new HashMap<>();
                type.put("type", "text");
                Map<String, Object> field = new HashMap<>();
                field.put("message", type);
                Map<String, Object> expected = new HashMap<>();
                expected.put("properties", field);
                assertThat(tweetMapping, equalTo(expected));
                latchListener.onResponse(r);
            }, e -> {
                latchListener.onFailure(e);
                fail("should not fail");
            });

            // tag::get-mapping-execute-async
            client.indices().getMappingsAsync(request, listener); // <1>
            // end::get-mapping-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    public void testOpenIndex() throws Exception {
        RestHighLevelClient client = highLevelClient();

@@ -0,0 +1,80 @@
[[java-rest-high-get-mappings]]
=== Get Mappings API

[[java-rest-high-get-mappings-request]]
==== Get Mappings Request

A `GetMappingsRequest` can have an optional list of indices and an optional list of types:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-request]
--------------------------------------------------
<1> An empty request that will return all indices and types
<2> Setting the indices to fetch the mappings for
<3> The types to be returned

==== Optional arguments
The following arguments can also optionally be provided:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-request-masterTimeout]
--------------------------------------------------
<1> Timeout to connect to the master node as a `TimeValue`
<2> Timeout to connect to the master node as a `String`

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-request-indicesOptions]
--------------------------------------------------
<1> Options for expanding index names

[[java-rest-high-get-mappings-sync]]
==== Synchronous Execution

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-execute]
--------------------------------------------------

[[java-rest-high-get-mapping-async]]
==== Asynchronous Execution

The asynchronous execution of a get mappings request requires both the
`GetMappingsRequest` instance and an `ActionListener` instance to be passed to
the asynchronous method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-execute-async]
--------------------------------------------------
<1> The `GetMappingsRequest` to execute and the `ActionListener` to use when the execution completes

The asynchronous method does not block and returns immediately. Once it is
completed the `ActionListener` is called back using the `onResponse` method if
the execution successfully completed or using the `onFailure` method if it
failed.

A typical listener for `GetMappingsResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument

[[java-rest-high-get-mapping-response]]
==== Get Mappings Response

The returned `GetMappingsResponse` lets you retrieve information about the
executed operation as follows:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-response]
--------------------------------------------------
<1> Returning all indices' mappings
<2> Retrieving the mappings for a particular index and type
<3> Getting the mappings for the "tweet" type as a Java Map

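When the index and type names are not known up front, the same response can be walked exhaustively. A minimal sketch, assuming the `getMappingResponse` from the snippet above and the hppc `ObjectObjectCursor` import used alongside `ImmutableOpenMap`:

["source","java"]
--------------------------------------------------
for (ObjectObjectCursor<String, ImmutableOpenMap<String, MappingMetaData>> indexEntry : getMappingResponse.mappings()) {
    for (ObjectObjectCursor<String, MappingMetaData> typeEntry : indexEntry.value) {
        // indexEntry.key is the index name, typeEntry.key the type name
        Map<String, Object> mapping = typeEntry.value.sourceAsMap();
    }
}
--------------------------------------------------
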
@@ -95,6 +95,7 @@ include::indices/clear_cache.asciidoc[]
include::indices/force_merge.asciidoc[]
include::indices/rollover.asciidoc[]
include::indices/put_mapping.asciidoc[]
include::indices/get_mappings.asciidoc[]
include::indices/update_aliases.asciidoc[]
include::indices/exists_alias.asciidoc[]
include::indices/put_settings.asciidoc[]

@@ -167,7 +167,7 @@ The result of the above get operation is:
// TESTRESPONSE

Field values fetched from the document it self are always returned as an array.
Field values fetched from the document itself are always returned as an array.
Since the `counter` field is not stored the get request simply ignores it when trying to get the `stored_fields`.

It is also possible to retrieve metadata fields like the `_routing` field:

@@ -15,6 +15,9 @@
* The boundary specified using geohashes in the `geo_bounding_box` query
now includes the entire geohash cell, instead of just the geohash center.

* Attempts to generate multi-term phrase queries against non-text fields
with a custom analyzer will now throw an exception.

==== Adaptive replica selection enabled by default

Adaptive replica selection has been enabled by default. If you wish to return to

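To make the multi-term phrase note above concrete, a minimal hedged sketch using the regular query builder API; the field name and text are illustrative:

["source","java"]
--------------------------------------------------
// Running a phrase query like this against a non-text field that has a
// custom analyzer now throws an exception instead of failing silently.
MatchPhraseQueryBuilder phrase = QueryBuilders.matchPhraseQuery("a_keyword_field", "two words");
--------------------------------------------------
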
@@ -36,3 +36,11 @@ GET /_search
}
--------------------------------------------------
// CONSOLE

WARNING: By default `span_multi` queries are rewritten to a `span_or` query
containing **all** the expanded terms. This can be expensive if the number of expanded
terms is large. To avoid an unbounded expansion you can set the
<<query-dsl-multi-term-rewrite,rewrite method>> of the multi term query to `top_terms_*`
rewrite. Or, if you use `span_multi` on a `prefix` query only, you can
activate the <<index-prefix-config,`index_prefixes`>> field option of the `text` field instead. This will
rewrite any prefix query on the field to a single term query that matches the indexed prefix.

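A hedged Java-API sketch of the bounded rewrite described in the warning; the rewrite name is one of the documented `top_terms_*` variants:

["source","java"]
--------------------------------------------------
// Bound the expansion to the 32 top-scoring terms instead of a span_or over all expansions.
SpanMultiTermQueryBuilder spanMulti = QueryBuilders.spanMultiTermQueryBuilder(
        QueryBuilders.prefixQuery("text", "word").rewrite("top_terms_32"));
--------------------------------------------------
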
@@ -155,7 +155,7 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
                fieldParser = getParser(currentFieldName);
                fieldParser = getParser(currentFieldName, parser);
            } else {
                if (currentFieldName == null) {
                    throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] no field found");

@@ -341,10 +341,11 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
        }
    }

    private FieldParser getParser(String fieldName) {
    private FieldParser getParser(String fieldName, XContentParser xContentParser) {
        FieldParser parser = fieldParserMap.get(fieldName);
        if (parser == null && false == ignoreUnknownFields) {
            throw new IllegalArgumentException("[" + name + "] unknown field [" + fieldName + "], parser not found");
            throw new XContentParseException(xContentParser.getTokenLocation(),
                "[" + name + "] unknown field [" + fieldName + "], parser not found");
        }
        return parser;
    }

@@ -35,7 +35,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;

@@ -186,7 +185,6 @@ public class ObjectParserTests extends ESTestCase {
    }

    public void testExceptions() throws IOException {
        XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"test\" : \"foo\"}");
        class TestStruct {
            public void setTest(int test) {
            }

@@ -195,20 +193,16 @@ public class ObjectParserTests extends ESTestCase {
        TestStruct s = new TestStruct();
        objectParser.declareInt(TestStruct::setTest, new ParseField("test"));

        try {
            objectParser.parse(parser, s, null);
            fail("numeric value expected");
        } catch (XContentParseException ex) {
        {
            XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"test\" : \"foo\"}");
            XContentParseException ex = expectThrows(XContentParseException.class, () -> objectParser.parse(parser, s, null));
            assertThat(ex.getMessage(), containsString("[the_parser] failed to parse field [test]"));
            assertTrue(ex.getCause() instanceof NumberFormatException);
        }

        parser = createParser(JsonXContent.jsonXContent, "{\"not_supported_field\" : \"foo\"}");
        try {
            objectParser.parse(parser, s, null);
            fail("field not supported");
        } catch (IllegalArgumentException ex) {
            assertEquals(ex.getMessage(), "[the_parser] unknown field [not_supported_field], parser not found");
        {
            XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"not_supported_field\" : \"foo\"}");
            XContentParseException ex = expectThrows(XContentParseException.class, () -> objectParser.parse(parser, s, null));
            assertEquals(ex.getMessage(), "[1:2] [the_parser] unknown field [not_supported_field], parser not found");
        }
    }

@@ -29,6 +29,7 @@ import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -140,6 +141,42 @@ public class XContentParserTests extends ESTestCase {
        assertThat(map.size(), equalTo(0));
    }

    public void testMap() throws IOException {
        String source = "{\"i\": {\"_doc\": {\"f1\": {\"type\": \"text\", \"analyzer\": \"english\"}, " +
            "\"f2\": {\"type\": \"object\", \"properties\": {\"sub1\": {\"type\": \"keyword\", \"foo\": 17}}}}}}";
        Map<String, Object> f1 = new HashMap<>();
        f1.put("type", "text");
        f1.put("analyzer", "english");

        Map<String, Object> sub1 = new HashMap<>();
        sub1.put("type", "keyword");
        sub1.put("foo", 17);

        Map<String, Object> properties = new HashMap<>();
        properties.put("sub1", sub1);

        Map<String, Object> f2 = new HashMap<>();
        f2.put("type", "object");
        f2.put("properties", properties);

        Map<String, Object> doc = new HashMap<>();
        doc.put("f1", f1);
        doc.put("f2", f2);

        Map<String, Object> expected = new HashMap<>();
        expected.put("_doc", doc);

        Map<String, Object> i = new HashMap<>();
        i.put("i", expected);

        try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
            XContentParser.Token token = parser.nextToken();
            assertThat(token, equalTo(XContentParser.Token.START_OBJECT));
            Map<String, Object> map = parser.map();
            assertThat(map, equalTo(i));
        }
    }

    private Map<String, String> readMapStrings(String source) throws IOException {
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
            XContentParser.Token token = parser.nextToken();

@@ -0,0 +1 @@
bf2cfa0551ebdf08a2cf3079f3c74643bd9dbb76

@@ -1 +0,0 @@
98c920972b2f5e8563540e805d87e6a3bc888972

@@ -84,12 +84,6 @@ public class FeatureQueryBuilderTests extends AbstractQueryTestCase<FeatureQuery
        assertThat(query, either(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(expectedClass)));
    }

    @Override
    @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/30605")
    public void testUnknownField() {
        super.testUnknownField();
    }

    public void testDefaultScoreFunction() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        String query = "{\n" +

@@ -25,6 +25,7 @@ import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;

@@ -41,7 +42,7 @@ import java.util.List;
import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.CoreMatchers.containsString;

public class DiscountedCumulativeGainTests extends ESTestCase {

@@ -280,9 +281,9 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
        try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
            parser.nextToken();
            parser.nextToken();
            IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
            XContentParseException exception = expectThrows(XContentParseException.class,
                () -> DiscountedCumulativeGain.fromXContent(parser));
            assertThat(exception.getMessage(), startsWith("[dcg_at] unknown field"));
            assertThat(exception.getMessage(), containsString("[dcg_at] unknown field"));
        }
    }

@@ -25,6 +25,7 @@ import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;

@@ -41,7 +42,7 @@ import java.util.List;

import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.CoreMatchers.containsString;

public class MeanReciprocalRankTests extends ESTestCase {

@@ -189,9 +190,9 @@ public class MeanReciprocalRankTests extends ESTestCase {
        try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
            parser.nextToken();
            parser.nextToken();
            IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
            XContentParseException exception = expectThrows(XContentParseException.class,
                () -> MeanReciprocalRank.fromXContent(parser));
            assertThat(exception.getMessage(), startsWith("[reciprocal_rank] unknown field"));
            assertThat(exception.getMessage(), containsString("[reciprocal_rank] unknown field"));
        }
    }

@@ -25,6 +25,7 @@ import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;

@@ -41,7 +42,7 @@ import java.util.List;

import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.CoreMatchers.containsString;

public class PrecisionAtKTests extends ESTestCase {

@@ -203,8 +204,8 @@ public class PrecisionAtKTests extends ESTestCase {
        try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
            parser.nextToken();
            parser.nextToken();
            IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> PrecisionAtK.fromXContent(parser));
            assertThat(exception.getMessage(), startsWith("[precision] unknown field"));
            XContentParseException exception = expectThrows(XContentParseException.class, () -> PrecisionAtK.fromXContent(parser));
            assertThat(exception.getMessage(), containsString("[precision] unknown field"));
        }
    }

@@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;

@@ -33,7 +34,7 @@ import java.util.Collections;

import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.CoreMatchers.containsString;

public class RatedDocumentTests extends ESTestCase {

@@ -59,8 +60,8 @@ public class RatedDocumentTests extends ESTestCase {
        BytesReference originalBytes = toShuffledXContent(testItem, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean());
        BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, null, random());
        try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
            Exception exception = expectThrows(IllegalArgumentException.class, () -> RatedDocument.fromXContent(parser));
            assertThat(exception.getMessage(), startsWith("[rated_document] unknown field"));
            XContentParseException exception = expectThrows(XContentParseException.class, () -> RatedDocument.fromXContent(parser));
            assertThat(exception.getMessage(), containsString("[rated_document] unknown field"));
        }
    }

@@ -0,0 +1 @@
82d83fcac1d9c8948aa0247fc9c87f177ddbd59b

@@ -1 +0,0 @@
844e2b76f4bc6e646e1c3257d668ac598e03f36a

@@ -0,0 +1 @@
73fd4364f2931e7c8303b5927e140a7d21116c36

@@ -1 +0,0 @@
2f2bd2d67c7952e4ae14ab3f742824a45d0d1719

@@ -0,0 +1 @@
0a2c4417fa9a8be078864f590a5a66b98d551cf5

@@ -1 +0,0 @@
46ad7ebcfcdbdb60dd54aae4d720356a7a51c7c0

@@ -0,0 +1 @@
6fa179924f139a30fc0e5399256e1a44562ed32b

@@ -1 +0,0 @@
548e9f2b4d4a985dc174b2eee4007c0bd5642e68

@@ -0,0 +1 @@
5ed135d34d7868b71a725257a46dc8d8735a15d4

@@ -1 +0,0 @@
b90e66f4104f0234cfef335762f65a6fed695231

@@ -0,0 +1 @@
875911b36b99c2103719f94559878a0ecb862fb6

@@ -1 +0,0 @@
929a4eb52b11f6d3f0df9c8eba014f5ee2464c67

@@ -0,0 +1 @@
e7191628df8cb72382a20da79224aef677117849

@@ -1 +0,0 @@
0e6575a411b65cd95e0e54f04d3da278b68be521

@@ -34,6 +34,10 @@
        "default" : "open",
        "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both."
      },
      "master_timeout": {
        "type" : "time",
        "description" : "Specify timeout for connection to master"
      },
      "local": {
        "type": "boolean",
        "description": "Return local information, do not retrieve the state from master node (default: false)"

@@ -1,8 +1,8 @@
---
"search with index prefixes":
setup:
  - skip:
      version: " - 6.99.99"
      version: " - 6.2.99"
      reason: index_prefixes is only available as of 6.3.0

  - do:
      indices.create:
        index: test

@@ -27,6 +27,11 @@
      indices.refresh:
        index: [test]

---
"search with index prefixes":
  - skip:
      version: " - 6.2.99"
      reason: index_prefixes is only available as of 6.3.0
  - do:
      search:
        index: test

@@ -57,3 +62,23 @@

  - match: {hits.total: 1}
  - match: {hits.hits.0._score: 1}

---
"search index prefixes with span_multi":
  - skip:
      version: " - 6.99.99"
      reason: span_multi throws an exception with prefix fields on < versions

  - do:
      search:
        index: test
        body:
          query:
            span_near:
              clauses: [
                { "span_term": { "text": "short" } },
                { "span_multi": { "match": { "prefix": { "text": "word" } } } }
              ]

  - match: {hits.total: 1}

@@ -0,0 +1 @@
8cd761f40c4a89ed977167f0518d12e409eaf3d8

@@ -1 +0,0 @@
0f75703c30756c31f7d09ec79191dab6fb35c958

@@ -0,0 +1 @@
8c93ed67599d345b9359586248ab92342d7d3033

@@ -1 +0,0 @@
c5c519fdea65726612f79e3dd942b7316966646e

@@ -0,0 +1 @@
003ed080e5184661e606091cd321c229798b22f8

@@ -1 +0,0 @@
f345b6aa3c550dafc63de3e5a5c404691e782336

@@ -0,0 +1 @@
0b4be9f96edfd3dbcff5aa9b3f0914e86eb9cc51

@@ -1 +0,0 @@
7a74855e37124a27af36390c9d15abe33614129e

@@ -0,0 +1 @@
a5dcceb5bc017cee6ab5d3ee1943aca1ac6fe074

@@ -1 +0,0 @@
0e3df4b469465ef101254fdcbb08ebd8a19f1f9d

@@ -0,0 +1 @@
b59e7441f121da969bef8eef2c0c61743b4230a8

@@ -1 +0,0 @@
05d236149c99c860e6b627a8f78ea32918c108c3

@@ -0,0 +1 @@
46736dbb07b432f0a7c1b3080f62932c483e5cb9

@@ -1 +0,0 @@
d83e7e65eb268425f7bd5be2425d4a00b556bc47

@@ -0,0 +1 @@
ee203718d525da0c6258a51a5a32d877089fe5af

@@ -1 +0,0 @@
440a998b5bf99871bec4272a219de01b25751d5c

@@ -0,0 +1 @@
cf17a332d8e42a45e8f013d5df408f4391d2620a

@@ -1 +0,0 @@
2a5c031155fdfa743af321150c0dd654a6ea3c71

@@ -0,0 +1 @@
04832303d70502d2ece44501cb1716f42e24fe35

@@ -1 +0,0 @@
d021c9a461ff0f020d038ad5ecc5127973d4674a

@@ -0,0 +1 @@
639313e3a9573779b6a28b45a7f57fc1f73ffa46

@@ -1 +0,0 @@
9877a14c53e69b39fff2bf10d49a61171746d940

@@ -0,0 +1 @@
6144b493ba3588a638858d0058054758acc619b9

@@ -1 +0,0 @@
7d7e5101b46a120efa311509948c0d1f9bf30155

@@ -0,0 +1 @@
9d00c6b8bbbbb496aecd555406267fee9e0af914

@@ -1 +0,0 @@
5a4c11db96ae70b9048243cc530fcbc76faa0978

@@ -0,0 +1 @@
159cdb6d36845690cb1972d02cc0b472bb14b7f3

@@ -1 +0,0 @@
afb01af1450067b145ca2c1d737b5907288af560

@@ -0,0 +1 @@
af1dd0218d58990cca5c1592d9722e67d233c996

@@ -1 +0,0 @@
473f0221e0b2ea45940d8ae6dcf16e39c81b18c2

@@ -20,15 +20,31 @@
package org.elasticsearch.action.admin.indices.mapping.get;

import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Map;

public class GetMappingsResponse extends ActionResponse {
public class GetMappingsResponse extends ActionResponse implements ToXContentFragment {

    private static final ParseField MAPPINGS = new ParseField("mappings");

    private static final ObjectParser<GetMappingsResponse, Void> PARSER =
        new ObjectParser<GetMappingsResponse, Void>("get-mappings", false, GetMappingsResponse::new);

    private ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> mappings = ImmutableOpenMap.of();

@@ -77,4 +93,94 @@ public class GetMappingsResponse extends ActionResponse {
            }
        }
    }

    public static GetMappingsResponse fromXContent(XContentParser parser) throws IOException {
        if (parser.currentToken() == null) {
            parser.nextToken();
        }
        assert parser.currentToken() == XContentParser.Token.START_OBJECT;
        Map<String, Object> parts = parser.map();

        ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> builder = new ImmutableOpenMap.Builder<>();
        for (Map.Entry<String, Object> entry : parts.entrySet()) {
            final String indexName = entry.getKey();
            assert entry.getValue() instanceof Map : "expected a map as type mapping, but got: " + entry.getValue().getClass();
            final Map<String, Object> mapping = (Map<String, Object>) ((Map) entry.getValue()).get(MAPPINGS.getPreferredName());

            ImmutableOpenMap.Builder<String, MappingMetaData> typeBuilder = new ImmutableOpenMap.Builder<>();
            for (Map.Entry<String, Object> typeEntry : mapping.entrySet()) {
                final String typeName = typeEntry.getKey();
                assert typeEntry.getValue() instanceof Map : "expected a map as inner type mapping, but got: " +
                    typeEntry.getValue().getClass();
                final Map<String, Object> fieldMappings = (Map<String, Object>) typeEntry.getValue();
                MappingMetaData mmd = new MappingMetaData(typeName, fieldMappings);
                typeBuilder.put(typeName, mmd);
            }
            builder.put(indexName, typeBuilder.build());
        }

        return new GetMappingsResponse(builder.build());
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return toXContent(builder, params, true);
    }

    public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean includeTypeName) throws IOException {
        for (final ObjectObjectCursor<String, ImmutableOpenMap<String, MappingMetaData>> indexEntry : getMappings()) {
            builder.startObject(indexEntry.key);
            {
                if (includeTypeName == false) {
                    MappingMetaData mappings = null;
                    for (final ObjectObjectCursor<String, MappingMetaData> typeEntry : indexEntry.value) {
                        if (typeEntry.key.equals("_default_") == false) {
                            assert mappings == null;
                            mappings = typeEntry.value;
                        }
                    }
                    if (mappings == null) {
                        // no mappings yet
                        builder.startObject(MAPPINGS.getPreferredName()).endObject();
                    } else {
                        builder.field(MAPPINGS.getPreferredName(), mappings.sourceAsMap());
                    }
                } else {
                    builder.startObject(MAPPINGS.getPreferredName());
                    {
                        for (final ObjectObjectCursor<String, MappingMetaData> typeEntry : indexEntry.value) {
                            builder.field(typeEntry.key, typeEntry.value.sourceAsMap());
                        }
                    }
                    builder.endObject();
                }
            }
            builder.endObject();
        }
        return builder;
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    @Override
    public int hashCode() {
        return mappings.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }

        if (getClass() != obj.getClass()) {
            return false;
        }

        GetMappingsResponse other = (GetMappingsResponse) obj;
        return this.mappings.equals(other.mappings);
    }
}

@@ -56,13 +56,13 @@ final class TranslogLeafReader extends LeafReader {
    private final Translog.Index operation;
    private static final FieldInfo FAKE_SOURCE_FIELD
        = new FieldInfo(SourceFieldMapper.NAME, 1, false, false, false, IndexOptions.NONE, DocValuesType.NONE, -1, Collections.emptyMap(),
            0,0);
            0, 0, false);
    private static final FieldInfo FAKE_ROUTING_FIELD
        = new FieldInfo(RoutingFieldMapper.NAME, 2, false, false, false, IndexOptions.NONE, DocValuesType.NONE, -1, Collections.emptyMap(),
            0,0);
            0, 0, false);
    private static final FieldInfo FAKE_ID_FIELD
        = new FieldInfo(IdFieldMapper.NAME, 3, false, false, false, IndexOptions.NONE, DocValuesType.NONE, -1, Collections.emptyMap(),
            0,0);
            0, 0, false);
    private final Version indexVersionCreated;

    TranslogLeafReader(Translog.Index operation, Version indexVersionCreated) {

@@ -40,6 +40,7 @@ import org.apache.lucene.search.NormsFieldExistsQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;

@@ -175,7 +176,16 @@ public class TextFieldMapper extends FieldMapper {
            if (fieldType().isSearchable() == false) {
                throw new IllegalArgumentException("Cannot set index_prefixes on unindexed field [" + name() + "]");
            }
            if (fieldType.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) {
            // Copy the index options of the main field to allow phrase queries on
            // the prefix field.
            if (context.indexCreatedVersion().onOrAfter(Version.V_6_4_0)) {
                if (fieldType.indexOptions() == IndexOptions.DOCS_AND_FREQS) {
                    // frequencies are not needed because prefix queries always use a constant score
                    prefixFieldType.setIndexOptions(IndexOptions.DOCS);
                } else {
                    prefixFieldType.setIndexOptions(fieldType.indexOptions());
                }
            } else if (fieldType.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) {
                prefixFieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
            }
            if (fieldType.storeTermVectorOffsets()) {

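For context, the prefix sub-field configured above is created by the `index_prefixes` option of a `text` field. A hedged sketch of a mapping that enables it; the field name is illustrative and `min_chars`/`max_chars` are the option's documented bounds:

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class IndexPrefixesMappingSketch {
    public static void main(String[] args) throws Exception {
        XContentBuilder mapping = JsonXContent.contentBuilder();
        mapping.startObject()
            .startObject("properties")
                .startObject("body")
                    .field("type", "text")
                    .startObject("index_prefixes")
                        .field("min_chars", 2) // index prefixes of length 2 through 5
                        .field("max_chars", 5)
                    .endObject()
                .endObject()
            .endObject()
        .endObject();
        System.out.println(Strings.toString(mapping));
    }
}
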
@@ -18,18 +18,28 @@
 */
package org.elasticsearch.index.query;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
import org.apache.lucene.search.spans.SpanBoostQuery;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.query.support.QueryParsers;

import java.io.IOException;
import java.util.Objects;

@@ -124,22 +134,67 @@ public class SpanMultiTermQueryBuilder extends AbstractQueryBuilder<SpanMultiTer
    protected Query doToQuery(QueryShardContext context) throws IOException {
        Query subQuery = multiTermQueryBuilder.toQuery(context);
        float boost = AbstractQueryBuilder.DEFAULT_BOOST;
        if (subQuery instanceof BoostQuery) {
            BoostQuery boostQuery = (BoostQuery) subQuery;
            subQuery = boostQuery.getQuery();
            boost = boostQuery.getBoost();
        while (true) {
            if (subQuery instanceof ConstantScoreQuery) {
                subQuery = ((ConstantScoreQuery) subQuery).getQuery();
                boost = 1;
            } else if (subQuery instanceof BoostQuery) {
                BoostQuery boostQuery = (BoostQuery) subQuery;
                subQuery = boostQuery.getQuery();
                boost *= boostQuery.getBoost();
            } else {
                break;
            }
        }
        //no MultiTermQuery extends SpanQuery, so SpanBoostQuery is not supported here
        final SpanQuery spanQuery;
        // no MultiTermQuery extends SpanQuery, so SpanBoostQuery is not supported here
        assert subQuery instanceof SpanBoostQuery == false;
        if (subQuery instanceof MultiTermQuery == false) {
            throw new UnsupportedOperationException("unsupported inner query, should be " + MultiTermQuery.class.getName() + " but was "
                + subQuery.getClass().getName());
        if (subQuery instanceof TermQuery) {
            /**
             * Text fields that index prefixes can rewrite prefix queries
             * into term queries. See {@link TextFieldMapper.TextFieldType#prefixQuery}.
             */
            if (multiTermQueryBuilder.getClass() != PrefixQueryBuilder.class) {
                throw new UnsupportedOperationException("unsupported inner query generated by " +
                    multiTermQueryBuilder.getClass().getName() + ", should be " + MultiTermQuery.class.getName()
                    + " but was " + subQuery.getClass().getName());
            }
            if (context.getIndexSettings().getIndexVersionCreated().before(Version.V_6_4_0)) {
                /**
                 * Indices created in this version do not index positions on the prefix field
                 * so we cannot use it to match positional queries. Instead, we explicitly create the prefix
                 * query on the main field to avoid the rewrite.
                 */
                PrefixQueryBuilder prefixBuilder = (PrefixQueryBuilder) multiTermQueryBuilder;
                PrefixQuery prefixQuery = new PrefixQuery(new Term(prefixBuilder.fieldName(), prefixBuilder.value()));
                if (prefixBuilder.rewrite() != null) {
                    MultiTermQuery.RewriteMethod rewriteMethod =
                        QueryParsers.parseRewriteMethod(prefixBuilder.rewrite(), null, LoggingDeprecationHandler.INSTANCE);
                    prefixQuery.setRewriteMethod(rewriteMethod);
                }
                spanQuery = new SpanMultiTermQueryWrapper<>(prefixQuery);
            } else {
                String origFieldName = ((PrefixQueryBuilder) multiTermQueryBuilder).fieldName();
                SpanTermQuery spanTermQuery = new SpanTermQuery(((TermQuery) subQuery).getTerm());
                /**
                 * Prefixes are indexed in a different field so we mask the term query with the original field
                 * name. This is required because span_near and span_or queries don't work across different fields.
                 * The masking is safe because the prefix field is indexed using the same content as the original field
                 * and the prefix analyzer preserves positions.
                 */
                spanQuery = new FieldMaskingSpanQuery(spanTermQuery, origFieldName);
            }
        } else {
            if (subQuery instanceof MultiTermQuery == false) {
                throw new UnsupportedOperationException("unsupported inner query, should be "
                    + MultiTermQuery.class.getName() + " but was " + subQuery.getClass().getName());
            }
            spanQuery = new SpanMultiTermQueryWrapper<>((MultiTermQuery) subQuery);
        }
        SpanQuery wrapper = new SpanMultiTermQueryWrapper<>((MultiTermQuery) subQuery);
        if (boost != AbstractQueryBuilder.DEFAULT_BOOST) {
            wrapper = new SpanBoostQuery(wrapper, boost);
            return new SpanBoostQuery(spanQuery, boost);
        }
        return wrapper;
        return spanQuery;
    }

    @Override

@@ -352,38 +352,41 @@ public class MatchQuery {

        @Override
        protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException {
-           IllegalStateException e = checkForPositions(field);
-           if (e != null) {
+           try {
+               checkForPositions(field);
+               Query query = mapper.phraseQuery(field, stream, slop, enablePositionIncrements);
+               if (query instanceof PhraseQuery) {
+                   // synonyms that expand to multiple terms can return a phrase query.
+                   return blendPhraseQuery((PhraseQuery) query, mapper);
+               }
+               return query;
+           }
+           catch (IllegalArgumentException | IllegalStateException e) {
                if (lenient) {
                    return newLenientFieldQuery(field, e);
                }
                throw e;
            }
-           Query query = mapper.phraseQuery(field, stream, slop, enablePositionIncrements);
-           if (query instanceof PhraseQuery) {
-               // synonyms that expand to multiple terms can return a phrase query.
-               return blendPhraseQuery((PhraseQuery) query, mapper);
-           }
-           return query;
        }

        @Override
        protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException {
-           IllegalStateException e = checkForPositions(field);
-           if (e != null) {
+           try {
+               checkForPositions(field);
+               return mapper.multiPhraseQuery(field, stream, slop, enablePositionIncrements);
+           }
+           catch (IllegalArgumentException | IllegalStateException e) {
                if (lenient) {
                    return newLenientFieldQuery(field, e);
                }
                throw e;
            }
-           return mapper.multiPhraseQuery(field, stream, slop, enablePositionIncrements);
        }

-       private IllegalStateException checkForPositions(String field) {
+       private void checkForPositions(String field) {
            if (hasPositions(mapper) == false) {
-               return new IllegalStateException("field:[" + field + "] was indexed without position data; cannot run PhraseQuery");
+               throw new IllegalStateException("field:[" + field + "] was indexed without position data; cannot run PhraseQuery");
            }
-           return null;
        }

        /**
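The shape shared by both methods after this change is worth calling out: validation now throws, and leniency is applied at the catch site. A minimal sketch of the pattern, assuming an illustrative helper name (in Elasticsearch, newLenientFieldQuery similarly returns a query that matches no documents):

    import java.util.function.Supplier;
    import org.apache.lucene.search.MatchNoDocsQuery;
    import org.apache.lucene.search.Query;

    static Query buildLeniently(String field, boolean lenient, Supplier<Query> builder) {
        try {
            return builder.get(); // may throw on a field without positions, bad input, etc.
        } catch (IllegalArgumentException | IllegalStateException e) {
            if (lenient) {
                // degrade to "no hits" instead of failing the whole search
                return new MatchNoDocsQuery("lenient(" + field + "): " + e.getMessage());
            }
            throw e;
        }
    }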
@@ -32,6 +32,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.set.Sets;
+import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.indices.TypeMissingException;

@@ -83,6 +84,7 @@ public class RestGetMappingAction extends BaseRestHandler {
        final GetMappingsRequest getMappingsRequest = new GetMappingsRequest();
        getMappingsRequest.indices(indices).types(types);
        getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions()));
        getMappingsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getMappingsRequest.masterNodeTimeout()));
        getMappingsRequest.local(request.paramAsBoolean("local", getMappingsRequest.local()));
        return channel -> client.admin().indices().getMappings(getMappingsRequest, new RestBuilderListener<GetMappingsResponse>(channel) {
            @Override

@@ -129,54 +131,17 @@ public class RestGetMappingAction extends BaseRestHandler {
                    status = RestStatus.OK;
                } else {
                    status = RestStatus.NOT_FOUND;
-                   final String message;
-                   if (difference.size() == 1) {
-                       message = String.format(Locale.ROOT, "type [%s] missing", toNamesString(difference.iterator().next()));
-                   } else {
-                       message = String.format(Locale.ROOT, "types [%s] missing", toNamesString(difference.toArray(new String[0])));
-                   }
+                   final String message = String.format(Locale.ROOT, "type" + (difference.size() == 1 ? "" : "s") +
+                       " [%s] missing", Strings.collectionToCommaDelimitedString(difference));
                    builder.field("error", message);
                    builder.field("status", status.getStatus());
                }

-               for (final ObjectObjectCursor<String, ImmutableOpenMap<String, MappingMetaData>> indexEntry : mappingsByIndex) {
-                   builder.startObject(indexEntry.key);
-                   {
-                       if (includeTypeName == false) {
-                           MappingMetaData mappings = null;
-                           for (final ObjectObjectCursor<String, MappingMetaData> typeEntry : indexEntry.value) {
-                               if (typeEntry.key.equals("_default_") == false) {
-                                   assert mappings == null;
-                                   mappings = typeEntry.value;
-                               }
-                           }
-                           if (mappings == null) {
-                               // no mappings yet
-                               builder.startObject("mappings").endObject();
-                           } else {
-                               builder.field("mappings", mappings.sourceAsMap());
-                           }
-                       } else {
-                           builder.startObject("mappings");
-                           {
-                               for (final ObjectObjectCursor<String, MappingMetaData> typeEntry : indexEntry.value) {
-                                   builder.field(typeEntry.key, typeEntry.value.sourceAsMap());
-                               }
-                           }
-                           builder.endObject();
-                       }
-                   }
-                   builder.endObject();
-               }
+               response.toXContent(builder, ToXContent.EMPTY_PARAMS, includeTypeName);
            }
            builder.endObject();

            return new BytesRestResponse(status, builder);
        }
    });
}

-   private static String toNamesString(final String... names) {
-       return Arrays.stream(names).collect(Collectors.joining(","));
-   }
-
}
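For reference, a sketch of driving this endpoint through the high-level REST client's new getMappings methods; the index name and client setup here are assumed for illustration, not taken from this commit:

    GetMappingsRequest request = new GetMappingsRequest().indices("twitter");
    GetMappingsResponse response = client.indices().getMappings(request);
    ImmutableOpenMap<String, MappingMetaData> typeMappings = response.mappings().get("twitter");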
@@ -36,7 +36,7 @@ import org.apache.lucene.search.vectorhighlight.SingleFragListBuilder;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field;

@@ -71,9 +71,9 @@ public class FastVectorHighlighter implements Highlighter {
        SearchContextHighlight.Field field = highlighterContext.field;
        SearchContext context = highlighterContext.context;
        FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;
-       FieldMapper mapper = highlighterContext.mapper;
+       MappedFieldType fieldType = highlighterContext.fieldType;

-       if (canHighlight(mapper) == false) {
+       if (canHighlight(fieldType) == false) {
            throw new IllegalArgumentException("the field [" + highlighterContext.fieldName +
                "] should be indexed with term vector with position offsets to be used with fast vector highlighter");
        }

@@ -87,7 +87,7 @@ public class FastVectorHighlighter implements Highlighter {
        HighlighterEntry cache = (HighlighterEntry) hitContext.cache().get(CACHE_KEY);

        try {
-           MapperHighlightEntry entry = cache.mappers.get(mapper);
+           FieldHighlightEntry entry = cache.fields.get(fieldType);
            if (entry == null) {
                FragListBuilder fragListBuilder;
                BaseFragmentsBuilder fragmentsBuilder;

@@ -97,37 +97,37 @@ public class FastVectorHighlighter implements Highlighter {
                if (field.fieldOptions().numberOfFragments() == 0) {
                    fragListBuilder = new SingleFragListBuilder();

-                   if (!forceSource && mapper.fieldType().stored()) {
-                       fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(),
+                   if (!forceSource && fieldType.stored()) {
+                       fragmentsBuilder = new SimpleFragmentsBuilder(fieldType, field.fieldOptions().preTags(),
                            field.fieldOptions().postTags(), boundaryScanner);
                    } else {
-                       fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context,
+                       fragmentsBuilder = new SourceSimpleFragmentsBuilder(fieldType, context,
                            field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
                    }
                } else {
                    fragListBuilder = field.fieldOptions().fragmentOffset() == -1 ?
                        new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset());
                    if (field.fieldOptions().scoreOrdered()) {
-                       if (!forceSource && mapper.fieldType().stored()) {
+                       if (!forceSource && fieldType.stored()) {
                            fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(),
                                field.fieldOptions().postTags(), boundaryScanner);
                        } else {
-                           fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(mapper, context,
+                           fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(fieldType, context,
                                field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
                        }
                    } else {
-                       if (!forceSource && mapper.fieldType().stored()) {
-                           fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(),
+                       if (!forceSource && fieldType.stored()) {
+                           fragmentsBuilder = new SimpleFragmentsBuilder(fieldType, field.fieldOptions().preTags(),
                                field.fieldOptions().postTags(), boundaryScanner);
                        } else {
                            fragmentsBuilder =
-                               new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(),
+                               new SourceSimpleFragmentsBuilder(fieldType, context, field.fieldOptions().preTags(),
                                    field.fieldOptions().postTags(), boundaryScanner);
                        }
                    }
                }
                fragmentsBuilder.setDiscreteMultiValueHighlighting(termVectorMultiValue);
-               entry = new MapperHighlightEntry();
+               entry = new FieldHighlightEntry();
                if (field.fieldOptions().requireFieldMatch()) {
                    /**
                     * we use top level reader to rewrite the query against all readers,

@@ -152,7 +152,7 @@ public class FastVectorHighlighter implements Highlighter {
                    cache.fvh = new org.apache.lucene.search.vectorhighlight.FastVectorHighlighter();
                }
                CustomFieldQuery.highlightFilters.set(field.fieldOptions().highlightFilter());
-               cache.mappers.put(mapper, entry);
+               cache.fields.put(fieldType, entry);
            }
            final FieldQuery fieldQuery;
            if (field.fieldOptions().requireFieldMatch()) {

@@ -173,12 +173,12 @@ public class FastVectorHighlighter implements Highlighter {
            // Only send matched fields if they were requested to save time.
            if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) {
                fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(),
-                   mapper.fieldType().name(), field.fieldOptions().matchedFields(), fragmentCharSize,
+                   fieldType.name(), field.fieldOptions().matchedFields(), fragmentCharSize,
                    numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(),
                    field.fieldOptions().postTags(), encoder);
            } else {
                fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(),
-                   mapper.fieldType().name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder,
+                   fieldType.name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder,
                    entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder);
            }

@@ -193,7 +193,7 @@ public class FastVectorHighlighter implements Highlighter {
            FieldFragList fieldFragList = new SimpleFieldFragList(-1 /*ignored*/);
            fieldFragList.add(0, noMatchSize, Collections.<WeightedPhraseInfo>emptyList());
            fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(),
-               mapper.fieldType().name(), fieldFragList, 1, field.fieldOptions().preTags(),
+               fieldType.name(), fieldFragList, 1, field.fieldOptions().preTags(),
                field.fieldOptions().postTags(), encoder);
            if (fragments != null && fragments.length > 0) {
                return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));

@@ -209,9 +209,10 @@ public class FastVectorHighlighter implements Highlighter {
    }

    @Override
-   public boolean canHighlight(FieldMapper fieldMapper) {
-       return fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets()
-           && fieldMapper.fieldType().storeTermVectorPositions();
+   public boolean canHighlight(MappedFieldType fieldType) {
+       return fieldType.storeTermVectors()
+           && fieldType.storeTermVectorOffsets()
+           && fieldType.storeTermVectorPositions();
    }

    private static BoundaryScanner getBoundaryScanner(Field field) {

@@ -244,7 +245,7 @@ public class FastVectorHighlighter implements Highlighter {
        }
    }

-   private class MapperHighlightEntry {
+   private class FieldHighlightEntry {
        public FragListBuilder fragListBuilder;
        public FragmentsBuilder fragmentsBuilder;
        public FieldQuery noFieldMatchFieldQuery;

@@ -253,6 +254,6 @@ public class FastVectorHighlighter implements Highlighter {

    private class HighlighterEntry {
        public org.apache.lucene.search.vectorhighlight.FastVectorHighlighter fvh;
-       public Map<FieldMapper, MapperHighlightEntry> mappers = new HashMap<>();
+       public Map<MappedFieldType, FieldHighlightEntry> fields = new HashMap<>();
    }
}
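The canHighlight() check above encodes the fast vector highlighter's indexing requirement. A mapping sketch that satisfies it (the field name is assumed):

    XContentBuilder mapping = jsonBuilder().startObject()
        .startObject("properties")
            .startObject("title")
                .field("type", "text")
                // term vectors with positions and offsets are exactly what
                // canHighlight() tests for
                .field("term_vector", "with_positions_offsets")
            .endObject()
        .endObject()
    .endObject();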
@@ -29,7 +29,7 @@ import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.index.analysis.CustomAnalyzer;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.TokenFilterFactory;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;

import java.util.Comparator;
import java.util.List;

@@ -47,10 +47,10 @@ public final class FragmentBuilderHelper {
     * Fixes problems with broken analysis chains if positions and offsets are messed up that can lead to
     * {@link StringIndexOutOfBoundsException} in the {@link FastVectorHighlighter}
     */
-   public static WeightedFragInfo fixWeightedFragInfo(FieldMapper mapper, Field[] values, WeightedFragInfo fragInfo) {
+   public static WeightedFragInfo fixWeightedFragInfo(MappedFieldType fieldType, Field[] values, WeightedFragInfo fragInfo) {
        assert fragInfo != null : "FragInfo must not be null";
-       assert mapper.fieldType().name().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name();
-       if (!fragInfo.getSubInfos().isEmpty() && containsBrokenAnalysis(mapper.fieldType().indexAnalyzer())) {
+       assert fieldType.name().equals(values[0].name()) : "Expected MappedFieldType for field " + values[0].name();
+       if (!fragInfo.getSubInfos().isEmpty() && containsBrokenAnalysis(fieldType.indexAnalyzer())) {
            /* This is a special case where broken analysis like WDF is used for term-vector creation at index-time
             * which can potentially mess up the offsets. To prevent a SAIIOBException we need to resort
             * the fragments based on their offsets rather than using solely the positions as it is done in
@@ -24,18 +24,16 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HighlightPhase extends AbstractComponent implements FetchSubPhase {

@@ -71,8 +69,8 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {

            boolean fieldNameContainsWildcards = field.field().contains("*");
            for (String fieldName : fieldNamesToHighlight) {
-               FieldMapper fieldMapper = getMapperForField(fieldName, context, hitContext);
-               if (fieldMapper == null) {
+               MappedFieldType fieldType = context.mapperService().fullName(fieldName);
+               if (fieldType == null) {
                    continue;
                }

@@ -85,8 +83,8 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
                // If the field was explicitly given we assume that whoever issued the query knew
                // what they were doing and try to highlight anyway.
                if (fieldNameContainsWildcards) {
-                   if (fieldMapper.fieldType().typeName().equals(TextFieldMapper.CONTENT_TYPE) == false &&
-                       fieldMapper.fieldType().typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false) {
+                   if (fieldType.typeName().equals(TextFieldMapper.CONTENT_TYPE) == false &&
+                       fieldType.typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false) {
                        continue;
                    }
                }

@@ -104,10 +102,10 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
                if (highlightQuery == null) {
                    highlightQuery = context.parsedQuery().query();
                }
-               HighlighterContext highlighterContext = new HighlighterContext(fieldName, field, fieldMapper, context,
-                   hitContext, highlightQuery);
+               HighlighterContext highlighterContext = new HighlighterContext(fieldName,
+                   field, fieldType, context, hitContext, highlightQuery);

-               if ((highlighter.canHighlight(fieldMapper) == false) && fieldNameContainsWildcards) {
+               if ((highlighter.canHighlight(fieldType) == false) && fieldNameContainsWildcards) {
                    // if several fieldnames matched the wildcard then we want to skip those that we cannot highlight
                    continue;
                }

@@ -119,10 +117,4 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
        }
        hitContext.hit().highlightFields(highlightFields);
    }
-
-   private FieldMapper getMapperForField(String fieldName, SearchContext searchContext, HitContext hitContext) {
-       DocumentMapper documentMapper = searchContext.mapperService().documentMapper(hitContext.hit().getType());
-       // TODO: no need to lookup the doc mapper with unambiguous field names? just look at the mapper service
-       return documentMapper.mappers().smartNameFieldMapper(fieldName);
-   }
}
@@ -22,7 +22,7 @@ import org.apache.lucene.search.highlight.DefaultEncoder;
import org.apache.lucene.search.highlight.Encoder;
import org.apache.lucene.search.highlight.SimpleHTMLEncoder;
import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;

@@ -46,15 +46,17 @@ public final class HighlightUtils {
    /**
     * Load field values for highlighting.
     */
-   public static List<Object> loadFieldValues(SearchContextHighlight.Field field, FieldMapper mapper, SearchContext searchContext,
-                                              FetchSubPhase.HitContext hitContext) throws IOException {
+   public static List<Object> loadFieldValues(SearchContextHighlight.Field field,
+                                              MappedFieldType fieldType,
+                                              SearchContext searchContext,
+                                              FetchSubPhase.HitContext hitContext) throws IOException {
        //percolator needs to always load from source, thus it sets the global force source to true
        boolean forceSource = searchContext.highlight().forceSource(field);
        List<Object> textsToHighlight;
-       if (!forceSource && mapper.fieldType().stored()) {
-           CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(mapper.fieldType().name()), false);
+       if (!forceSource && fieldType.stored()) {
+           CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(fieldType.name()), false);
            hitContext.reader().document(hitContext.docId(), fieldVisitor);
-           textsToHighlight = fieldVisitor.fields().get(mapper.fieldType().name());
+           textsToHighlight = fieldVisitor.fields().get(fieldType.name());
            if (textsToHighlight == null) {
                // Can happen if the document doesn't have the field to highlight
                textsToHighlight = Collections.emptyList();

@@ -62,7 +64,7 @@ public final class HighlightUtils {
        } else {
            SourceLookup sourceLookup = searchContext.lookup().source();
            sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId());
-           textsToHighlight = sourceLookup.extractRawValues(mapper.fieldType().name());
+           textsToHighlight = sourceLookup.extractRawValues(fieldType.name());
        }
        assert textsToHighlight != null;
        return textsToHighlight;
@@ -18,7 +18,7 @@
 */
package org.elasticsearch.search.fetch.subphase.highlight;

-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;

/**
 * Highlights a search result.

@@ -27,5 +27,5 @@ public interface Highlighter {

    HighlightField highlight(HighlighterContext highlighterContext);

-   boolean canHighlight(FieldMapper fieldMapper);
+   boolean canHighlight(MappedFieldType fieldType);
}
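After this interface change, a custom highlighter is written against MappedFieldType. A minimal sketch (the class name is illustrative, not part of this commit):

    public class PassThroughHighlighter implements Highlighter {

        @Override
        public boolean canHighlight(MappedFieldType fieldType) {
            return true; // this sketch imposes no index-time requirements
        }

        @Override
        public HighlightField highlight(HighlighterContext highlighterContext) {
            return null; // null means "no highlight for this field on this hit"
        }
    }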
@@ -19,7 +19,7 @@
package org.elasticsearch.search.fetch.subphase.highlight;

import org.apache.lucene.search.Query;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;

@@ -27,16 +27,20 @@ public class HighlighterContext {

    public final String fieldName;
    public final SearchContextHighlight.Field field;
-   public final FieldMapper mapper;
+   public final MappedFieldType fieldType;
    public final SearchContext context;
    public final FetchSubPhase.HitContext hitContext;
    public final Query query;

-   public HighlighterContext(String fieldName, SearchContextHighlight.Field field, FieldMapper mapper, SearchContext context,
-                             FetchSubPhase.HitContext hitContext, Query query) {
+   public HighlighterContext(String fieldName,
+                             SearchContextHighlight.Field field,
+                             MappedFieldType fieldType,
+                             SearchContext context,
+                             FetchSubPhase.HitContext hitContext,
+                             Query query) {
        this.fieldName = fieldName;
        this.field = field;
-       this.mapper = mapper;
+       this.fieldType = fieldType;
        this.context = context;
        this.hitContext = hitContext;
        this.query = query;
@@ -36,7 +36,7 @@ import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;

@@ -59,22 +59,21 @@ public class PlainHighlighter implements Highlighter {
        SearchContextHighlight.Field field = highlighterContext.field;
        SearchContext context = highlighterContext.context;
        FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;
-       FieldMapper mapper = highlighterContext.mapper;
+       MappedFieldType fieldType = highlighterContext.fieldType;

        Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT;

        if (!hitContext.cache().containsKey(CACHE_KEY)) {
-           Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter> mappers = new HashMap<>();
-           hitContext.cache().put(CACHE_KEY, mappers);
+           hitContext.cache().put(CACHE_KEY, new HashMap<>());
        }
        @SuppressWarnings("unchecked")
-       Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter> cache =
-           (Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter>) hitContext.cache().get(CACHE_KEY);
+       Map<MappedFieldType, org.apache.lucene.search.highlight.Highlighter> cache =
+           (Map<MappedFieldType, org.apache.lucene.search.highlight.Highlighter>) hitContext.cache().get(CACHE_KEY);

-       org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper);
+       org.apache.lucene.search.highlight.Highlighter entry = cache.get(fieldType);
        if (entry == null) {
            QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query,
-               field.fieldOptions().requireFieldMatch() ? mapper.fieldType().name() : null);
+               field.fieldOptions().requireFieldMatch() ? fieldType.name() : null);
            queryScorer.setExpandMultiTermQuery(true);
            Fragmenter fragmenter;
            if (field.fieldOptions().numberOfFragments() == 0) {

@@ -96,21 +95,21 @@ public class PlainHighlighter implements Highlighter {
            // always highlight across all data
            entry.setMaxDocCharsToAnalyze(Integer.MAX_VALUE);

-           cache.put(mapper, entry);
+           cache.put(fieldType, entry);
        }

        // a HACK to make highlighter do highlighting, even though it's using the single frag list builder
        int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments();
        ArrayList<TextFragment> fragsList = new ArrayList<>();
        List<Object> textsToHighlight;
-       Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), mapper.fieldType());
+       Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldType);
        final int maxAnalyzedOffset = context.indexShard().indexSettings().getHighlightMaxAnalyzedOffset();

        try {
-           textsToHighlight = HighlightUtils.loadFieldValues(field, mapper, context, hitContext);
+           textsToHighlight = HighlightUtils.loadFieldValues(field, fieldType, context, hitContext);

            for (Object textToHighlight : textsToHighlight) {
-               String text = convertFieldValue(mapper.fieldType(), textToHighlight);
+               String text = convertFieldValue(fieldType, textToHighlight);
                if (text.length() > maxAnalyzedOffset) {
                    throw new IllegalArgumentException(
                        "The length of [" + highlighterContext.fieldName + "] field of [" + hitContext.hit().getId() +

@@ -121,7 +120,7 @@ public class PlainHighlighter implements Highlighter {
                        "with unified or fvh highlighter is recommended!");
                }

-               try (TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().name(), text)) {
+               try (TokenStream tokenStream = analyzer.tokenStream(fieldType.name(), text)) {
                    if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) {
                        // can't perform highlighting if the stream has no terms (binary token stream) or no offsets
                        continue;

@@ -178,7 +177,7 @@ public class PlainHighlighter implements Highlighter {
            String fieldContents = textsToHighlight.get(0).toString();
            int end;
            try {
-               end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, mapper.fieldType().name(), fieldContents);
+               end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, fieldType.name(), fieldContents);
            } catch (Exception e) {
                throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e);
            }

@@ -190,7 +189,7 @@ public class PlainHighlighter implements Highlighter {
    }

    @Override
-   public boolean canHighlight(FieldMapper fieldMapper) {
+   public boolean canHighlight(MappedFieldType fieldType) {
        return true;
    }
@@ -23,24 +23,27 @@ import org.apache.lucene.search.highlight.Encoder;
import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;

/**
 * Direct Subclass of Lucene's org.apache.lucene.search.vectorhighlight.SimpleFragmentsBuilder
 * that corrects offsets for broken analysis chains.
 */
public class SimpleFragmentsBuilder extends org.apache.lucene.search.vectorhighlight.SimpleFragmentsBuilder {
-   protected final FieldMapper mapper;
+   protected final MappedFieldType fieldType;

-   public SimpleFragmentsBuilder(FieldMapper mapper,
-                                 String[] preTags, String[] postTags, BoundaryScanner boundaryScanner) {
+   public SimpleFragmentsBuilder(MappedFieldType fieldType,
+                                 String[] preTags,
+                                 String[] postTags,
+                                 BoundaryScanner boundaryScanner) {
        super(preTags, postTags, boundaryScanner);
-       this.mapper = mapper;
+       this.fieldType = fieldType;
    }

    @Override
    protected String makeFragment(StringBuilder buffer, int[] index, Field[] values, WeightedFragInfo fragInfo,
                                  String[] preTags, String[] postTags, Encoder encoder) {
-       return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo),
-           preTags, postTags, encoder);
+       WeightedFragInfo weightedFragInfo = FragmentBuilderHelper.fixWeightedFragInfo(fieldType, values, fragInfo);
+       return super.makeFragment(buffer, index, values, weightedFragInfo, preTags, postTags, encoder);
    }
}
@@ -26,7 +26,7 @@ import org.apache.lucene.search.highlight.Encoder;
import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo;
import org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;

@@ -35,14 +35,17 @@ import java.util.List;

public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder {

-   private final FieldMapper mapper;
+   private final MappedFieldType fieldType;

    private final SearchContext searchContext;

-   public SourceScoreOrderFragmentsBuilder(FieldMapper mapper, SearchContext searchContext, String[] preTags, String[] postTags,
+   public SourceScoreOrderFragmentsBuilder(MappedFieldType fieldType,
+                                           SearchContext searchContext,
+                                           String[] preTags,
+                                           String[] postTags,
                                            BoundaryScanner boundaryScanner) {
        super(preTags, postTags, boundaryScanner);
-       this.mapper = mapper;
+       this.fieldType = fieldType;
        this.searchContext = searchContext;
    }

@@ -52,10 +55,10 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder
        SourceLookup sourceLookup = searchContext.lookup().source();
        sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId);

-       List<Object> values = sourceLookup.extractRawValues(mapper.fieldType().name());
+       List<Object> values = sourceLookup.extractRawValues(fieldType.name());
        Field[] fields = new Field[values.size()];
        for (int i = 0; i < values.size(); i++) {
-           fields[i] = new Field(mapper.fieldType().name(), values.get(i).toString(), TextField.TYPE_NOT_STORED);
+           fields[i] = new Field(fieldType.name(), values.get(i).toString(), TextField.TYPE_NOT_STORED);
        }
        return fields;
    }

@@ -63,7 +66,7 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder
    @Override
    protected String makeFragment(StringBuilder buffer, int[] index, Field[] values, WeightedFragInfo fragInfo,
                                  String[] preTags, String[] postTags, Encoder encoder) {
-       return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo),
-           preTags, postTags, encoder);
+       WeightedFragInfo weightedFragInfo = FragmentBuilderHelper.fixWeightedFragInfo(fieldType, values, fragInfo);
+       return super.makeFragment(buffer, index, values, weightedFragInfo, preTags, postTags, encoder);
    }
}
@@ -23,7 +23,7 @@ import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;

@@ -34,9 +34,12 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder {

    private final SearchContext searchContext;

-   public SourceSimpleFragmentsBuilder(FieldMapper mapper, SearchContext searchContext, String[] preTags, String[] postTags,
+   public SourceSimpleFragmentsBuilder(MappedFieldType fieldType,
+                                       SearchContext searchContext,
+                                       String[] preTags,
+                                       String[] postTags,
                                        BoundaryScanner boundaryScanner) {
-       super(mapper, preTags, postTags, boundaryScanner);
+       super(fieldType, preTags, postTags, boundaryScanner);
        this.searchContext = searchContext;
    }

@@ -48,13 +51,13 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder {
        SourceLookup sourceLookup = searchContext.lookup().source();
        sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId);

-       List<Object> values = sourceLookup.extractRawValues(mapper.fieldType().name());
+       List<Object> values = sourceLookup.extractRawValues(fieldType.name());
        if (values.isEmpty()) {
            return EMPTY_FIELDS;
        }
        Field[] fields = new Field[values.size()];
        for (int i = 0; i < values.size(); i++) {
-           fields[i] = new Field(mapper.fieldType().name(), values.get(i).toString(), TextField.TYPE_NOT_STORED);
+           fields[i] = new Field(fieldType.name(), values.get(i).toString(), TextField.TYPE_NOT_STORED);
        }
        return fields;
    }
@@ -22,11 +22,11 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.highlight.Encoder;
-import org.apache.lucene.search.uhighlight.Snippet;
import org.apache.lucene.search.uhighlight.BoundedBreakIteratorScanner;
import org.apache.lucene.search.uhighlight.CustomPassageFormatter;
import org.apache.lucene.search.uhighlight.CustomSeparatorBreakIterator;
import org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter;
+import org.apache.lucene.search.uhighlight.Snippet;
import org.apache.lucene.search.uhighlight.UnifiedHighlighter.OffsetSource;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CollectionUtil;

@@ -34,7 +34,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;

@@ -52,13 +51,13 @@ import static org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter.MULTI

public class UnifiedHighlighter implements Highlighter {
    @Override
-   public boolean canHighlight(FieldMapper fieldMapper) {
+   public boolean canHighlight(MappedFieldType fieldType) {
        return true;
    }

    @Override
    public HighlightField highlight(HighlighterContext highlighterContext) {
-       FieldMapper fieldMapper = highlighterContext.mapper;
+       MappedFieldType fieldType = highlighterContext.fieldType;
        SearchContextHighlight.Field field = highlighterContext.field;
        SearchContext context = highlighterContext.context;
        FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;

@@ -72,15 +71,15 @@ public class UnifiedHighlighter implements Highlighter {
        try {

            final Analyzer analyzer =
-               getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldMapper.fieldType());
-           List<Object> fieldValues = HighlightUtils.loadFieldValues(field, fieldMapper, context, hitContext);
+               getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldType);
+           List<Object> fieldValues = HighlightUtils.loadFieldValues(field, fieldType, context, hitContext);
            fieldValues = fieldValues.stream()
-               .map((s) -> convertFieldValue(fieldMapper.fieldType(), s))
+               .map((s) -> convertFieldValue(fieldType, s))
                .collect(Collectors.toList());
            final IndexSearcher searcher = new IndexSearcher(hitContext.reader());
            final CustomUnifiedHighlighter highlighter;
            final String fieldValue = mergeFieldValues(fieldValues, MULTIVAL_SEP_CHAR);
-           final OffsetSource offsetSource = getOffsetSource(fieldMapper.fieldType());
+           final OffsetSource offsetSource = getOffsetSource(fieldType);
            if ((offsetSource == OffsetSource.ANALYSIS) && (fieldValue.length() > maxAnalyzedOffset)) {
                throw new IllegalArgumentException(
                    "The length of [" + highlighterContext.fieldName + "] field of [" + hitContext.hit().getId() +
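The max_analyzed_offset guard above only applies when the offset source is ANALYSIS, that is, when offsets have to be recomputed from the field value. A mapping sketch (field name assumed) that stores offsets in postings so the unified highlighter can skip re-analysis:

    XContentBuilder mapping = jsonBuilder().startObject()
        .startObject("properties")
            .startObject("body")
                .field("type", "text")
                // getOffsetSource() can then pick postings over re-analysis
                .field("index_options", "offsets")
            .endObject()
        .endObject()
    .endObject();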
@@ -21,6 +21,7 @@ package org.elasticsearch.action.admin.cluster.settings;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;

@@ -29,7 +30,8 @@ import org.elasticsearch.test.XContentTestUtils;
import java.io.IOException;
import java.util.Collections;

+import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;

public class ClusterUpdateSettingsRequestTests extends ESTestCase {

@@ -51,10 +53,10 @@ public class ClusterUpdateSettingsRequestTests extends ESTestCase {
            String unsupportedField = "unsupported_field";
            BytesReference mutated = BytesReference.bytes(XContentTestUtils.insertIntoXContent(xContentType.xContent(), originalBytes,
                Collections.singletonList(""), () -> unsupportedField, () -> randomAlphaOfLengthBetween(3, 10)));
-           IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
+           XContentParseException iae = expectThrows(XContentParseException.class,
                () -> ClusterUpdateSettingsRequest.fromXContent(createParser(xContentType.xContent(), mutated)));
            assertThat(iae.getMessage(),
-               equalTo("[cluster_update_settings_request] unknown field [" + unsupportedField + "], parser not found"));
+               containsString("[cluster_update_settings_request] unknown field [" + unsupportedField + "], parser not found"));
        } else {
            XContentParser parser = createParser(xContentType.xContent(), originalBytes);
            ClusterUpdateSettingsRequest parsedRequest = ClusterUpdateSettingsRequest.fromXContent(parser);
@@ -0,0 +1,153 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.indices.mapping.get;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractStreamableXContentTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;

public class GetMappingsResponseTests extends AbstractStreamableXContentTestCase<GetMappingsResponse> {

    @Override
    protected boolean supportsUnknownFields() {
        return false;
    }

    public void testCheckEqualsAndHashCode() {
        GetMappingsResponse resp = createTestInstance();
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(resp, r -> new GetMappingsResponse(r.mappings()), GetMappingsResponseTests::mutate);
    }

    @Override
    protected GetMappingsResponse doParseInstance(XContentParser parser) throws IOException {
        return GetMappingsResponse.fromXContent(parser);
    }

    @Override
    protected GetMappingsResponse createBlankInstance() {
        return new GetMappingsResponse();
    }

    private static GetMappingsResponse mutate(GetMappingsResponse original) throws IOException {
        ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> builder = ImmutableOpenMap.builder(original.mappings());
        String indexKey = original.mappings().keys().iterator().next().value;

        ImmutableOpenMap.Builder<String, MappingMetaData> typeBuilder = ImmutableOpenMap.builder(original.mappings().get(indexKey));
        final String typeKey;
        Iterator<ObjectCursor<String>> iter = original.mappings().get(indexKey).keys().iterator();
        if (iter.hasNext()) {
            typeKey = iter.next().value;
        } else {
            typeKey = "new-type";
        }

        typeBuilder.put(typeKey, new MappingMetaData("type-" + randomAlphaOfLength(6), randomFieldMapping()));

        builder.put(indexKey, typeBuilder.build());
        return new GetMappingsResponse(builder.build());
    }

    @Override
    protected GetMappingsResponse mutateInstance(GetMappingsResponse instance) throws IOException {
        return mutate(instance);
    }

    @Override
    protected GetMappingsResponse createTestInstance() {
        // rarely have no types
        int typeCount = rarely() ? 0 : scaledRandomIntBetween(1, 3);
        List<MappingMetaData> typeMappings = new ArrayList<>(typeCount);

        for (int i = 0; i < typeCount; i++) {
            Map<String, Object> mappings = new HashMap<>();
            if (rarely() == false) { // rarely have no fields
                mappings.put("field-" + i, randomFieldMapping());
                if (randomBoolean()) {
                    mappings.put("field2-" + i, randomFieldMapping());
                }
            }

            try {
                MappingMetaData mmd = new MappingMetaData("type-" + randomAlphaOfLength(5), mappings);
                typeMappings.add(mmd);
            } catch (IOException e) {
                fail("shouldn't have failed " + e);
            }
        }
        ImmutableOpenMap.Builder<String, MappingMetaData> typeBuilder = ImmutableOpenMap.builder();
        typeMappings.forEach(mmd -> typeBuilder.put(mmd.type(), mmd));
        ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> indexBuilder = ImmutableOpenMap.builder();
        indexBuilder.put("index-" + randomAlphaOfLength(5), typeBuilder.build());
        GetMappingsResponse resp = new GetMappingsResponse(indexBuilder.build());
        logger.debug("--> created: {}", resp);
        return resp;
    }

    // Not meant to be exhaustive
    private static Map<String, Object> randomFieldMapping() {
        Map<String, Object> mappings = new HashMap<>();
        if (randomBoolean()) {
            Map<String, Object> regularMapping = new HashMap<>();
            regularMapping.put("type", randomBoolean() ? "text" : "keyword");
            regularMapping.put("index", "analyzed");
            regularMapping.put("analyzer", "english");
            return regularMapping;
        } else if (randomBoolean()) {
            Map<String, Object> numberMapping = new HashMap<>();
            numberMapping.put("type", randomFrom("integer", "float", "long", "double"));
            numberMapping.put("index", Objects.toString(randomBoolean()));
            return numberMapping;
        } else if (randomBoolean()) {
            Map<String, Object> objMapping = new HashMap<>();
            objMapping.put("type", "object");
            objMapping.put("dynamic", "strict");
            Map<String, Object> properties = new HashMap<>();
            Map<String, Object> props1 = new HashMap<>();
            props1.put("type", randomFrom("text", "keyword"));
            props1.put("analyzer", "keyword");
            properties.put("subtext", props1);
            Map<String, Object> props2 = new HashMap<>();
            props2.put("type", "object");
            Map<String, Object> prop2properties = new HashMap<>();
            Map<String, Object> props3 = new HashMap<>();
            props3.put("type", "integer");
            props3.put("index", "false");
            prop2properties.put("subsubfield", props3);
            props2.put("properties", prop2properties);
            objMapping.put("properties", properties);
            return objMapping;
        } else {
            Map<String, Object> plainMapping = new HashMap<>();
            plainMapping.put("type", "keyword");
            return plainMapping;
        }
    }
}
@@ -33,6 +33,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;

@@ -70,6 +71,7 @@ public class UpdateRequestTests extends ESTestCase {

    private UpdateHelper updateHelper;

+   @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();

@@ -283,8 +285,8 @@ public class UpdateRequestTests extends ESTestCase {
                .field("unknown_field", "test")
                .endObject());

-       IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> request.fromXContent(contentParser));
-       assertEquals("[UpdateRequest] unknown field [unknown_field], parser not found", ex.getMessage());
+       XContentParseException ex = expectThrows(XContentParseException.class, () -> request.fromXContent(contentParser));
+       assertEquals("[1:2] [UpdateRequest] unknown field [unknown_field], parser not found", ex.getMessage());

        UpdateRequest request2 = new UpdateRequest("test", "type", "1");
        XContentParser unknownObject = createParser(XContentFactory.jsonBuilder()

@@ -294,8 +296,8 @@ public class UpdateRequestTests extends ESTestCase {
                .field("count", 1)
                .endObject()
                .endObject());
-       ex = expectThrows(IllegalArgumentException.class, () -> request2.fromXContent(unknownObject));
-       assertEquals("[UpdateRequest] unknown field [params], parser not found", ex.getMessage());
+       ex = expectThrows(XContentParseException.class, () -> request2.fromXContent(unknownObject));
+       assertEquals("[1:76] [UpdateRequest] unknown field [params], parser not found", ex.getMessage());
    }

    public void testFetchSourceParsing() throws Exception {
@@ -37,6 +37,7 @@ import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;

@@ -638,7 +639,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
            .field("type", "text")
            .field("analyzer", "english")
            .startObject("index_prefixes").endObject()
-           .field("index_options", "positions")
+           .field("index_options", "freqs")
            .endObject().endObject().endObject().endObject());

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

@@ -649,6 +650,27 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
            assertFalse(ft.storeTermVectors());
        }

+       {
+           String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
+               .startObject("properties").startObject("field")
+               .field("type", "text")
+               .field("analyzer", "english")
+               .startObject("index_prefixes").endObject()
+               .field("index_options", "positions")
+               .endObject().endObject().endObject().endObject());
+
+           DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
+
+           FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix");
+           FieldType ft = prefix.fieldType;
+           if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
+               assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
+           } else {
+               assertEquals(IndexOptions.DOCS, ft.indexOptions());
+           }
+           assertFalse(ft.storeTermVectors());
+       }
+
        {
            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field")

@@ -662,7 +684,11 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {

        FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix");
        FieldType ft = prefix.fieldType;
-       assertEquals(IndexOptions.DOCS, ft.indexOptions());
+       if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
+           assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
+       } else {
+           assertEquals(IndexOptions.DOCS, ft.indexOptions());
+       }
        assertTrue(ft.storeTermVectorOffsets());
    }

@@ -679,7 +705,11 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {

        FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix");
        FieldType ft = prefix.fieldType;
-       assertEquals(IndexOptions.DOCS, ft.indexOptions());
+       if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
+           assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
+       } else {
+           assertEquals(IndexOptions.DOCS, ft.indexOptions());
+       }
        assertFalse(ft.storeTermVectorOffsets());
    }
}
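These tests exercise the index_prefixes sub-field. A mapping sketch with the bounds spelled out; 2 and 5 are the documented defaults, and the field name is assumed:

    XContentBuilder mapping = jsonBuilder().startObject()
        .startObject("properties")
            .startObject("field")
                .field("type", "text")
                .startObject("index_prefixes")
                    // prefixes of 2..5 characters are indexed into the
                    // hidden field._index_prefix sub-field
                    .field("min_chars", 2)
                    .field("max_chars", 5)
                .endObject()
            .endObject()
        .endObject()
    .endObject();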
@@ -61,7 +61,7 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase<Ma

        MatchPhrasePrefixQueryBuilder matchQuery = new MatchPhrasePrefixQueryBuilder(fieldName, value);

-       if (randomBoolean()) {
+       if (randomBoolean() && fieldName.equals(STRING_FIELD_NAME)) {
            matchQuery.analyzer(randomFrom("simple", "keyword", "whitespace"));
        }

@@ -99,15 +99,6 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase<Ma
            .or(instanceOf(IndexOrDocValuesQuery.class)).or(instanceOf(MatchNoDocsQuery.class)));
    }

-   /**
-    * Overridden to allow for annotating with @AwaitsFix. Please remove this method after fixing.
-    */
-   @Override
-   @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31061")
-   public void testToQuery() throws IOException {
-       super.testToQuery();
-   }
-
    public void testIllegalValues() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MatchPhrasePrefixQueryBuilder(null, "value"));
        assertEquals("[match_phrase_prefix] requires fieldName", e.getMessage());

@@ -127,6 +118,12 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase<Ma
        assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found"));
    }

+   public void testPhraseOnFieldWithNoTerms() {
+       MatchPhrasePrefixQueryBuilder matchQuery = new MatchPhrasePrefixQueryBuilder(DATE_FIELD_NAME, "three term phrase");
+       matchQuery.analyzer("whitespace");
+       expectThrows(IllegalArgumentException.class, () -> matchQuery.doToQuery(createShardContext()));
+   }
+
    public void testPhrasePrefixMatchQuery() throws IOException {
        String json1 = "{\n" +
            "    \"match_phrase_prefix\" : {\n" +
@@ -64,7 +64,7 @@ public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase<MatchPhr

        MatchPhraseQueryBuilder matchQuery = new MatchPhraseQueryBuilder(fieldName, value);

-       if (randomBoolean()) {
+       if (randomBoolean() && fieldName.equals(STRING_FIELD_NAME)) {
            matchQuery.analyzer(randomFrom("simple", "keyword", "whitespace"));
        }

@@ -107,15 +107,6 @@ public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase<MatchPhr
            .or(instanceOf(IndexOrDocValuesQuery.class)).or(instanceOf(MatchNoDocsQuery.class)));
    }

-   /**
-    * Overridden to allow for annotating with @AwaitsFix. Please remove this method after fixing.
-    */
-   @Override
-   @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31061")
-   public void testToQuery() throws IOException {
-       super.testToQuery();
-   }
-
    public void testIllegalValues() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MatchPhraseQueryBuilder(null, "value"));
        assertEquals("[match_phrase] requires fieldName", e.getMessage());
@@ -19,6 +19,7 @@

package org.elasticsearch.index.query;

+import com.carrotsearch.randomizedtesting.annotations.Seed;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanQuery;
@@ -22,24 +22,46 @@ package org.elasticsearch.index.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
import org.apache.lucene.search.spans.SpanBoostQuery;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;

import java.io.IOException;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.either;

public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase<SpanMultiTermQueryBuilder> {
    @Override
    protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
        XContentBuilder mapping = jsonBuilder().startObject().startObject("_doc").startObject("properties")
            .startObject("prefix_field")
                .field("type", "text")
                .startObject("index_prefixes").endObject()
            .endObject()
            .endObject().endObject().endObject();

        mapperService.merge("_doc",
            new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
    }

    @Override
    protected SpanMultiTermQueryBuilder doCreateTestQueryBuilder() {
        MultiTermQueryBuilder multiTermQueryBuilder = RandomQueryBuilder.createMultiTermQuery(random());

@@ -62,14 +84,67 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase<SpanMu
            BoostQuery boostQuery = (BoostQuery) multiTermQuery;
            multiTermQuery = boostQuery.getQuery();
        }
-       assertThat(multiTermQuery, instanceOf(MultiTermQuery.class));
-       assertThat(spanMultiTermQueryWrapper.getWrappedQuery(), equalTo(new SpanMultiTermQueryWrapper<>((MultiTermQuery)multiTermQuery).getWrappedQuery()));
+       assertThat(multiTermQuery, either(instanceOf(MultiTermQuery.class)).or(instanceOf(TermQuery.class)));
+       assertThat(spanMultiTermQueryWrapper.getWrappedQuery(),
+           equalTo(new SpanMultiTermQueryWrapper<>((MultiTermQuery)multiTermQuery).getWrappedQuery()));
    }

    public void testIllegalArgument() {
        expectThrows(IllegalArgumentException.class, () -> new SpanMultiTermQueryBuilder((MultiTermQueryBuilder) null));
    }

+   private static class TermMultiTermQueryBuilder implements MultiTermQueryBuilder {
+       @Override
+       public Query toQuery(QueryShardContext context) throws IOException {
+           return new TermQuery(new Term("foo", "bar"));
+       }
+
+       @Override
+       public Query toFilter(QueryShardContext context) throws IOException {
+           return toQuery(context);
+       }
+
+       @Override
+       public QueryBuilder queryName(String queryName) {
+           return this;
+       }
+
+       @Override
+       public String queryName() {
+           return "foo";
+       }
+
+       @Override
+       public float boost() {
+           return 1f;
+       }
+
+       @Override
+       public QueryBuilder boost(float boost) {
+           return this;
+       }
+
+       @Override
+       public String getName() {
+           return "foo";
+       }
+
+       @Override
+       public String getWriteableName() {
+           return "foo";
+       }
+
+       @Override
+       public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+           return builder;
+       }
+
+       @Override
+       public void writeTo(StreamOutput out) throws IOException {
+
+       }
+   }
+
    /**
     * test checks that we throw an {@link UnsupportedOperationException} if the query wrapped
     * by {@link SpanMultiTermQueryBuilder} does not generate a lucene {@link MultiTermQuery}.

@@ -77,69 +152,70 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase<SpanMu
     * to a date.
     */
    public void testUnsupportedInnerQueryType() throws IOException {
-       MultiTermQueryBuilder query = new MultiTermQueryBuilder() {
-           @Override
-           public Query toQuery(QueryShardContext context) throws IOException {
-               return new TermQuery(new Term("foo", "bar"));
-           }
-
-           @Override
-           public Query toFilter(QueryShardContext context) throws IOException {
-               return toQuery(context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryBuilder queryName(String queryName) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String queryName() {
|
||||
return "foo";
|
||||
}
|
||||
|
||||
@Override
|
||||
public float boost() {
|
||||
return 1f;
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryBuilder boost(float boost) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "foo";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return "foo";
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
|
||||
}
|
||||
};
|
||||
MultiTermQueryBuilder query = new TermMultiTermQueryBuilder();
|
||||
SpanMultiTermQueryBuilder spamMultiTermQuery = new SpanMultiTermQueryBuilder(query);
|
||||
UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class,
|
||||
() -> spamMultiTermQuery.toQuery(createShardContext()));
|
||||
assertThat(e.getMessage(), containsString("unsupported inner query, should be " + MultiTermQuery.class.getName()));
|
||||
assertThat(e.getMessage(), containsString("unsupported inner query generated by " + TermMultiTermQueryBuilder.class.getName() +
|
||||
", should be " + MultiTermQuery.class.getName()));
|
||||
}
|
||||
|
||||
public void testToQueryInnerSpanMultiTerm() throws IOException {
|
||||
|
||||
Query query = new SpanOrQueryBuilder(createTestQueryBuilder()).toQuery(createShardContext());
|
||||
//verify that the result is still a span query, despite the boost that might get set (SpanBoostQuery rather than BoostQuery)
|
||||
assertThat(query, instanceOf(SpanQuery.class));
|
||||
}
|
||||
|
||||
public void testToQueryInnerTermQuery() throws IOException {
|
||||
final QueryShardContext context = createShardContext();
|
||||
if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
|
||||
Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo"))
|
||||
.toQuery(context);
|
||||
assertThat(query, instanceOf(FieldMaskingSpanQuery.class));
|
||||
FieldMaskingSpanQuery fieldSpanQuery = (FieldMaskingSpanQuery) query;
|
||||
assertThat(fieldSpanQuery.getField(), equalTo("prefix_field"));
|
||||
assertThat(fieldSpanQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class));
|
||||
SpanTermQuery spanTermQuery = (SpanTermQuery) fieldSpanQuery.getMaskedQuery();
|
||||
assertThat(spanTermQuery.getTerm().text(), equalTo("foo"));
|
||||
|
||||
query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo"))
|
||||
.boost(2.0f)
|
||||
.toQuery(context);
|
||||
assertThat(query, instanceOf(SpanBoostQuery.class));
|
||||
SpanBoostQuery boostQuery = (SpanBoostQuery) query;
|
||||
assertThat(boostQuery.getBoost(), equalTo(2.0f));
|
||||
assertThat(boostQuery.getQuery(), instanceOf(FieldMaskingSpanQuery.class));
|
||||
fieldSpanQuery = (FieldMaskingSpanQuery) boostQuery.getQuery();
|
||||
assertThat(fieldSpanQuery.getField(), equalTo("prefix_field"));
|
||||
assertThat(fieldSpanQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class));
|
||||
spanTermQuery = (SpanTermQuery) fieldSpanQuery.getMaskedQuery();
|
||||
assertThat(spanTermQuery.getTerm().text(), equalTo("foo"));
|
||||
} else {
|
||||
Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo"))
|
||||
.toQuery(context);
|
||||
assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class));
|
||||
SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) query;
|
||||
assertThat(wrapper.getWrappedQuery(), instanceOf(PrefixQuery.class));
|
||||
PrefixQuery prefixQuery = (PrefixQuery) wrapper.getWrappedQuery();
|
||||
assertThat(prefixQuery.getField(), equalTo("prefix_field"));
|
||||
assertThat(prefixQuery.getPrefix().text(), equalTo("foo"));
|
||||
|
||||
query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo"))
|
||||
.boost(2.0f)
|
||||
.toQuery(context);
|
||||
assertThat(query, instanceOf(SpanBoostQuery.class));
|
||||
SpanBoostQuery boostQuery = (SpanBoostQuery) query;
|
||||
assertThat(boostQuery.getBoost(), equalTo(2.0f));
|
||||
assertThat(boostQuery.getQuery(), instanceOf(SpanMultiTermQueryWrapper.class));
|
||||
wrapper = (SpanMultiTermQueryWrapper) boostQuery.getQuery();
|
||||
assertThat(wrapper.getWrappedQuery(), instanceOf(PrefixQuery.class));
|
||||
prefixQuery = (PrefixQuery) wrapper.getWrappedQuery();
|
||||
assertThat(prefixQuery.getField(), equalTo("prefix_field"));
|
||||
assertThat(prefixQuery.getPrefix().text(), equalTo("foo"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testFromJson() throws IOException {
|
||||
String json =
|
||||
"{\n" +
|
||||
|
|
|
@@ -19,11 +19,7 @@
package org.elasticsearch.search.fetch.subphase.highlight;

import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.fetch.subphase.highlight.HighlighterContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.index.mapper.MappedFieldType;

import java.util.ArrayList;
import java.util.List;

@@ -68,7 +64,7 @@ public class CustomHighlighter implements Highlighter {
    }

    @Override
    public boolean canHighlight(FieldMapper fieldMapper) {
    public boolean canHighlight(MappedFieldType fieldType) {
        return true;
    }

@@ -158,10 +158,10 @@ public class HighlightBuilderTests extends ESTestCase {
     */
    public void testUnknownArrayNameExpection() throws IOException {
        {
            IllegalArgumentException e = expectParseThrows(IllegalArgumentException.class, "{\n" +
            XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" +
                    " \"bad_fieldname\" : [ \"field1\" 1 \"field2\" ]\n" +
                    "}\n");
            assertEquals("[highlight] unknown field [bad_fieldname], parser not found", e.getMessage());
            assertEquals("[2:5] [highlight] unknown field [bad_fieldname], parser not found", e.getMessage());
        }

        {

@@ -174,7 +174,7 @@ public class HighlightBuilderTests extends ESTestCase {
                    "}\n");
            assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]"));
            assertThat(e.getCause().getMessage(), containsString("[fields] failed to parse field [body]"));
            assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage());
            assertEquals("[4:9] [highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage());
        }
    }

@@ -188,10 +188,10 @@ public class HighlightBuilderTests extends ESTestCase {
     */
    public void testUnknownFieldnameExpection() throws IOException {
        {
            IllegalArgumentException e = expectParseThrows(IllegalArgumentException.class, "{\n" +
            XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" +
                    " \"bad_fieldname\" : \"value\"\n" +
                    "}\n");
            assertEquals("[highlight] unknown field [bad_fieldname], parser not found", e.getMessage());
            assertEquals("[2:5] [highlight] unknown field [bad_fieldname], parser not found", e.getMessage());
        }

        {

@@ -204,7 +204,7 @@ public class HighlightBuilderTests extends ESTestCase {
                    "}\n");
            assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]"));
            assertThat(e.getCause().getMessage(), containsString("[fields] failed to parse field [body]"));
            assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage());
            assertEquals("[4:9] [highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage());
        }
    }

@@ -213,10 +213,10 @@ public class HighlightBuilderTests extends ESTestCase {
     */
    public void testUnknownObjectFieldnameExpection() throws IOException {
        {
            IllegalArgumentException e = expectParseThrows(IllegalArgumentException.class, "{\n" +
            XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" +
                    " \"bad_fieldname\" : { \"field\" : \"value\" }\n \n" +
                    "}\n");
            assertEquals("[highlight] unknown field [bad_fieldname], parser not found", e.getMessage());
            assertEquals("[2:5] [highlight] unknown field [bad_fieldname], parser not found", e.getMessage());
        }

        {

@@ -229,7 +229,7 @@ public class HighlightBuilderTests extends ESTestCase {
                    "}\n");
            assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]"));
            assertThat(e.getCause().getMessage(), containsString("[fields] failed to parse field [body]"));
            assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage());
            assertEquals("[4:9] [highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage());
        }
    }
@@ -170,6 +170,7 @@ public class QueryRescorerBuilderTests extends ESTestCase {

    class AlwaysRewriteQueryBuilder extends MatchAllQueryBuilder {

        @Override
        protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException {
            return new MatchAllQueryBuilder();
        }

@@ -254,8 +255,8 @@ public class QueryRescorerBuilderTests extends ESTestCase {
                "}\n";
        {
            XContentParser parser = createParser(rescoreElement);
            Exception e = expectThrows(IllegalArgumentException.class, () -> RescorerBuilder.parseFromXContent(parser));
            assertEquals("[query] unknown field [bad_fieldname], parser not found", e.getMessage());
            XContentParseException e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser));
            assertEquals("[3:17] [query] unknown field [bad_fieldname], parser not found", e.getMessage());
        }

        rescoreElement = "{\n" +

@@ -26,6 +26,7 @@ import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.SortedSetSelector;
import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;

@@ -309,8 +310,8 @@ public class FieldSortBuilderTests extends AbstractSortTestCase<FieldSortBuilder
        parser.nextToken();
        parser.nextToken();

        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> FieldSortBuilder.fromXContent(parser, ""));
        assertEquals("[field_sort] unknown field [reverse], parser not found", e.getMessage());
        XContentParseException e = expectThrows(XContentParseException.class, () -> FieldSortBuilder.fromXContent(parser, ""));
        assertEquals("[1:18] [field_sort] unknown field [reverse], parser not found", e.getMessage());
    }

    @Override

@@ -383,7 +384,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase<FieldSortBuilder
            }
        };
        sortBuilder.setNestedPath("path").setNestedFilter(rangeQuery);
        FieldSortBuilder rewritten = (FieldSortBuilder) sortBuilder
        FieldSortBuilder rewritten = sortBuilder
                .rewrite(createMockShardContext());
        assertNotSame(rangeQuery, rewritten.getNestedFilter());
    }

@@ -400,7 +401,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase<FieldSortBuilder
            }
        };
        sortBuilder.setNestedSort(new NestedSortBuilder("path").setFilter(rangeQuery));
        FieldSortBuilder rewritten = (FieldSortBuilder) sortBuilder
        FieldSortBuilder rewritten = sortBuilder
                .rewrite(createMockShardContext());
        assertNotSame(rangeQuery, rewritten.getNestedSort().getFilter());
    }
@@ -24,7 +24,6 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

@@ -225,8 +224,8 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
        parser.nextToken();
        parser.nextToken();

        Exception e = expectThrows(IllegalArgumentException.class, () -> ScriptSortBuilder.fromXContent(parser, null));
        assertEquals("[_script] unknown field [bad_field], parser not found", e.getMessage());
        XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null));
        assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage());
    }

    public void testParseBadFieldNameExceptionsOnStartObject() throws IOException {

@@ -237,8 +236,8 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
        parser.nextToken();
        parser.nextToken();

        Exception e = expectThrows(IllegalArgumentException.class, () -> ScriptSortBuilder.fromXContent(parser, null));
        assertEquals("[_script] unknown field [bad_field], parser not found", e.getMessage());
        XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null));
        assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage());
    }

    public void testParseUnexpectedToken() throws IOException {

@@ -374,7 +373,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
            }
        };
        sortBuilder.setNestedPath("path").setNestedFilter(rangeQuery);
        ScriptSortBuilder rewritten = (ScriptSortBuilder) sortBuilder
        ScriptSortBuilder rewritten = sortBuilder
                .rewrite(createMockShardContext());
        assertNotSame(rangeQuery, rewritten.getNestedFilter());
    }

@@ -391,7 +390,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
            }
        };
        sortBuilder.setNestedSort(new NestedSortBuilder("path").setFilter(rangeQuery));
        ScriptSortBuilder rewritten = (ScriptSortBuilder) sortBuilder
        ScriptSortBuilder rewritten = sortBuilder
                .rewrite(createMockShardContext());
        assertNotSame(rangeQuery, rewritten.getNestedSort().getFilter());
    }
@@ -132,8 +132,9 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
     * To find the right position in the root query, we add a marker as `queryName` which
     * all query builders support. The added bogus field after that should trigger the exception.
     * Queries that allow arbitrary field names at this level need to override this test.
     * @throws IOException
     */
    public void testUnknownField() {
    public void testUnknownField() throws IOException {
        String marker = "#marker#";
        QB testQuery;
        do {

@@ -141,9 +142,14 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
        } while (testQuery.toString().contains(marker));
        testQuery.queryName(marker); // to find root query to add additional bogus field there
        String queryAsString = testQuery.toString().replace("\"" + marker + "\"", "\"" + marker + "\", \"bogusField\" : \"someValue\"");
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(queryAsString));
        // we'd like to see the offending field name here
        assertThat(e.getMessage(), containsString("bogusField"));
        try {
            parseQuery(queryAsString);
            fail("expected ParsingException or XContentParsingException");
        } catch (ParsingException | XContentParseException e) {
            // we'd like to see the offending field name here
            assertThat(e.getMessage(), containsString("bogusField"));
        }

    }

    /**
@@ -69,6 +69,11 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
     * e.g. "-Dtests.rest.blacklist=get/10_basic/*"
     */
    public static final String REST_TESTS_BLACKLIST = "tests.rest.blacklist";
    /**
     * We use tests.rest.blacklist in build files to blacklist tests; this property enables a user to add additional blacklisted tests on
     * top of the tests blacklisted in the build.
     */
    public static final String REST_TESTS_BLACKLIST_ADDITIONS = "tests.rest.blacklist_additions";
    /**
     * Property that allows to control whether spec validation is enabled or not (default true).
     */

@@ -125,6 +130,10 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
            for (final String entry : blacklist) {
                blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry));
            }
            final String[] blacklistAdditions = resolvePathsProperty(REST_TESTS_BLACKLIST_ADDITIONS, null);
            for (final String entry : blacklistAdditions) {
                blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry));
            }
        }
        assert restTestExecutionContext != null;
        assert adminExecutionContext != null;
@@ -1,78 +0,0 @@
[role="xpack"]
[[active-directory-realm]]
=== Active Directory user authentication

You can configure {security} to communicate with Active Directory to authenticate
users. To integrate with Active Directory, you configure an `active_directory`
realm and map Active Directory users and groups to {security} roles in the
<<mapping-roles, role mapping file>>.

See {ref}/configuring-ad-realm.html[Configuring an Active Directory Realm].
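
For illustration, a minimal `elasticsearch.yml` sketch of such a realm. This is
a hedged example rather than text from the linked page: it assumes the 6.x-style
realm settings layout, and the realm name `my_ad`, the domain, and the host are
hypothetical.

[source, yaml]
----
xpack.security.authc.realms.my_ad:
  type: active_directory            # authenticate users against Active Directory
  order: 0                          # position of this realm in the realm chain
  domain_name: example.com          # hypothetical AD domain
  url: ldaps://ad.example.com:636   # hypothetical domain controller
----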

{security} uses LDAP to communicate with Active Directory, so `active_directory`
realms are similar to <<ldap-realm, `ldap` realms>>. Like LDAP directories,
Active Directory stores users and groups hierarchically. The directory's
hierarchy is built from containers such as the _organizational unit_ (`ou`),
_organization_ (`o`), and _domain controller_ (`dc`).

The path to an entry is a _Distinguished Name_ (DN) that uniquely identifies a
user or group. User and group names typically have attributes such as a
_common name_ (`cn`) or _unique ID_ (`uid`). A DN is specified as a string, for
example `"cn=admin,dc=example,dc=com"` (white spaces are ignored).

{security} only supports Active Directory security groups. You cannot map
distribution groups to roles.

NOTE: When you use Active Directory for authentication, the username entered by
the user is expected to match the `sAMAccountName` or `userPrincipalName`,
not the common name.

The Active Directory realm authenticates users using an LDAP bind request. After
authenticating the user, the realm then searches to find the user's entry in
Active Directory. Once the user has been found, the Active Directory realm then
retrieves the user's group memberships from the `tokenGroups` attribute on the
user's entry in Active Directory.
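
As a hedged sketch of that lookup step, the realm can be given its own
credentials via the `bind_dn` setting (the DN below is hypothetical, and the
matching password is best kept in the secure settings keystore rather than in
`elasticsearch.yml`):

[source, yaml]
----
xpack.security.authc.realms.my_ad:
  type: active_directory
  domain_name: example.com
  url: ldaps://ad.example.com:636
  # hypothetical service account the realm uses when searching for
  # the authenticated user's entry and group memberships
  bind_dn: cn=es_service,ou=users,dc=example,dc=com
----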

[[ad-load-balancing]]
==== Load balancing and failover
The `load_balance.type` setting can be used at the realm level to configure how
{security} should interact with multiple Active Directory servers. Two modes of
operation are supported: failover and load balancing.

See {ref}/security-settings.html#load-balancing[Load Balancing and Failover Settings].
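
A hedged sketch of the failover mode (host names hypothetical), where servers
are tried in the configured order and the realm only moves on when the current
one is unreachable:

[source, yaml]
----
xpack.security.authc.realms.my_ad:
  type: active_directory
  domain_name: example.com
  url: [ "ldaps://ad1.example.com:636", "ldaps://ad2.example.com:636" ]
  # "failover" always prefers ad1; round robin modes are also documented
  load_balance.type: failover
----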

[[ad-settings]]
==== Active Directory realm settings

See {ref}/security-settings.html#ref-ad-settings[Active Directory Realm Settings].

[[mapping-roles-ad]]
==== Mapping Active Directory users and groups to roles

See {ref}/configuring-ad-realm.html[Configuring an Active Directory realm].

[[ad-user-metadata]]
==== User metadata in Active Directory realms
When a user is authenticated via an Active Directory realm, the following
properties are populated in the user's _metadata_:

|=======================
| Field         | Description
| `ldap_dn`     | The distinguished name of the user.
| `ldap_groups` | The distinguished name of each of the groups that were
                  resolved for the user (regardless of whether those
                  groups were mapped to a role).
|=======================

This metadata is returned in the
{ref}/security-api-authenticate.html[authenticate API] and can be used with
<<templating-role-query, templated queries>> in roles.

Additional metadata can be extracted from the Active Directory server by configuring
the `metadata` setting on the Active Directory realm.
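
A hedged sketch of that setting; the attribute names are hypothetical and must
exist on the Active Directory entries:

[source, yaml]
----
xpack.security.authc.realms.my_ad:
  type: active_directory
  domain_name: example.com
  # fetch these attributes from the user's entry and expose them
  # in the user's metadata alongside ldap_dn and ldap_groups
  metadata: [ "mail", "department" ]
----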

[[active-directory-ssl]]
==== Setting up SSL between Elasticsearch and Active Directory

See
{ref}/configuring-tls.html#tls-active-directory[Encrypting communications between {es} and Active Directory].