diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 0f3e6c62c9b..1559038aef4 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,5 +1,5 @@
elasticsearch = 7.0.0-alpha1
-lucene = 7.3.0
+lucene = 7.4.0-snapshot-1ed95c097b
# optional dependencies
spatial4j = 0.7
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
index 445fd7c6a99..5aa64a5c137 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
@@ -43,12 +43,16 @@ import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
+import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
+import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse;
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeResponse;
+import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
+import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import java.io.IOException;
import java.util.Collections;
@@ -265,6 +269,28 @@ public final class IndicesClient {
listener, emptySet(), headers);
}
+ /**
+ * Retrieve the settings of one or more indices
+ *
+ * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-settings.html">
+ * Indices Get Settings API on elastic.co</a>
+ */
+ public GetSettingsResponse getSettings(GetSettingsRequest getSettingsRequest, Header... headers) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(getSettingsRequest, RequestConverters::getSettings,
+ GetSettingsResponse::fromXContent, emptySet(), headers);
+ }
+
+ /**
+ * Asynchronously retrieve the settings of one or more indices
+ *
+ * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-settings.html">
+ * Indices Get Settings API on elastic.co</a>
+ */
+ public void getSettingsAsync(GetSettingsRequest getSettingsRequest, ActionListener<GetSettingsResponse> listener, Header... headers) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(getSettingsRequest, RequestConverters::getSettings,
+ GetSettingsResponse::fromXContent, listener, emptySet(), headers);
+ }
+
/**
* Force merge one or more indices using the Force Merge API
*
@@ -432,4 +458,26 @@ public final class IndicesClient {
UpdateSettingsResponse::fromXContent, listener, emptySet(), headers);
}
+ /**
+ * Puts an index template using the Index Templates API
+ *
+ * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html">Index Templates API
+ * on elastic.co</a>
+ */
+ public PutIndexTemplateResponse putTemplate(PutIndexTemplateRequest putIndexTemplateRequest, Header... headers) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(putIndexTemplateRequest, RequestConverters::putTemplate,
+ PutIndexTemplateResponse::fromXContent, emptySet(), headers);
+ }
+
+ /**
+ * Asynchronously puts an index template using the Index Templates API
+ *
+ * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html">Index Templates API
+ * on elastic.co</a>
+ */
+ public void putTemplateAsync(PutIndexTemplateRequest putIndexTemplateRequest,
+ ActionListener<PutIndexTemplateResponse> listener, Header... headers) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(putIndexTemplateRequest, RequestConverters::putTemplate,
+ PutIndexTemplateResponse::fromXContent, listener, emptySet(), headers);
+ }
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index d4cac4cc635..720c934026b 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -44,8 +44,10 @@ import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
+import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
+import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
@@ -76,7 +78,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
-import org.elasticsearch.rest.action.RestFieldCapabilitiesAction;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
@@ -85,10 +86,7 @@ import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
-import java.util.Collections;
-import java.util.HashMap;
import java.util.Locale;
-import java.util.Map;
import java.util.StringJoiner;
final class RequestConverters {
@@ -600,6 +598,22 @@ final class RequestConverters {
return request;
}
+ static Request getSettings(GetSettingsRequest getSettingsRequest) throws IOException {
+ String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices();
+ String[] names = getSettingsRequest.names() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.names();
+
+ String endpoint = endpoint(indices, "_settings", names);
+ Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+
+ Params params = new Params(request);
+ params.withIndicesOptions(getSettingsRequest.indicesOptions());
+ params.withLocal(getSettingsRequest.local());
+ params.withIncludeDefaults(getSettingsRequest.includeDefaults());
+ params.withMasterTimeout(getSettingsRequest.masterNodeTimeout());
+
+ return request;
+ }
+
static Request indicesExist(GetIndexRequest getIndexRequest) {
// this can be called with no indices as argument by transport client, not via REST though
if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) {
@@ -630,6 +644,21 @@ final class RequestConverters {
return request;
}
+ static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException {
+ String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addPathPart(putIndexTemplateRequest.name()).build();
+ Request request = new Request(HttpPut.METHOD_NAME, endpoint);
+ Params params = new Params(request);
+ params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
+ if (putIndexTemplateRequest.create()) {
+ params.putParam("create", Boolean.TRUE.toString());
+ }
+ if (Strings.hasText(putIndexTemplateRequest.cause())) {
+ params.putParam("cause", putIndexTemplateRequest.cause());
+ }
+ request.setEntity(createEntity(putIndexTemplateRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
index 0feb78d66b2..931447d85d4 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
@@ -51,14 +51,19 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse;
+import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
+import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeResponse;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
+import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
+import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
@@ -71,11 +76,19 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
import java.util.Map;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractRawValues;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue;
import static org.hamcrest.CoreMatchers.hasItem;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasEntry;
+import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
@@ -189,6 +202,108 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
}
}
+ public void testGetSettings() throws IOException {
+ String indexName = "get_settings_index";
+ Settings basicSettings = Settings.builder()
+ .put("number_of_shards", 1)
+ .put("number_of_replicas", 0)
+ .build();
+ createIndex(indexName, basicSettings);
+
+ GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indexName);
+ GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
+ highLevelClient().indices()::getSettingsAsync);
+
+ assertNull(getSettingsResponse.getSetting(indexName, "index.refresh_interval"));
+ assertEquals("1", getSettingsResponse.getSetting(indexName, "index.number_of_shards"));
+
+ updateIndexSettings(indexName, Settings.builder().put("refresh_interval", "30s"));
+
+ GetSettingsResponse updatedResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
+ highLevelClient().indices()::getSettingsAsync);
+ assertEquals("30s", updatedResponse.getSetting(indexName, "index.refresh_interval"));
+ }
+
+ public void testGetSettingsNonExistentIndex() throws IOException {
+ String nonExistentIndex = "index_that_doesnt_exist";
+ assertFalse(indexExists(nonExistentIndex));
+
+ GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(nonExistentIndex);
+ ElasticsearchException exception = expectThrows(ElasticsearchException.class,
+ () -> execute(getSettingsRequest, highLevelClient().indices()::getSettings, highLevelClient().indices()::getSettingsAsync));
+ assertEquals(RestStatus.NOT_FOUND, exception.status());
+ }
+
+ public void testGetSettingsFromMultipleIndices() throws IOException {
+ String indexName1 = "get_multiple_settings_one";
+ createIndex(indexName1, Settings.builder().put("number_of_shards", 2).build());
+
+ String indexName2 = "get_multiple_settings_two";
+ createIndex(indexName2, Settings.builder().put("number_of_shards", 3).build());
+
+ GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices("get_multiple_settings*");
+ GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
+ highLevelClient().indices()::getSettingsAsync);
+
+ assertEquals("2", getSettingsResponse.getSetting(indexName1, "index.number_of_shards"));
+ assertEquals("3", getSettingsResponse.getSetting(indexName2, "index.number_of_shards"));
+ }
+
+ public void testGetSettingsFiltered() throws IOException {
+ String indexName = "get_settings_index";
+ Settings basicSettings = Settings.builder()
+ .put("number_of_shards", 1)
+ .put("number_of_replicas", 0)
+ .build();
+ createIndex(indexName, basicSettings);
+
+ GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indexName).names("index.number_of_shards");
+ GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
+ highLevelClient().indices()::getSettingsAsync);
+
+ assertNull(getSettingsResponse.getSetting(indexName, "index.number_of_replicas"));
+ assertEquals("1", getSettingsResponse.getSetting(indexName, "index.number_of_shards"));
+ assertEquals(1, getSettingsResponse.getIndexToSettings().get("get_settings_index").size());
+ }
+
+ public void testGetSettingsWithDefaults() throws IOException {
+ String indexName = "get_settings_index";
+ Settings basicSettings = Settings.builder()
+ .put("number_of_shards", 1)
+ .put("number_of_replicas", 0)
+ .build();
+ createIndex(indexName, basicSettings);
+
+ GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indexName).includeDefaults(true);
+ GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
+ highLevelClient().indices()::getSettingsAsync);
+
+ assertNotNull(getSettingsResponse.getSetting(indexName, "index.refresh_interval"));
+ assertEquals(IndexSettings.DEFAULT_REFRESH_INTERVAL,
+ getSettingsResponse.getIndexToDefaultSettings().get("get_settings_index").getAsTime("index.refresh_interval", null));
+ assertEquals("1", getSettingsResponse.getSetting(indexName, "index.number_of_shards"));
+ }
+
+ public void testGetSettingsWithDefaultsFiltered() throws IOException {
+ String indexName = "get_settings_index";
+ Settings basicSettings = Settings.builder()
+ .put("number_of_shards", 1)
+ .put("number_of_replicas", 0)
+ .build();
+ createIndex(indexName, basicSettings);
+
+ GetSettingsRequest getSettingsRequest = new GetSettingsRequest()
+ .indices(indexName)
+ .names("index.refresh_interval")
+ .includeDefaults(true);
+ GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
+ highLevelClient().indices()::getSettingsAsync);
+
+ assertNull(getSettingsResponse.getSetting(indexName, "index.number_of_replicas"));
+ assertNull(getSettingsResponse.getSetting(indexName, "index.number_of_shards"));
+ assertEquals(0, getSettingsResponse.getIndexToSettings().get("get_settings_index").size());
+ assertEquals(1, getSettingsResponse.getIndexToDefaultSettings().get("get_settings_index").size());
+ }
public void testPutMapping() throws IOException {
{
// Add mappings to index
@@ -708,4 +823,59 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
+ "or check the breaking changes documentation for removed settings]"));
}
+ @SuppressWarnings("unchecked")
+ public void testPutTemplate() throws Exception {
+ PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest()
+ .name("my-template")
+ .patterns(Arrays.asList("pattern-1", "name-*"))
+ .order(10)
+ .create(randomBoolean())
+ .settings(Settings.builder().put("number_of_shards", "3").put("number_of_replicas", "0"))
+ .mapping("doc", "host_name", "type=keyword", "description", "type=text")
+ .alias(new Alias("alias-1").indexRouting("abc")).alias(new Alias("{index}-write").searchRouting("xyz"));
+
+ PutIndexTemplateResponse putTemplateResponse = execute(putTemplateRequest,
+ highLevelClient().indices()::putTemplate, highLevelClient().indices()::putTemplateAsync);
+ assertThat(putTemplateResponse.isAcknowledged(), equalTo(true));
+
+ Map<String, Object> templates = getAsMap("/_template/my-template");
+ assertThat(templates.keySet(), hasSize(1));
+ assertThat(extractValue("my-template.order", templates), equalTo(10));
+ assertThat(extractRawValues("my-template.index_patterns", templates), contains("pattern-1", "name-*"));
+ assertThat(extractValue("my-template.settings.index.number_of_shards", templates), equalTo("3"));
+ assertThat(extractValue("my-template.settings.index.number_of_replicas", templates), equalTo("0"));
+ assertThat(extractValue("my-template.mappings.doc.properties.host_name.type", templates), equalTo("keyword"));
+ assertThat(extractValue("my-template.mappings.doc.properties.description.type", templates), equalTo("text"));
+ assertThat((Map<String, String>) extractValue("my-template.aliases.alias-1", templates), hasEntry("index_routing", "abc"));
+ assertThat((Map<String, String>) extractValue("my-template.aliases.{index}-write", templates), hasEntry("search_routing", "xyz"));
+ }
+
+ public void testPutTemplateBadRequests() throws Exception {
+ RestHighLevelClient client = highLevelClient();
+
+ // Failed to validate because index patterns are missing
+ PutIndexTemplateRequest withoutPattern = new PutIndexTemplateRequest("t1");
+ ValidationException withoutPatternError = expectThrows(ValidationException.class,
+ () -> execute(withoutPattern, client.indices()::putTemplate, client.indices()::putTemplateAsync));
+ assertThat(withoutPatternError.validationErrors(), contains("index patterns are missing"));
+
+ // Create-only specified but an template exists already
+ PutIndexTemplateRequest goodTemplate = new PutIndexTemplateRequest("t2").patterns(Arrays.asList("qa-*", "prod-*"));
+ assertTrue(execute(goodTemplate, client.indices()::putTemplate, client.indices()::putTemplateAsync).isAcknowledged());
+ goodTemplate.create(true);
+ ElasticsearchException alreadyExistsError = expectThrows(ElasticsearchException.class,
+ () -> execute(goodTemplate, client.indices()::putTemplate, client.indices()::putTemplateAsync));
+ assertThat(alreadyExistsError.getDetailedMessage(),
+ containsString("[type=illegal_argument_exception, reason=index_template [t2] already exists]"));
+ goodTemplate.create(false);
+ assertTrue(execute(goodTemplate, client.indices()::putTemplate, client.indices()::putTemplateAsync).isAcknowledged());
+
+ // Rejected due to unknown settings
+ PutIndexTemplateRequest unknownSettingTemplate = new PutIndexTemplateRequest("t3")
+ .patterns(Collections.singletonList("any"))
+ .settings(Settings.builder().put("this-setting-does-not-exist", 100));
+ ElasticsearchStatusException unknownSettingError = expectThrows(ElasticsearchStatusException.class,
+ () -> execute(unknownSettingTemplate, client.indices()::putTemplate, client.indices()::putTemplateAsync));
+ assertThat(unknownSettingError.getDetailedMessage(), containsString("unknown setting [index.this-setting-does-not-exist]"));
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index 3f9428a3aea..70c209c30ab 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -26,12 +26,11 @@ import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ByteArrayEntity;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
+import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
@@ -46,9 +45,11 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
+import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
+import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkShardRequest;
import org.elasticsearch.action.delete.DeleteRequest;
@@ -69,6 +70,7 @@ import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.action.support.replication.ReplicationRequest;
import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.client.RequestConverters.EndpointBuilder;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
@@ -76,14 +78,13 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.lucene.uid.Versions;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.client.RequestConverters.EndpointBuilder;
-import org.elasticsearch.client.RequestConverters.Params;
import org.elasticsearch.index.RandomCreateIndexGenerator;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.TermQueryBuilder;
@@ -92,7 +93,6 @@ import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.index.rankeval.RankEvalSpec;
import org.elasticsearch.index.rankeval.RatedRequest;
import org.elasticsearch.index.rankeval.RestRankEvalAction;
-import org.elasticsearch.rest.action.RestFieldCapabilitiesAction;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
@@ -109,8 +109,6 @@ import org.elasticsearch.test.RandomObjects;
import java.io.IOException;
import java.io.InputStream;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -119,7 +117,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
-import java.util.Set;
import java.util.StringJoiner;
import java.util.function.Consumer;
import java.util.function.Function;
@@ -405,6 +402,52 @@ public class RequestConvertersTests extends ESTestCase {
assertNull(request.getEntity());
}
+ public void testGetSettings() throws IOException {
+ String[] indicesUnderTest = randomBoolean() ? null : randomIndicesNames(0, 5);
+
+ GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indicesUnderTest);
+
+ Map<String, String> expectedParams = new HashMap<>();
+ setRandomMasterTimeout(getSettingsRequest, expectedParams);
+ setRandomIndicesOptions(getSettingsRequest::indicesOptions, getSettingsRequest::indicesOptions, expectedParams);
+
+ setRandomLocal(getSettingsRequest, expectedParams);
+
+ if (randomBoolean()) {
+ //the request object will not have include_defaults present unless it is set to true
+ getSettingsRequest.includeDefaults(randomBoolean());
+ if (getSettingsRequest.includeDefaults()) {
+ expectedParams.put("include_defaults", Boolean.toString(true));
+ }
+ }
+
+ StringJoiner endpoint = new StringJoiner("/", "/", "");
+ if (indicesUnderTest != null && indicesUnderTest.length > 0) {
+ endpoint.add(String.join(",", indicesUnderTest));
+ }
+ endpoint.add("_settings");
+
+ if (randomBoolean()) {
+ String[] names = randomBoolean() ? null : new String[randomIntBetween(0, 3)];
+ if (names != null) {
+ for (int x = 0; x < names.length; x++) {
+ names[x] = randomAlphaOfLengthBetween(3, 10);
+ }
+ }
+ getSettingsRequest.names(names);
+ if (names != null && names.length > 0) {
+ endpoint.add(String.join(",", names));
+ }
+ }
+
+ Request request = RequestConverters.getSettings(getSettingsRequest);
+
+ assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
+ assertThat(request.getParameters(), equalTo(expectedParams));
+ assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
+ assertThat(request.getEntity(), nullValue());
+ }
+
public void testDeleteIndexEmptyIndices() {
String[] indices = randomBoolean() ? null : Strings.EMPTY_ARRAY;
ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate();
@@ -1384,6 +1427,48 @@ public class RequestConvertersTests extends ESTestCase {
assertEquals(expectedParams, request.getParameters());
}
+ public void testPutTemplateRequest() throws Exception {
+ Map<String, String> names = new HashMap<>();
+ names.put("log", "log");
+ names.put("template#1", "template%231");
+ names.put("-#template", "-%23template");
+ names.put("foo^bar", "foo%5Ebar");
+
+ PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest()
+ .name(randomFrom(names.keySet()))
+ .patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false)));
+ if (randomBoolean()) {
+ putTemplateRequest.order(randomInt());
+ }
+ if (randomBoolean()) {
+ putTemplateRequest.version(randomInt());
+ }
+ if (randomBoolean()) {
+ putTemplateRequest.settings(Settings.builder().put("setting-" + randomInt(), randomTimeValue()));
+ }
+ if (randomBoolean()) {
+ putTemplateRequest.mapping("doc-" + randomInt(), "field-" + randomInt(), "type=" + randomFrom("text", "keyword"));
+ }
+ if (randomBoolean()) {
+ putTemplateRequest.alias(new Alias("alias-" + randomInt()));
+ }
+ Map<String, String> expectedParams = new HashMap<>();
+ if (randomBoolean()) {
+ expectedParams.put("create", Boolean.TRUE.toString());
+ putTemplateRequest.create(true);
+ }
+ if (randomBoolean()) {
+ String cause = randomUnicodeOfCodepointLengthBetween(1, 50);
+ putTemplateRequest.cause(cause);
+ expectedParams.put("cause", cause);
+ }
+ setRandomMasterTimeout(putTemplateRequest, expectedParams);
+ Request request = RequestConverters.putTemplate(putTemplateRequest);
+ assertThat(request.getEndpoint(), equalTo("/_template/" + names.get(putTemplateRequest.name())));
+ assertThat(request.getParameters(), equalTo(expectedParams));
+ assertToXContentBody(putTemplateRequest, request.getEntity());
+ }
+
private static void assertToXContentBody(ToXContent expectedBody, HttpEntity actualEntity) throws IOException {
BytesReference expectedBytes = XContentHelper.toXContent(expectedBody, REQUEST_BODY_CONTENT_TYPE, false);
assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue());
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
index 24c321f87f9..1dd9834d8f5 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
@@ -50,9 +50,15 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse;
+import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
+import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeResponse;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
+import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
+import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse;
+import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
+import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.IndicesOptions;
@@ -69,11 +75,14 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
+import static org.hamcrest.Matchers.equalTo;
+
/**
* This class is used to generate the Java Indices API documentation.
* You need to wrap your code between two tags like:
@@ -775,6 +784,119 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
+ public void testGetSettings() throws Exception {
+ RestHighLevelClient client = highLevelClient();
+
+ {
+ Settings settings = Settings.builder().put("number_of_shards", 3).build();
+ CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("index", settings));
+ assertTrue(createIndexResponse.isAcknowledged());
+ }
+
+ // tag::get-settings-request
+ GetSettingsRequest request = new GetSettingsRequest().indices("index"); // <1>
+ // end::get-settings-request
+
+ // tag::get-settings-request-names
+ request.names("index.number_of_shards"); // <1>
+ // end::get-settings-request-names
+
+ // tag::get-settings-request-indicesOptions
+ request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1>
+ // end::get-settings-request-indicesOptions
+
+ // tag::get-settings-execute
+ GetSettingsResponse getSettingsResponse = client.indices().getSettings(request);
+ // end::get-settings-execute
+
+ // tag::get-settings-response
+ String numberOfShardsString = getSettingsResponse.getSetting("index", "index.number_of_shards"); // <1>
+ Settings indexSettings = getSettingsResponse.getIndexToSettings().get("index"); // <2>
+ Integer numberOfShards = indexSettings.getAsInt("index.number_of_shards", null); // <3>
+ // end::get-settings-response
+
+ assertEquals("3", numberOfShardsString);
+ assertEquals(Integer.valueOf(3), numberOfShards);
+
+ assertNull("refresh_interval returned but was never set!",
+ getSettingsResponse.getSetting("index", "index.refresh_interval"));
+
+ // tag::get-settings-execute-listener
+ ActionListener listener =
+ new ActionListener() {
+ @Override
+ public void onResponse(GetSettingsResponse GetSettingsResponse) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::get-settings-execute-listener
+
+ // Replace the empty listener by a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::get-settings-execute-async
+ client.indices().getSettingsAsync(request, listener); // <1>
+ // end::get-settings-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+
+ public void testGetSettingsWithDefaults() throws Exception {
+ RestHighLevelClient client = highLevelClient();
+
+ {
+ Settings settings = Settings.builder().put("number_of_shards", 3).build();
+ CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("index", settings));
+ assertTrue(createIndexResponse.isAcknowledged());
+ }
+
+ GetSettingsRequest request = new GetSettingsRequest().indices("index");
+ request.indicesOptions(IndicesOptions.lenientExpandOpen());
+
+ // tag::get-settings-request-include-defaults
+ request.includeDefaults(true); // <1>
+ // end::get-settings-request-include-defaults
+
+ GetSettingsResponse getSettingsResponse = client.indices().getSettings(request);
+ String numberOfShardsString = getSettingsResponse.getSetting("index", "index.number_of_shards");
+ Settings indexSettings = getSettingsResponse.getIndexToSettings().get("index");
+ Integer numberOfShards = indexSettings.getAsInt("index.number_of_shards", null);
+
+ // tag::get-settings-defaults-response
+ String refreshInterval = getSettingsResponse.getSetting("index", "index.refresh_interval"); // <1>
+ Settings indexDefaultSettings = getSettingsResponse.getIndexToDefaultSettings().get("index"); // <2>
+ // end::get-settings-defaults-response
+
+ assertEquals("3", numberOfShardsString);
+ assertEquals(Integer.valueOf(3), numberOfShards);
+ assertNotNull("with defaults enabled we should get a value for refresh_interval!", refreshInterval);
+
+ assertEquals(refreshInterval, indexDefaultSettings.get("index.refresh_interval"));
+ ActionListener listener =
+ new ActionListener() {
+ @Override
+ public void onResponse(GetSettingsResponse GetSettingsResponse) {
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ }
+ };
+
+ // Replace the empty listener by a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ client.indices().getSettingsAsync(request, listener);
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+
public void testForceMergeIndex() throws Exception {
RestHighLevelClient client = highLevelClient();
@@ -1483,4 +1605,164 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
+ public void testPutTemplate() throws Exception {
+ RestHighLevelClient client = highLevelClient();
+
+ // tag::put-template-request
+ PutIndexTemplateRequest request = new PutIndexTemplateRequest("my-template"); // <1>
+ request.patterns(Arrays.asList("pattern-1", "log-*")); // <2>
+ // end::put-template-request
+
+ // tag::put-template-request-settings
+ request.settings(Settings.builder() // <1>
+ .put("index.number_of_shards", 3)
+ .put("index.number_of_replicas", 1)
+ );
+ // end::put-template-request-settings
+
+ {
+ // tag::put-template-request-mappings-json
+ request.mapping("tweet", // <1>
+ "{\n" +
+ " \"tweet\": {\n" +
+ " \"properties\": {\n" +
+ " \"message\": {\n" +
+ " \"type\": \"text\"\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ "}", // <2>
+ XContentType.JSON);
+ // end::put-template-request-mappings-json
+ assertTrue(client.indices().putTemplate(request).isAcknowledged());
+ }
+ {
+ //tag::put-template-request-mappings-map
+ Map jsonMap = new HashMap<>();
+ Map message = new HashMap<>();
+ message.put("type", "text");
+ Map properties = new HashMap<>();
+ properties.put("message", message);
+ Map tweet = new HashMap<>();
+ tweet.put("properties", properties);
+ jsonMap.put("tweet", tweet);
+ request.mapping("tweet", jsonMap); // <1>
+ //end::put-template-request-mappings-map
+ assertTrue(client.indices().putTemplate(request).isAcknowledged());
+ }
+ {
+ //tag::put-template-request-mappings-xcontent
+ XContentBuilder builder = XContentFactory.jsonBuilder();
+ builder.startObject();
+ {
+ builder.startObject("tweet");
+ {
+ builder.startObject("properties");
+ {
+ builder.startObject("message");
+ {
+ builder.field("type", "text");
+ }
+ builder.endObject();
+ }
+ builder.endObject();
+ }
+ builder.endObject();
+ }
+ builder.endObject();
+ request.mapping("tweet", builder); // <1>
+ //end::put-template-request-mappings-xcontent
+ assertTrue(client.indices().putTemplate(request).isAcknowledged());
+ }
+ {
+ //tag::put-template-request-mappings-shortcut
+ request.mapping("tweet", "message", "type=text"); // <1>
+ //end::put-template-request-mappings-shortcut
+ assertTrue(client.indices().putTemplate(request).isAcknowledged());
+ }
+
+ // tag::put-template-request-aliases
+ request.alias(new Alias("twitter_alias").filter(QueryBuilders.termQuery("user", "kimchy"))); // <1>
+ request.alias(new Alias("{index}_alias").searchRouting("xyz")); // <2>
+ // end::put-template-request-aliases
+
+ // tag::put-template-request-order
+ request.order(20); // <1>
+ // end::put-template-request-order
+
+ // tag::put-template-request-version
+ request.version(4); // <1>
+ // end::put-template-request-version
+
+ // tag::put-template-whole-source
+ request.source("{\n" +
+ " \"index_patterns\": [\n" +
+ " \"log-*\",\n" +
+ " \"pattern-1\"\n" +
+ " ],\n" +
+ " \"order\": 1,\n" +
+ " \"settings\": {\n" +
+ " \"number_of_shards\": 1\n" +
+ " },\n" +
+ " \"mappings\": {\n" +
+ " \"tweet\": {\n" +
+ " \"properties\": {\n" +
+ " \"message\": {\n" +
+ " \"type\": \"text\"\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " \"aliases\": {\n" +
+ " \"alias-1\": {},\n" +
+ " \"{index}-alias\": {}\n" +
+ " }\n" +
+ "}", XContentType.JSON); // <1>
+ // end::put-template-whole-source
+
+ // tag::put-template-request-create
+ request.create(true); // <1>
+ // end::put-template-request-create
+
+ // tag::put-template-request-masterTimeout
+ request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
+ request.masterNodeTimeout("1m"); // <2>
+ // end::put-template-request-masterTimeout
+
+ request.create(false); // make test happy
+
+ // tag::put-template-execute
+ PutIndexTemplateResponse putTemplateResponse = client.indices().putTemplate(request);
+ // end::put-template-execute
+
+ // tag::put-template-response
+ boolean acknowledged = putTemplateResponse.isAcknowledged(); // <1>
+ // end::put-template-response
+ assertTrue(acknowledged);
+
+ // tag::put-template-execute-listener
+ ActionListener listener =
+ new ActionListener() {
+ @Override
+ public void onResponse(PutIndexTemplateResponse putTemplateResponse) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::put-template-execute-listener
+
+ // Replace the empty listener by a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::put-template-execute-async
+ client.indices().putTemplateAsync(request, listener); // <1>
+ // end::put-template-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
}
diff --git a/docs/CHANGELOG.asciidoc b/docs/CHANGELOG.asciidoc
index 7c14d41724a..03811ae85c7 100644
--- a/docs/CHANGELOG.asciidoc
+++ b/docs/CHANGELOG.asciidoc
@@ -3,6 +3,10 @@
[partintro]
--
+// To add a release, copy and paste the template text
+// and add a link to the new section. Note that release subheads must
+// be floated and sections cannot be empty.
+
// Use these for links to issue and pulls. Note issues and pulls redirect one to
// each other on Github, so don't worry too much on using the right prefix.
:issue: https://github.com/elastic/elasticsearch/issues/
@@ -12,13 +16,52 @@ This section summarizes the changes in each release.
* <>
* <>
-
+* <>
--
+////
+// To add a release, copy and paste the following text, uncomment the relevant
+// sections, and add a link to the new section in the list of releases at the
+// top of the page. Note that release subheads must be floated and sections
+// cannot be empty.
+// TEMPLATE:
+
+// [[release-notes-n.n.n]]
+// == {es} n.n.n
+
+//[float]
+[[breaking-n.n.n]]
+//=== Breaking Changes
+
+//[float]
+//=== Breaking Java Changes
+
+//[float]
+//=== Deprecations
+
+//[float]
+//=== New Features
+
+//[float]
+//=== Enhancements
+
+//[float]
+//=== Bug Fixes
+
+//[float]
+//=== Regressions
+
+//[float]
+//=== Known Issues
+
+////
+
[[release-notes-7.0.0]]
== {es} 7.0.0
+coming[7.0.0]
+
[float]
[[breaking-7.0.0]]
=== Breaking Changes
@@ -37,6 +80,10 @@ Machine Learning::
* <> ({pull}29601[#29601])
+//[float]
+//=== Breaking Java Changes
+
+[float]
=== Deprecations
Monitoring::
* The `xpack.monitoring.collection.interval` setting can no longer be set to `-1`
@@ -48,6 +95,106 @@ Security::
mappings, get field mappings, and field capabilities API are now only the
ones that the user is authorized to access in case field level security is enabled.
+//[float]
+//=== New Features
+
+//[float]
+//=== Enhancements
+
+[float]
+=== Bug Fixes
+
+Fixed prerelease version of elasticsearch in the `deb` package to sort before GA versions
+({pull}29000[#29000])
+
+Rollup::
+* Validate timezone in range queries to ensure they match the selected job when
+searching ({pull}30338[#30338])
+
+[float]
+=== Regressions
+Fail snapshot operations early when creating or deleting a snapshot on a repository that has been
+written to by an older Elasticsearch after writing to it with a newer Elasticsearch version. ({pull}30140[#30140])
+
+Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641])
+
+//[float]
+//=== Regressions
+
+//[float]
+//=== Known Issues
+
+[[release-notes-6.4.0]]
+== {es} 6.4.0
+
+coming[6.4.0]
+
+//[float]
+[[breaking-6.4.0]]
+//=== Breaking Changes
+
+//[float]
+//=== Breaking Java Changes
+
+//[float]
+//=== Deprecations
+
+[float]
+=== New Features
+
+The new <> field allows one to know which fields
+got ignored at index time because of the <>
+option. ({pull}30140[#29658])
+
+A new analysis plugin called `analysis_nori` that exposes the Lucene Korean
+analysis module. ({pull}30397[#30397])
+
+[float]
+=== Enhancements
+
+{ref-64}/breaking_64_api_changes.html#copy-source-settings-on-resize[Allow copying source settings on index resize operations] ({pull}30255[#30255])
+
+Added new "Request" object flavored request methods. Prefer these instead of the
+multi-argument versions. ({pull}29623[#29623])
+
+The cluster state listener to decide if watcher should be
+stopped/started/paused now runs far less code in an executor but is more
+synchronous and predictable. Also the trigger engine thread is only started on
+data nodes. And the Execute Watch API can be triggered regardless of whether watcher is
+started or stopped. ({pull}30118[#30118])
+
+Added put index template API to the high level rest client ({pull}30400[#30400])
+
+[float]
+=== Bug Fixes
+
+Do not ignore request analysis/similarity settings on index resize operations when the source index already contains such settings ({pull}30216[#30216])
+
+Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641])
+
+Machine Learning::
+
+* Account for gaps in data counts after job is reopened ({pull}30294[#30294])
+
+Rollup::
+* Validate timezone in range queries to ensure they match the selected job when
+searching ({pull}30338[#30338])
+
+//[float]
+//=== Regressions
+
+//[float]
+//=== Known Issues
+
+[[release-notes-6.3.1]]
+== Elasticsearch version 6.3.1
+
+coming[6.3.1]
+
+//[float]
+[[breaking-6.3.1]]
+//=== Breaking Changes
+
//[float]
//=== Breaking Java Changes
@@ -63,51 +210,9 @@ ones that the user is authorized to access in case field level security is enabl
[float]
=== Bug Fixes
-Fixed prerelease version of elasticsearch in the `deb` package to sort before GA versions
-({pull}29000[#29000])
+Reduce the number of object allocations made by {security} when resolving the indices and aliases for a request ({pull}30180[#30180])
-=== Regressions
-Fail snapshot operations early when creating or deleting a snapshot on a repository that has been
-written to by an older Elasticsearch after writing to it with a newer Elasticsearch version. ({pull}30140[#30140])
-
-Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641])
-
-//[float]
-//=== Regressions
-
-//[float]
-//=== Known Issues
-
-[[release-notes-6.4.0]]
-== {es} 6.4.0
-
-[float]
-=== New Features
-
-The new <> field allows to know which fields
-got ignored at index time because of the <>
-option. ({pull}30140[#29658])
-
-[float]
-=== Enhancements
-
-{ref-64}/breaking_64_api_changes.html#copy-source-settings-on-resize[Allow copying source settings on index resize operations] ({pull}30255[#30255])
-
-Added new "Request" object flavored request methods. Prefer these instead of the
-multi-argument versions. ({pull}29623[#29623])
-
-The cluster state listener to decide if watcher should be
-stopped/started/paused now runs far less code in an executor but is more
-synchronous and predictable. Also the trigger engine thread is only started on
-data nodes. And the Execute Watch API can be triggered regardless is watcher is
-started or stopped. ({pull}30118[#30118])
-
-[float]
-=== Bug Fixes
-
-Do not ignore request analysis/similarity settings on index resize operations when the source index already contains such settings ({pull}30216[#30216])
-
-Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641])
+Respect accept header on requests with no handler ({pull}30383[#30383])
//[float]
//=== Regressions
diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc
index 9f7fdc9ea2f..608d5b9288e 100644
--- a/docs/Versions.asciidoc
+++ b/docs/Versions.asciidoc
@@ -1,7 +1,7 @@
:version: 7.0.0-alpha1
:major-version: 7.x
-:lucene_version: 7.3.0
-:lucene_version_path: 7_3_0
+:lucene_version: 7.4.0
+:lucene_version_path: 7_4_0
:branch: master
:jdk: 1.8.0_131
:jdk_major: 8
diff --git a/docs/build.gradle b/docs/build.gradle
index 5057bead62d..e8c406594b2 100644
--- a/docs/build.gradle
+++ b/docs/build.gradle
@@ -32,6 +32,7 @@ integTestCluster {
configFile 'analysis/synonym.txt'
configFile 'analysis/stemmer_override.txt'
configFile 'userdict_ja.txt'
+ configFile 'userdict_ko.txt'
configFile 'KeywordTokenizer.rbbi'
extraConfigFile 'hunspell/en_US/en_US.aff', '../server/src/test/resources/indices/analyze/conf_dir/hunspell/en_US/en_US.aff'
extraConfigFile 'hunspell/en_US/en_US.dic', '../server/src/test/resources/indices/analyze/conf_dir/hunspell/en_US/en_US.dic'
diff --git a/docs/java-rest/high-level/indices/get_settings.asciidoc b/docs/java-rest/high-level/indices/get_settings.asciidoc
new file mode 100644
index 00000000000..b054715119e
--- /dev/null
+++ b/docs/java-rest/high-level/indices/get_settings.asciidoc
@@ -0,0 +1,96 @@
+[[java-rest-high-get-settings]]
+=== Get Settings API
+
+[[java-rest-high-get-settings-request]]
+==== Get Settings Request
+
+A `GetSettingsRequest` requires one or more `index` arguments:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request]
+--------------------------------------------------
+<1> The index whose settings we should retrieve
+
+==== Optional arguments
+The following arguments can optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request-names]
+--------------------------------------------------
+<1> One or more settings that should be the only settings retrieved. If unset, all settings will be retrieved
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request-include-defaults]
+--------------------------------------------------
+<1> If true, defaults will be returned for settings not explicitly set on the index
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request-indicesOptions]
+--------------------------------------------------
+<1> Setting `IndicesOptions` controls how unavailable indices are resolved and
+how wildcard expressions are expanded
+
+[[java-rest-high-get-settings-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-execute]
+--------------------------------------------------
+
+[[java-rest-high-get-settings-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a Get Settings request requires both the `GetSettingsRequest`
+instance and an `ActionListener` instance to be passed to the asynchronous
+method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-execute-async]
+--------------------------------------------------
+<1> The `GetSettingsRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `GetSettingsResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of failure. The raised exception is provided as an argument
+
+[[java-rest-high-get-settings-response]]
+==== Get Settings Response
+
+The returned `GetSettingsResponse` allows to retrieve information about the
+executed operation as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-response]
+--------------------------------------------------
+<1> We can retrieve the setting value for a particular index directly from the response as a string
+<2> We can also retrieve the Settings object for a particular index for further examination
+<3> The returned Settings object provides convenience methods for non String types
+
+If the `includeDefaults` flag was set to true in the `GetSettingsRequest`, the
+behavior of `GetSettingsResponse` will differ somewhat.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-defaults-response]
+--------------------------------------------------
+<1> Individual default setting values may be retrieved directly from the `GetSettingsResponse`
+<2> We may retrieve a Settings object for an index that contains those settings with default values
diff --git a/docs/java-rest/high-level/indices/put_template.asciidoc b/docs/java-rest/high-level/indices/put_template.asciidoc
new file mode 100644
index 00000000000..7f0f3a1fee7
--- /dev/null
+++ b/docs/java-rest/high-level/indices/put_template.asciidoc
@@ -0,0 +1,168 @@
+[[java-rest-high-put-template]]
+=== Put Template API
+
+[[java-rest-high-put-template-request]]
+==== Put Index Template Request
+
+A `PutIndexTemplateRequest` specifies the `name` of a template and `patterns`
+which control whether the template should be applied to a new index.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request]
+--------------------------------------------------
+<1> The name of the template
+<2> The patterns of the template
+
+==== Settings
+The settings of the template will be applied to the new index whose name matches the
+template's patterns.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-settings]
+--------------------------------------------------
+<1> Settings for this template
+
+[[java-rest-high-put-template-request-mappings]]
+==== Mappings
+The mapping of the template will be applied to the new index whose name matches the
+template's patterns.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-mappings-json]
+--------------------------------------------------
+<1> The type to define
+<2> The mapping for this type, provided as a JSON string
+
+The mapping source can be provided in different ways in addition to the
+`String` example shown above:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-mappings-map]
+--------------------------------------------------
+<1> Mapping source provided as a `Map` which gets automatically converted
+to JSON format
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-mappings-xcontent]
+--------------------------------------------------
+<1> Mapping source provided as an `XContentBuilder` object, the Elasticsearch
+built-in helpers to generate JSON content
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-mappings-shortcut]
+--------------------------------------------------
+<1> Mapping source provided as `Object` key-pairs, which gets converted to
+JSON format
+
+==== Aliases
+The aliases of the template will define aliasing to the index whose name matches the
+template's patterns. A placeholder `{index}` can be used in an alias of a template.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-aliases]
+--------------------------------------------------
+<1> The alias to define
+<2> The alias to define with placeholder
+
+==== Order
+In case multiple templates match an index, the orders of matching templates determine
+the sequence in which the settings, mappings, and aliases of each matching template are applied.
+Templates with lower orders are applied first, and higher orders override them.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-order]
+--------------------------------------------------
+<1> The order of the template
+
+==== Version
+A template can optionally specify a version number which can be used to simplify template
+management by external systems.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-version]
+--------------------------------------------------
+<1> The version number of the template
+
+==== Providing the whole source
+The whole source including all of its sections (mappings, settings and aliases)
+can also be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-whole-source]
+--------------------------------------------------
+<1> The source provided as a JSON string. It can also be provided as a `Map`
+or an `XContentBuilder`.
+
+==== Optional arguments
+The following arguments can optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-create]
+--------------------------------------------------
+<1> Force the request to only create a new template; do not overwrite an existing template
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-masterTimeout]
+--------------------------------------------------
+<1> Timeout to connect to the master node as a `TimeValue`
+<2> Timeout to connect to the master node as a `String`
+
+[[java-rest-high-put-template-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-execute]
+--------------------------------------------------
+
+[[java-rest-high-put-template-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a put template request requires both the `PutIndexTemplateRequest`
+instance and an `ActionListener` instance to be passed to the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-execute-async]
+--------------------------------------------------
+<1> The `PutIndexTemplateRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `PutIndexTemplateResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of failure. The raised exception is provided as an argument
+
+[[java-rest-high-put-template-response]]
+==== Put Index Template Response
+
+The returned `PutIndexTemplateResponse` allows to retrieve information about the
+executed operation as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-response]
+--------------------------------------------------
+<1> Indicates whether all of the nodes have acknowledged the request
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index 1c0e09c6c07..6623e09242f 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -69,6 +69,7 @@ Index Management::
* <>
* <>
* <>
+* <>
Mapping Management::
* <>
@@ -93,6 +94,8 @@ include::indices/put_mapping.asciidoc[]
include::indices/update_aliases.asciidoc[]
include::indices/exists_alias.asciidoc[]
include::indices/put_settings.asciidoc[]
+include::indices/get_settings.asciidoc[]
+include::indices/put_template.asciidoc[]
== Cluster APIs
diff --git a/docs/plugins/analysis-nori.asciidoc b/docs/plugins/analysis-nori.asciidoc
new file mode 100644
index 00000000000..dd47ca819a7
--- /dev/null
+++ b/docs/plugins/analysis-nori.asciidoc
@@ -0,0 +1,407 @@
+[[analysis-nori]]
+=== Korean (nori) Analysis Plugin
+
+The Korean (nori) Analysis plugin integrates Lucene nori analysis
+module into elasticsearch. It uses the https://bitbucket.org/eunjeon/mecab-ko-dic[mecab-ko-dic dictionary]
+to perform morphological analysis of Korean texts.
+
+:plugin_name: analysis-nori
+include::install_remove.asciidoc[]
+
+[[analysis-nori-analyzer]]
+==== `nori` analyzer
+
+The `nori` analyzer consists of the following tokenizer and token filters:
+
+* <<analysis-nori-tokenizer>>
+* <<analysis-nori-speech>> token filter
+* <<analysis-nori-readingform>> token filter
+* {ref}/analysis-lowercase-tokenfilter.html[`lowercase`] token filter
+
+It supports the `decompound_mode` and `user_dictionary` settings from
+<<analysis-nori-tokenizer,`nori_tokenizer`>> and the `stoptags` setting from
+<<analysis-nori-speech,`nori_part_of_speech`>>.
+
+[[analysis-nori-tokenizer]]
+==== `nori_tokenizer`
+
+The `nori_tokenizer` accepts the following settings:
+
+`decompound_mode`::
++
+--
+
+The decompound mode determines how the tokenizer handles compound tokens.
+It can be set to:
+
+`none`::
+
+ No decomposition for compounds. Example output:
+
+ 가거도항
+ 가곡역
+
+`discard`::
+
+ Decomposes compounds and discards the original form (*default*). Example output:
+
+ 가곡역 => 가곡, 역
+
+`mixed`::
+
+ Decomposes compounds and keeps the original form. Example output:
+
+ 가곡역 => 가곡역, 가곡, 역
+--
+
+`user_dictionary`::
++
+--
+The Nori tokenizer uses the https://bitbucket.org/eunjeon/mecab-ko-dic[mecab-ko-dic dictionary] by default.
+A `user_dictionary` with custom nouns (`NNG`) may be appended to the default dictionary.
+The dictionary should have the following format:
+
+[source,txt]
+-----------------------
+<token> [<token 1> ... <token n>]
+-----------------------
+
+The first token is mandatory and represents the custom noun that should be added in
+the dictionary. For compound nouns the custom segmentation can be provided
+after the first token (`[<token 1> ... <token n>]`). The segmentation of the
+custom compound nouns is controlled by the `decompound_mode` setting.
+--
+
+As a demonstration of how the user dictionary can be used, save the following
+dictionary to `$ES_HOME/config/userdict_ko.txt`:
+
+[source,txt]
+-----------------------
+c++ <1>
+C샤프
+세종
+세종시 세종 시 <2>
+-----------------------
+
+<1> A simple noun
+<2> A compound noun (`세종시`) followed by its decomposition: `세종` and `시`.
+
+Then create an analyzer as follows:
+
+[source,js]
+--------------------------------------------------
+PUT nori_sample
+{
+ "settings": {
+ "index": {
+ "analysis": {
+ "tokenizer": {
+ "nori_user_dict": {
+ "type": "nori_tokenizer",
+ "decompound_mode": "mixed",
+ "user_dictionary": "userdict_ko.txt"
+ }
+ },
+ "analyzer": {
+ "my_analyzer": {
+ "type": "custom",
+ "tokenizer": "nori_user_dict"
+ }
+ }
+ }
+ }
+ }
+}
+
+GET nori_sample/_analyze
+{
+ "analyzer": "my_analyzer",
+ "text": "세종시" <1>
+}
+--------------------------------------------------
+// CONSOLE
+
+<1> Sejong city
+
+The above `analyze` request returns the following:
+
+[source,js]
+--------------------------------------------------
+{
+ "tokens" : [ {
+ "token" : "세종시",
+ "start_offset" : 0,
+ "end_offset" : 3,
+ "type" : "word",
+ "position" : 0,
+ "positionLength" : 2 <1>
+ }, {
+ "token" : "세종",
+ "start_offset" : 0,
+ "end_offset" : 2,
+ "type" : "word",
+ "position" : 0
+ }, {
+ "token" : "시",
+ "start_offset" : 2,
+ "end_offset" : 3,
+ "type" : "word",
+ "position" : 1
+ }]
+}
+--------------------------------------------------
+// TESTRESPONSE
+
+<1> This is a compound token that spans two positions (`mixed` mode).
+
+The `nori_tokenizer` sets a number of additional attributes per token that are used by token filters
+to modify the stream.
+You can view all these additional attributes with the following request:
+
+[source,js]
+--------------------------------------------------
+GET _analyze
+{
+ "tokenizer": "nori_tokenizer",
+ "text": "뿌리가 깊은 나무는", <1>
+ "attributes" : ["posType", "leftPOS", "rightPOS", "morphemes", "reading"],
+ "explain": true
+}
+--------------------------------------------------
+// CONSOLE
+
+<1> A tree with deep roots
+
+Which responds with:
+
+[source,js]
+--------------------------------------------------
+{
+ "detail": {
+ "custom_analyzer": true,
+ "charfilters": [],
+ "tokenizer": {
+ "name": "nori_tokenizer",
+ "tokens": [
+ {
+ "token": "뿌리",
+ "start_offset": 0,
+ "end_offset": 2,
+ "type": "word",
+ "position": 0,
+ "leftPOS": "NNG(General Noun)",
+ "morphemes": null,
+ "posType": "MORPHEME",
+ "reading": null,
+ "rightPOS": "NNG(General Noun)"
+ },
+ {
+ "token": "가",
+ "start_offset": 2,
+ "end_offset": 3,
+ "type": "word",
+ "position": 1,
+ "leftPOS": "J(Ending Particle)",
+ "morphemes": null,
+ "posType": "MORPHEME",
+ "reading": null,
+ "rightPOS": "J(Ending Particle)"
+ },
+ {
+ "token": "깊",
+ "start_offset": 4,
+ "end_offset": 5,
+ "type": "word",
+ "position": 2,
+ "leftPOS": "VA(Adjective)",
+ "morphemes": null,
+ "posType": "MORPHEME",
+ "reading": null,
+ "rightPOS": "VA(Adjective)"
+ },
+ {
+ "token": "은",
+ "start_offset": 5,
+ "end_offset": 6,
+ "type": "word",
+ "position": 3,
+ "leftPOS": "E(Verbal endings)",
+ "morphemes": null,
+ "posType": "MORPHEME",
+ "reading": null,
+ "rightPOS": "E(Verbal endings)"
+ },
+ {
+ "token": "나무",
+ "start_offset": 7,
+ "end_offset": 9,
+ "type": "word",
+ "position": 4,
+ "leftPOS": "NNG(General Noun)",
+ "morphemes": null,
+ "posType": "MORPHEME",
+ "reading": null,
+ "rightPOS": "NNG(General Noun)"
+ },
+ {
+ "token": "는",
+ "start_offset": 9,
+ "end_offset": 10,
+ "type": "word",
+ "position": 5,
+ "leftPOS": "J(Ending Particle)",
+ "morphemes": null,
+ "posType": "MORPHEME",
+ "reading": null,
+ "rightPOS": "J(Ending Particle)"
+ }
+ ]
+ },
+ "tokenfilters": []
+ }
+}
+--------------------------------------------------
+// TESTRESPONSE
+
+[[analysis-nori-speech]]
+==== `nori_part_of_speech` token filter
+
+The `nori_part_of_speech` token filter removes tokens that match a set of
+part-of-speech tags. The list of supported tags and their meanings can be found here:
+{lucene_version_path}/org/apache/lucene/analysis/ko/POS.Tag.html[Part of speech tags]
+
+It accepts the following setting:
+
+`stoptags`::
+
+ An array of part-of-speech tags that should be removed.
+
+and defaults to:
+
+[source,js]
+--------------------------------------------------
+"stoptags": [
+ "E",
+ "IC",
+ "J",
+ "MAG", "MAJ", "MM",
+ "SP", "SSC", "SSO", "SC", "SE",
+ "XPN", "XSA", "XSN", "XSV",
+ "UNA", "NA", "VSV"
+]
+--------------------------------------------------
+// NOTCONSOLE
+
+For example:
+
+[source,js]
+--------------------------------------------------
+PUT nori_sample
+{
+ "settings": {
+ "index": {
+ "analysis": {
+ "analyzer": {
+ "my_analyzer": {
+ "tokenizer": "nori_tokenizer",
+ "filter": [
+ "my_posfilter"
+ ]
+ }
+ },
+ "filter": {
+ "my_posfilter": {
+ "type": "nori_part_of_speech",
+ "stoptags": [
+ "NR" <1>
+ ]
+ }
+ }
+ }
+ }
+ }
+}
+
+GET nori_sample/_analyze
+{
+ "analyzer": "my_analyzer",
+ "text": "여섯 용이" <2>
+}
+--------------------------------------------------
+// CONSOLE
+
+<1> Korean numerals should be removed (`NR`)
+<2> Six dragons
+
+Which responds with:
+
+[source,js]
+--------------------------------------------------
+{
+ "tokens" : [ {
+ "token" : "용",
+ "start_offset" : 3,
+ "end_offset" : 4,
+ "type" : "word",
+ "position" : 1
+ }, {
+ "token" : "이",
+ "start_offset" : 4,
+ "end_offset" : 5,
+ "type" : "word",
+ "position" : 2
+ } ]
+}
+--------------------------------------------------
+// TESTRESPONSE
+
+[[analysis-nori-readingform]]
+==== `nori_readingform` token filter
+
+The `nori_readingform` token filter rewrites tokens written in Hanja to their Hangul form.
+
+[source,js]
+--------------------------------------------------
+PUT nori_sample
+{
+ "settings": {
+ "index":{
+ "analysis":{
+ "analyzer" : {
+ "my_analyzer" : {
+ "tokenizer" : "nori_tokenizer",
+ "filter" : ["nori_readingform"]
+ }
+ }
+ }
+ }
+ }
+}
+
+GET nori_sample/_analyze
+{
+ "analyzer": "my_analyzer",
+ "text": "鄕歌" <1>
+}
+--------------------------------------------------
+// CONSOLE
+
+<1> A token written in Hanja: Hyangga
+
+Which responds with:
+
+[source,js]
+--------------------------------------------------
+{
+ "tokens" : [ {
+ "token" : "향가", <1>
+ "start_offset" : 0,
+ "end_offset" : 2,
+ "type" : "word",
+ "position" : 0
+ }]
+}
+--------------------------------------------------
+// TESTRESPONSE
+
+<1> The Hanja form is replaced by the Hangul translation.
diff --git a/docs/plugins/analysis.asciidoc b/docs/plugins/analysis.asciidoc
index c09c48640ea..875c87124ef 100644
--- a/docs/plugins/analysis.asciidoc
+++ b/docs/plugins/analysis.asciidoc
@@ -20,6 +20,10 @@ transliteration.
Advanced analysis of Japanese using the http://www.atilika.org/[Kuromoji analyzer].
+<<analysis-nori,Nori>>::
+
+Morphological analysis of Korean using the Lucene Nori analyzer.
+
<>::
Analyzes tokens into their phonetic equivalent using Soundex, Metaphone,
@@ -59,6 +63,8 @@ include::analysis-icu.asciidoc[]
include::analysis-kuromoji.asciidoc[]
+include::analysis-nori.asciidoc[]
+
include::analysis-phonetic.asciidoc[]
include::analysis-smartcn.asciidoc[]
diff --git a/docs/reference/cat/plugins.asciidoc b/docs/reference/cat/plugins.asciidoc
index ca35a23d305..a9915d7aaa2 100644
--- a/docs/reference/cat/plugins.asciidoc
+++ b/docs/reference/cat/plugins.asciidoc
@@ -16,10 +16,11 @@ Might look like:
name component version description
U7321H6 analysis-icu {version} The ICU Analysis plugin integrates Lucene ICU module into elasticsearch, adding ICU relates analysis components.
U7321H6 analysis-kuromoji {version} The Japanese (kuromoji) Analysis plugin integrates Lucene kuromoji analysis module into elasticsearch.
+U7321H6 analysis-nori {version} The Korean (nori) Analysis plugin integrates Lucene nori analysis module into elasticsearch.
U7321H6 analysis-phonetic {version} The Phonetic Analysis plugin integrates phonetic token filter analysis with elasticsearch.
U7321H6 analysis-smartcn {version} Smart Chinese Analysis plugin integrates Lucene Smart Chinese analysis module into elasticsearch.
U7321H6 analysis-stempel {version} The Stempel (Polish) Analysis plugin integrates Lucene stempel (polish) analysis module into elasticsearch.
-U7321H6 analysis-ukrainian {version} The Ukrainian Analysis plugin integrates the Lucene UkrainianMorfologikAnalyzer into elasticsearch.
+U7321H6 analysis-ukrainian {version} The Ukrainian Analysis plugin integrates the Lucene UkrainianMorfologikAnalyzer into elasticsearch.
U7321H6 discovery-azure-classic {version} The Azure Classic Discovery plugin allows to use Azure Classic API for the unicast discovery mechanism
U7321H6 discovery-ec2 {version} The EC2 discovery plugin allows to use AWS API for the unicast discovery mechanism.
U7321H6 discovery-file {version} Discovery file plugin enables unicast discovery from hosts stored in a file.
diff --git a/docs/reference/mapping/params/normalizer.asciidoc b/docs/reference/mapping/params/normalizer.asciidoc
index c69e816380d..723f79c5dc4 100644
--- a/docs/reference/mapping/params/normalizer.asciidoc
+++ b/docs/reference/mapping/params/normalizer.asciidoc
@@ -7,7 +7,8 @@ produces a single token.
The `normalizer` is applied prior to indexing the keyword, as well as at
search-time when the `keyword` field is searched via a query parser such as
-the <<query-dsl-match-query,`match`>> query.
+the <<query-dsl-match-query,`match`>> query or via a term level query
+such as the <<query-dsl-term-query,`term`>> query.
[source,js]
--------------------------------
@@ -53,6 +54,15 @@ PUT index/_doc/3
POST index/_refresh
+GET index/_search
+{
+ "query": {
+ "term": {
+ "foo": "BAR"
+ }
+ }
+}
+
GET index/_search
{
"query": {
@@ -64,7 +74,7 @@ GET index/_search
--------------------------------
// CONSOLE
-The above query matches documents 1 and 2 since `BÀR` is converted to `bar` at
+The above queries match documents 1 and 2 since `BÀR` is converted to `bar` at
both index and query time.
[source,js]
diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc
index 43ad71e3707..8768836484d 100644
--- a/docs/reference/mapping/types/geo-shape.asciidoc
+++ b/docs/reference/mapping/types/geo-shape.asciidoc
@@ -32,7 +32,7 @@ best tree_levels value to honor this precision. The value should be a
number followed by an optional distance unit. Valid distance units
include: `in`, `inch`, `yd`, `yard`, `mi`, `miles`, `km`, `kilometers`,
`m`,`meters`, `cm`,`centimeters`, `mm`, `millimeters`.
-| `meters`
+| `50m`
|`tree_levels` |Maximum number of layers to be used by the PrefixTree.
This can be used to control the precision of shape representations and
@@ -42,7 +42,7 @@ certain level of understanding of the underlying implementation, users
may use the `precision` parameter instead. However, Elasticsearch only
uses the tree_levels parameter internally and this is what is returned
via the mapping API even if you use the precision parameter.
-| `50m`
+| various
|`strategy` |The strategy parameter defines the approach for how to
represent shapes at indexing and search time. It also influences the
@@ -119,14 +119,14 @@ Geohashes are base32 encoded strings of the bits of the latitude and
longitude interleaved. So the longer the hash, the more precise it is.
Each character added to the geohash represents another tree level and
adds 5 bits of precision to the geohash. A geohash represents a
-rectangular area and has 32 sub rectangles. The maximum amount of levels
-in Elasticsearch is 24.
+rectangular area and has 32 sub rectangles. The maximum number of levels
+in Elasticsearch is 24; the default is 9.
* QuadPrefixTree - Uses a
http://en.wikipedia.org/wiki/Quadtree[quadtree] for grid squares.
Similar to geohash, quad trees interleave the bits of the latitude and
longitude the resulting hash is a bit set. A tree level in a quad tree
represents 2 bits in this bit set, one for each coordinate. The maximum
-amount of levels for the quad trees in Elasticsearch is 50.
+number of levels for the quad trees in Elasticsearch is 29; the default is 21.
[[spatial-strategy]]
[float]
diff --git a/docs/reference/query-dsl/term-level-queries.asciidoc b/docs/reference/query-dsl/term-level-queries.asciidoc
index 883fd4c36b5..f4e185ba959 100644
--- a/docs/reference/query-dsl/term-level-queries.asciidoc
+++ b/docs/reference/query-dsl/term-level-queries.asciidoc
@@ -3,7 +3,8 @@
While the <<full-text-queries,full text queries>> will analyze the query
string before executing, the _term-level queries_ operate on the exact terms
-that are stored in the inverted index.
+that are stored in the inverted index, and will normalize terms before executing
+only for <<keyword,`keyword`>> fields with <<normalizer,`normalizer`>> property.
These queries are usually used for structured data like numbers, dates, and
enums, rather than full text fields. Alternatively, they allow you to craft
diff --git a/docs/src/test/cluster/config/userdict_ko.txt b/docs/src/test/cluster/config/userdict_ko.txt
new file mode 100644
index 00000000000..63c1c3a1e22
--- /dev/null
+++ b/docs/src/test/cluster/config/userdict_ko.txt
@@ -0,0 +1,5 @@
+# Additional nouns
+c++
+C샤프
+세종
+세종시 세종 시
\ No newline at end of file
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
index 5113f8a6eb4..befd26296a5 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
@@ -130,6 +130,8 @@ public class CommonAnalysisFactoryTests extends AnalysisFactoryTestCase {
filters.put("brazilianstem", BrazilianStemTokenFilterFactory.class);
filters.put("czechstem", CzechStemTokenFilterFactory.class);
filters.put("germanstem", GermanStemTokenFilterFactory.class);
+ // this filter is not exposed and should only be used internally
+ filters.put("fixedshingle", Void.class);
return filters;
}
diff --git a/modules/lang-expression/licenses/lucene-expressions-7.3.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.3.0.jar.sha1
deleted file mode 100644
index 62a094a8b0f..00000000000
--- a/modules/lang-expression/licenses/lucene-expressions-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-cb82d9db3043bbd25b4d0eb5022ed1e529c936d3
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-1ed95c097b.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..8303f69b87d
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+63ff4af3504881744695f6239fcb3e9c0e3240b1
\ No newline at end of file
diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle
index 123db9fc4a5..4d6f9310cdd 100644
--- a/plugins/analysis-icu/build.gradle
+++ b/plugins/analysis-icu/build.gradle
@@ -30,7 +30,7 @@ forbiddenApis {
dependencies {
compile "org.apache.lucene:lucene-analyzers-icu:${versions.lucene}"
- compile 'com.ibm.icu:icu4j:59.1'
+ compile 'com.ibm.icu:icu4j:61.1'
}
dependencyLicenses {
diff --git a/plugins/analysis-icu/licenses/icu4j-59.1.jar.sha1 b/plugins/analysis-icu/licenses/icu4j-59.1.jar.sha1
deleted file mode 100644
index 5401f914f58..00000000000
--- a/plugins/analysis-icu/licenses/icu4j-59.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6f06e820cf4c8968bbbaae66ae0b33f6a256b57f
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/icu4j-61.1.jar.sha1 b/plugins/analysis-icu/licenses/icu4j-61.1.jar.sha1
new file mode 100644
index 00000000000..7b7fcfe1c79
--- /dev/null
+++ b/plugins/analysis-icu/licenses/icu4j-61.1.jar.sha1
@@ -0,0 +1 @@
+28d33b5e44e72edcc66a5da7a34a42147f38d987
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.3.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.3.0.jar.sha1
deleted file mode 100644
index de70972e975..00000000000
--- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c09216a18658d5b2912566efff8665e45edc24b4
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-1ed95c097b.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..efcbe94ca18
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+5f3c053ef858c58c74a687a40f5451d19b69850b
\ No newline at end of file
diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java
index fa1999cf17e..84c611c0f81 100644
--- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java
+++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java
@@ -89,9 +89,9 @@ public class IcuTokenizerFactory extends AbstractTokenizerFactory {
// cjkAsWords nor myanmarAsWords are not configurable yet.
ICUTokenizerConfig config = new DefaultICUTokenizerConfig(true, true) {
@Override
- public BreakIterator getBreakIterator(int script) {
+ public RuleBasedBreakIterator getBreakIterator(int script) {
if (breakers[script] != null) {
- return (BreakIterator) breakers[script].clone();
+ return (RuleBasedBreakIterator) breakers[script].clone();
} else {
return super.getBreakIterator(script);
}
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.3.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.3.0.jar.sha1
deleted file mode 100644
index 40ff3efe264..00000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c9d5bbd0affa90b46e173c762c35419a54977c35
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-1ed95c097b.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..04f81d14df0
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+a6e72085f7c2ade43ec0e5f52c227e6f715666ad
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/10_basic.yml b/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_nori/10_basic.yml
similarity index 100%
rename from plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/10_basic.yml
rename to plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_nori/10_basic.yml
diff --git a/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/20_search.yml b/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_nori/20_search.yml
similarity index 100%
rename from plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/20_search.yml
rename to plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_nori/20_search.yml
diff --git a/plugins/analysis-nori/build.gradle b/plugins/analysis-nori/build.gradle
new file mode 100644
index 00000000000..a9d3a1126dc
--- /dev/null
+++ b/plugins/analysis-nori/build.gradle
@@ -0,0 +1,32 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+esplugin {
+ description 'The Korean (nori) Analysis plugin integrates Lucene nori analysis module into elasticsearch.'
+ classname 'org.elasticsearch.plugin.analysis.nori.AnalysisNoriPlugin'
+}
+
+dependencies {
+ compile "org.apache.lucene:lucene-analyzers-nori:${versions.lucene}"
+}
+
+dependencyLicenses {
+ mapping from: /lucene-.*/, to: 'lucene'
+}
+
diff --git a/plugins/analysis-nori/licenses/lucene-LICENSE.txt b/plugins/analysis-nori/licenses/lucene-LICENSE.txt
new file mode 100644
index 00000000000..28b134f5f8e
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-LICENSE.txt
@@ -0,0 +1,475 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+
+Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was
+derived from unicode conversion examples available at
+http://www.unicode.org/Public/PROGRAMS/CVTUTF. Here is the copyright
+from those sources:
+
+/*
+ * Copyright 2001-2004 Unicode, Inc.
+ *
+ * Disclaimer
+ *
+ * This source code is provided as is by Unicode, Inc. No claims are
+ * made as to fitness for any particular purpose. No warranties of any
+ * kind are expressed or implied. The recipient agrees to determine
+ * applicability of information provided. If this file has been
+ * purchased on magnetic or optical media from Unicode, Inc., the
+ * sole remedy for any claim will be exchange of defective media
+ * within 90 days of receipt.
+ *
+ * Limitations on Rights to Redistribute This Code
+ *
+ * Unicode, Inc. hereby grants the right to freely use the information
+ * supplied in this file in the creation of products supporting the
+ * Unicode Standard, and to make copies of this file in any form
+ * for internal or external distribution as long as this notice
+ * remains attached.
+ */
+
+
+Some code in core/src/java/org/apache/lucene/util/ArrayUtil.java was
+derived from Python 2.4.2 sources available at
+http://www.python.org. Full license is here:
+
+ http://www.python.org/download/releases/2.4.2/license/
+
+Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was
+derived from Python 3.1.2 sources available at
+http://www.python.org. Full license is here:
+
+ http://www.python.org/download/releases/3.1.2/license/
+
+Some code in core/src/java/org/apache/lucene/util/automaton was
+derived from Brics automaton sources available at
+www.brics.dk/automaton/. Here is the copyright from those sources:
+
+/*
+ * Copyright (c) 2001-2009 Anders Moeller
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+The levenshtein automata tables in core/src/java/org/apache/lucene/util/automaton
+were automatically generated with the moman/finenight FSA package.
+Here is the copyright for those sources:
+
+# Copyright (c) 2010, Jean-Philippe Barrette-LaPierre,
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+
+Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was
+derived from ICU (http://www.icu-project.org)
+The full license is available here:
+ http://source.icu-project.org/repos/icu/icu/trunk/license.html
+
+/*
+ * Copyright (C) 1999-2010, International Business Machines
+ * Corporation and others. All Rights Reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, and/or sell copies of the
+ * Software, and to permit persons to whom the Software is furnished to do so,
+ * provided that the above copyright notice(s) and this permission notice appear
+ * in all copies of the Software and that both the above copyright notice(s) and
+ * this permission notice appear in supporting documentation.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE
+ * LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR
+ * ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
+ * IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+ * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ *
+ * Except as contained in this notice, the name of a copyright holder shall not
+ * be used in advertising or otherwise to promote the sale, use or other
+ * dealings in this Software without prior written authorization of the
+ * copyright holder.
+ */
+
+The following license applies to the Snowball stemmers:
+
+Copyright (c) 2001, Dr Martin Porter
+Copyright (c) 2002, Richard Boulton
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * Neither the name of the copyright holders nor the names of its contributors
+ * may be used to endorse or promote products derived from this software
+ * without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+The following license applies to the KStemmer:
+
+Copyright © 2003,
+Center for Intelligent Information Retrieval,
+University of Massachusetts, Amherst.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+3. The names "Center for Intelligent Information Retrieval" and
+"University of Massachusetts" must not be used to endorse or promote products
+derived from this software without prior written permission. To obtain
+permission, contact info@ciir.cs.umass.edu.
+
+THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF MASSACHUSETTS AND OTHER CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+The following license applies to the Morfologik project:
+
+Copyright (c) 2006 Dawid Weiss
+Copyright (c) 2007-2011 Dawid Weiss, Marcin Miłkowski
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+ * Neither the name of Morfologik nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+---
+
+The dictionary comes from Morfologik project. Morfologik uses data from
+Polish ispell/myspell dictionary hosted at http://www.sjp.pl/slownik/en/ and
+is licenced on the terms of (inter alia) LGPL and Creative Commons
+ShareAlike. The part-of-speech tags were added in Morfologik project and
+are not found in the data from sjp.pl. The tagset is similar to IPI PAN
+tagset.
+
+---
+
+The following license applies to the Morfeusz project,
+used by org.apache.lucene.analysis.morfologik.
+
+BSD-licensed dictionary of Polish (SGJP)
+http://sgjp.pl/morfeusz/
+
+Copyright © 2011 Zygmunt Saloni, Włodzimierz Gruszczyński,
+ Marcin Woliński, Robert Wołosz
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the
+ distribution.
+
+THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDERS “AS IS” AND ANY EXPRESS
+OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/plugins/analysis-nori/licenses/lucene-NOTICE.txt b/plugins/analysis-nori/licenses/lucene-NOTICE.txt
new file mode 100644
index 00000000000..4970d207895
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-NOTICE.txt
@@ -0,0 +1,204 @@
+Apache Lucene
+Copyright 2001-2018 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
+
+Includes software from other Apache Software Foundation projects,
+including, but not limited to:
+ - Apache Ant
+ - Apache Jakarta Regexp
+ - Apache Commons
+ - Apache Xerces
+
+ICU4J, (under analysis/icu) is licensed under an MIT styles license
+and Copyright (c) 1995-2008 International Business Machines Corporation and others
+
+Some data files (under analysis/icu/src/data) are derived from Unicode data such
+as the Unicode Character Database. See http://unicode.org/copyright.html for more
+details.
+
+Brics Automaton (under core/src/java/org/apache/lucene/util/automaton) is
+BSD-licensed, created by Anders Møller. See http://www.brics.dk/automaton/
+
+The levenshtein automata tables (under core/src/java/org/apache/lucene/util/automaton) were
+automatically generated with the moman/finenight FSA library, created by
+Jean-Philippe Barrette-LaPierre. This library is available under an MIT license,
+see http://sites.google.com/site/rrettesite/moman and
+http://bitbucket.org/jpbarrette/moman/overview/
+
+The class org.apache.lucene.util.WeakIdentityMap was derived from
+the Apache CXF project and is Apache License 2.0.
+
+The Google Code Prettify is Apache License 2.0.
+See http://code.google.com/p/google-code-prettify/
+
+JUnit (junit-4.10) is licensed under the Common Public License v. 1.0
+See http://junit.sourceforge.net/cpl-v10.html
+
+This product includes code (JaspellTernarySearchTrie) from Java Spelling Checkin
+g Package (jaspell): http://jaspell.sourceforge.net/
+License: The BSD License (http://www.opensource.org/licenses/bsd-license.php)
+
+The snowball stemmers in
+ analysis/common/src/java/net/sf/snowball
+were developed by Martin Porter and Richard Boulton.
+The snowball stopword lists in
+ analysis/common/src/resources/org/apache/lucene/analysis/snowball
+were developed by Martin Porter and Richard Boulton.
+The full snowball package is available from
+ http://snowball.tartarus.org/
+
+The KStem stemmer in
+ analysis/common/src/org/apache/lucene/analysis/en
+was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst)
+under the BSD-license.
+
+The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default
+stopword list that is BSD-licensed created by Jacques Savoy. These files reside in:
+analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt,
+analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt,
+analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt,
+analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt,
+analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt,
+analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt
+See http://members.unine.ch/jacques.savoy/clef/index.html.
+
+The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers
+(common) are based on BSD-licensed reference implementations created by Jacques Savoy and
+Ljiljana Dolamic. These files reside in:
+analysis/common/src/java/org/apache/lucene/analysis/de/GermanLightStemmer.java
+analysis/common/src/java/org/apache/lucene/analysis/de/GermanMinimalStemmer.java
+analysis/common/src/java/org/apache/lucene/analysis/es/SpanishLightStemmer.java
+analysis/common/src/java/org/apache/lucene/analysis/fi/FinnishLightStemmer.java
+analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchLightStemmer.java
+analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchMinimalStemmer.java
+analysis/common/src/java/org/apache/lucene/analysis/hu/HungarianLightStemmer.java
+analysis/common/src/java/org/apache/lucene/analysis/it/ItalianLightStemmer.java
+analysis/common/src/java/org/apache/lucene/analysis/pt/PortugueseLightStemmer.java
+analysis/common/src/java/org/apache/lucene/analysis/ru/RussianLightStemmer.java
+analysis/common/src/java/org/apache/lucene/analysis/sv/SwedishLightStemmer.java
+
+The Stempel analyzer (stempel) includes BSD-licensed software developed
+by the Egothor project http://egothor.sf.net/, created by Leo Galambos, Martin Kvapil,
+and Edmond Nolan.
+
+The Polish analyzer (stempel) comes with a default
+stopword list that is BSD-licensed created by the Carrot2 project. The file resides
+in stempel/src/resources/org/apache/lucene/analysis/pl/stopwords.txt.
+See http://project.carrot2.org/license.html.
+
+The SmartChineseAnalyzer source code (smartcn) was
+provided by Xiaoping Gao and copyright 2009 by www.imdict.net.
+
+WordBreakTestUnicode_*.java (under modules/analysis/common/src/test/)
+is derived from Unicode data such as the Unicode Character Database.
+See http://unicode.org/copyright.html for more details.
+
+The Morfologik analyzer (morfologik) includes BSD-licensed software
+developed by Dawid Weiss and Marcin Miłkowski (http://morfologik.blogspot.com/).
+
+Morfologik uses data from Polish ispell/myspell dictionary
+(http://www.sjp.pl/slownik/en/) licenced on the terms of (inter alia)
+LGPL and Creative Commons ShareAlike.
+
+Morfologic includes data from BSD-licensed dictionary of Polish (SGJP)
+(http://sgjp.pl/morfeusz/)
+
+Servlet-api.jar and javax.servlet-*.jar are under the CDDL license, the original
+source code for this can be found at http://www.eclipse.org/jetty/downloads.php
+
+===========================================================================
+Kuromoji Japanese Morphological Analyzer - Apache Lucene Integration
+===========================================================================
+
+This software includes a binary and/or source version of data from
+
+ mecab-ipadic-2.7.0-20070801
+
+which can be obtained from
+
+ http://atilika.com/releases/mecab-ipadic/mecab-ipadic-2.7.0-20070801.tar.gz
+
+or
+
+ http://jaist.dl.sourceforge.net/project/mecab/mecab-ipadic/2.7.0-20070801/mecab-ipadic-2.7.0-20070801.tar.gz
+
+===========================================================================
+mecab-ipadic-2.7.0-20070801 Notice
+===========================================================================
+
+Nara Institute of Science and Technology (NAIST),
+the copyright holders, disclaims all warranties with regard to this
+software, including all implied warranties of merchantability and
+fitness, in no event shall NAIST be liable for
+any special, indirect or consequential damages or any damages
+whatsoever resulting from loss of use, data or profits, whether in an
+action of contract, negligence or other tortuous action, arising out
+of or in connection with the use or performance of this software.
+
+A large portion of the dictionary entries
+originate from ICOT Free Software. The following conditions for ICOT
+Free Software applies to the current dictionary as well.
+
+Each User may also freely distribute the Program, whether in its
+original form or modified, to any third party or parties, PROVIDED
+that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
+on, or be attached to, the Program, which is distributed substantially
+in the same form as set out herein and that such intended
+distribution, if actually made, will neither violate or otherwise
+contravene any of the laws and regulations of the countries having
+jurisdiction over the User or the intended distribution itself.
+
+NO WARRANTY
+
+The program was produced on an experimental basis in the course of the
+research and development conducted during the project and is provided
+to users as so produced on an experimental basis. Accordingly, the
+program is provided without any warranty whatsoever, whether express,
+implied, statutory or otherwise. The term "warranty" used herein
+includes, but is not limited to, any warranty of the quality,
+performance, merchantability and fitness for a particular purpose of
+the program and the nonexistence of any infringement or violation of
+any right of any third party.
+
+Each user of the program will agree and understand, and be deemed to
+have agreed and understood, that there is no warranty whatsoever for
+the program and, accordingly, the entire risk arising from or
+otherwise connected with the program is assumed by the user.
+
+Therefore, neither ICOT, the copyright holder, or any other
+organization that participated in or was otherwise related to the
+development of the program and their respective officials, directors,
+officers and other employees shall be held liable for any and all
+damages, including, without limitation, general, special, incidental
+and consequential damages, arising out of or otherwise in connection
+with the use or inability to use the program or any product, material
+or result produced or otherwise obtained by using the program,
+regardless of whether they have been advised of, or otherwise had
+knowledge of, the possibility of such damages at any time during the
+project or thereafter. Each user will be deemed to have agreed to the
+foregoing by his or her commencement of use of the program. The term
+"use" as used herein includes, but is not limited to, the use,
+modification, copying and distribution of the program and the
+production of secondary products from the program.
+
+In the case where the program, whether in its original form or
+modified, was distributed or delivered to or received by a user from
+any person, organization or entity other than ICOT, unless it makes or
+grants independently of ICOT any specific warranty to the user in
+writing, such person, organization or entity, will also be exempted
+from and not be held liable to the user for any such damages as noted
+above as far as the program is concerned.
+
+===========================================================================
+Nori Korean Morphological Analyzer - Apache Lucene Integration
+===========================================================================
+
+This software includes a binary and/or source version of data from
+
+ mecab-ko-dic-2.0.3-20170922
+
+which can be obtained from
+
+ https://bitbucket.org/eunjeon/mecab-ko-dic/downloads/mecab-ko-dic-2.0.3-20170922.tar.gz
diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-1ed95c097b.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..b10ae670df5
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+a7daed3dc3a67674862002f315cd9193944de783
\ No newline at end of file
diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriAnalyzerProvider.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriAnalyzerProvider.java
new file mode 100644
index 00000000000..f85c3f94e34
--- /dev/null
+++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriAnalyzerProvider.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.analysis;
+
+import java.util.List;
+import java.util.Set;
+import org.apache.lucene.analysis.ko.KoreanAnalyzer;
+import org.apache.lucene.analysis.ko.KoreanPartOfSpeechStopFilter;
+import org.apache.lucene.analysis.ko.KoreanTokenizer;
+import org.apache.lucene.analysis.ko.dict.UserDictionary;
+import org.apache.lucene.analysis.ko.POS;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.IndexSettings;
+
+import static org.elasticsearch.index.analysis.NoriPartOfSpeechStopFilterFactory.resolvePOSList;
+
+
+/**
+ * Analyzer provider for the Korean ("nori") analyzer.
+ *
+ * Builds a {@link KoreanAnalyzer} from index settings, reusing the tokenizer
+ * factory's parsing of the {@code decompound_mode} and {@code user_dictionary}
+ * options, and the {@code stoptags} word list for part-of-speech filtering.
+ * When no {@code stoptags} are configured, Lucene's default stop-tag set is used.
+ */
+public class NoriAnalyzerProvider extends AbstractIndexAnalyzerProvider<KoreanAnalyzer> {
+    private final KoreanAnalyzer analyzer;
+
+    public NoriAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
+        super(indexSettings, name, settings);
+        final KoreanTokenizer.DecompoundMode mode = NoriTokenizerFactory.getMode(settings);
+        final UserDictionary userDictionary = NoriTokenizerFactory.getUserDictionary(env, settings);
+        final List<String> tagList = Analysis.getWordList(env, settings, "stoptags");
+        final Set<POS.Tag> stopTags = tagList != null ? resolvePOSList(tagList) : KoreanPartOfSpeechStopFilter.DEFAULT_STOP_TAGS;
+        // Last argument disables emission of unknown-token unigrams -- TODO confirm against KoreanAnalyzer javadoc.
+        analyzer = new KoreanAnalyzer(userDictionary, mode, stopTags, false);
+    }
+
+    @Override
+    public KoreanAnalyzer get() {
+        return analyzer;
+    }
+}
diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriPartOfSpeechStopFilterFactory.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriPartOfSpeechStopFilterFactory.java
new file mode 100644
index 00000000000..d893c35cefb
--- /dev/null
+++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriPartOfSpeechStopFilterFactory.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.analysis;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.ko.KoreanPartOfSpeechStopFilter;
+import org.apache.lucene.analysis.ko.POS;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.IndexSettings;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Factory for the Korean part-of-speech stop filter.
+ *
+ * Removes tokens whose POS tag is in the configured {@code stoptags} word list;
+ * when the list is absent, Lucene's {@link KoreanPartOfSpeechStopFilter#DEFAULT_STOP_TAGS}
+ * are used.
+ */
+public class NoriPartOfSpeechStopFilterFactory extends AbstractTokenFilterFactory {
+    private final Set<POS.Tag> stopTags;
+
+    public NoriPartOfSpeechStopFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
+        super(indexSettings, name, settings);
+        List<String> tagList = Analysis.getWordList(env, settings, "stoptags");
+        this.stopTags = tagList != null ? resolvePOSList(tagList) : KoreanPartOfSpeechStopFilter.DEFAULT_STOP_TAGS;
+    }
+
+    @Override
+    public TokenStream create(TokenStream tokenStream) {
+        return new KoreanPartOfSpeechStopFilter(tokenStream, stopTags);
+    }
+
+    /**
+     * Resolves a list of POS tag names (e.g. from the {@code stoptags} setting)
+     * into Lucene {@link POS.Tag} constants via {@link POS#resolveTag(String)}.
+     * Package-private so {@link NoriAnalyzerProvider} can share the logic.
+     */
+    static Set<POS.Tag> resolvePOSList(List<String> tagList) {
+        Set<POS.Tag> stopTags = new HashSet<>();
+        for (String tag : tagList) {
+            stopTags.add(POS.resolveTag(tag));
+        }
+        return stopTags;
+    }
+}
diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriReadingFormFilterFactory.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriReadingFormFilterFactory.java
new file mode 100644
index 00000000000..aac6003c1b7
--- /dev/null
+++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriReadingFormFilterFactory.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.analysis;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.ko.KoreanReadingFormFilter;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.IndexSettings;
+
+/**
+ * Factory for the Korean reading-form token filter: wraps each token stream in a
+ * Lucene {@link KoreanReadingFormFilter}. Presumably this rewrites Hanja token
+ * text to its Hangul reading -- confirm against the Lucene nori documentation.
+ */
+public class NoriReadingFormFilterFactory extends AbstractTokenFilterFactory {
+    // Stateless: the filter takes no configuration beyond the standard factory arguments.
+ public NoriReadingFormFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
+ super(indexSettings, name, settings);
+ }
+
+ @Override
+ public TokenStream create(TokenStream tokenStream) {
+ return new KoreanReadingFormFilter(tokenStream);
+ }
+}
diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java
new file mode 100644
index 00000000000..346cc84e5e6
--- /dev/null
+++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.analysis;
+
+import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.ko.KoreanTokenizer;
+import org.apache.lucene.analysis.ko.dict.UserDictionary;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.IndexSettings;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Locale;
+
+public class NoriTokenizerFactory extends AbstractTokenizerFactory {
+ private static final String USER_DICT_OPTION = "user_dictionary";
+
+ private final UserDictionary userDictionary;
+ private final KoreanTokenizer.DecompoundMode decompoundMode;
+
+ public NoriTokenizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
+ super(indexSettings, name, settings);
+ decompoundMode = getMode(settings);
+ userDictionary = getUserDictionary(env, settings);
+ }
+
+ public static UserDictionary getUserDictionary(Environment env, Settings settings) {
+ try (Reader reader = Analysis.getReaderFromFile(env, settings, USER_DICT_OPTION)) {
+ if (reader == null) {
+ return null;
+ } else {
+ return UserDictionary.open(reader);
+ }
+ } catch (IOException e) {
+ throw new ElasticsearchException("failed to load nori user dictionary", e);
+ }
+ }
+
+ public static KoreanTokenizer.DecompoundMode getMode(Settings settings) {
+ KoreanTokenizer.DecompoundMode mode = KoreanTokenizer.DEFAULT_DECOMPOUND;
+ String modeSetting = settings.get("decompound_mode", null);
+ if (modeSetting != null) {
+ mode = KoreanTokenizer.DecompoundMode.valueOf(modeSetting.toUpperCase(Locale.ENGLISH));
+ }
+ return mode;
+ }
+
+ @Override
+ public Tokenizer create() {
+ return new KoreanTokenizer(KoreanTokenizer.DEFAULT_TOKEN_ATTRIBUTE_FACTORY, userDictionary, decompoundMode, false);
+ }
+
+}
diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/AnalysisNoriPlugin.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/AnalysisNoriPlugin.java
new file mode 100644
index 00000000000..6e9baa7acd2
--- /dev/null
+++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/AnalysisNoriPlugin.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.plugin.analysis.nori;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.elasticsearch.index.analysis.AnalyzerProvider;
+import org.elasticsearch.index.analysis.NoriAnalyzerProvider;
+import org.elasticsearch.index.analysis.NoriPartOfSpeechStopFilterFactory;
+import org.elasticsearch.index.analysis.NoriReadingFormFilterFactory;
+import org.elasticsearch.index.analysis.NoriTokenizerFactory;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
+import org.elasticsearch.index.analysis.TokenizerFactory;
+import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
+import org.elasticsearch.plugins.AnalysisPlugin;
+import org.elasticsearch.plugins.Plugin;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static java.util.Collections.singletonMap;
+
+public class AnalysisNoriPlugin extends Plugin implements AnalysisPlugin {
+ @Override
+ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
+ Map<String, AnalysisProvider<TokenFilterFactory>> extra = new HashMap<>();
+ extra.put("nori_part_of_speech", NoriPartOfSpeechStopFilterFactory::new);
+ extra.put("nori_readingform", NoriReadingFormFilterFactory::new);
+ return extra;
+ }
+
+ @Override
+ public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
+ return singletonMap("nori_tokenizer", NoriTokenizerFactory::new);
+ }
+
+ @Override
+ public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
+ return singletonMap("nori", NoriAnalyzerProvider::new);
+ }
+}
diff --git a/plugins/analysis-nori/src/test/java/org/elasticsearch/index/analysis/AnalysisNoriFactoryTests.java b/plugins/analysis-nori/src/test/java/org/elasticsearch/index/analysis/AnalysisNoriFactoryTests.java
new file mode 100644
index 00000000000..1677ba94b87
--- /dev/null
+++ b/plugins/analysis-nori/src/test/java/org/elasticsearch/index/analysis/AnalysisNoriFactoryTests.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.analysis;
+
+import org.apache.lucene.analysis.ko.KoreanTokenizerFactory;
+import org.elasticsearch.indices.analysis.AnalysisFactoryTestCase;
+import org.elasticsearch.plugin.analysis.nori.AnalysisNoriPlugin;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class AnalysisNoriFactoryTests extends AnalysisFactoryTestCase {
+ public AnalysisNoriFactoryTests() {
+ super(new AnalysisNoriPlugin());
+ }
+
+ @Override
+ protected Map<String, Class<?>> getTokenizers() {
+ Map<String, Class<?>> tokenizers = new HashMap<>(super.getTokenizers());
+ tokenizers.put("korean", KoreanTokenizerFactory.class);
+ return tokenizers;
+ }
+
+ @Override
+ protected Map<String, Class<?>> getTokenFilters() {
+ Map<String, Class<?>> filters = new HashMap<>(super.getTokenFilters());
+ filters.put("koreanpartofspeechstop", NoriPartOfSpeechStopFilterFactory.class);
+ filters.put("koreanreadingform", NoriReadingFormFilterFactory.class);
+ return filters;
+ }
+}
diff --git a/plugins/analysis-nori/src/test/java/org/elasticsearch/index/analysis/NoriAnalysisTests.java b/plugins/analysis-nori/src/test/java/org/elasticsearch/index/analysis/NoriAnalysisTests.java
new file mode 100644
index 00000000000..fa5858a7bbb
--- /dev/null
+++ b/plugins/analysis-nori/src/test/java/org/elasticsearch/index/analysis/NoriAnalysisTests.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.analysis;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.ko.KoreanAnalyzer;
+import org.apache.lucene.analysis.ko.KoreanTokenizer;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.plugin.analysis.nori.AnalysisNoriPlugin;
+import org.elasticsearch.test.ESTestCase.TestAnalysis;
+import org.elasticsearch.test.ESTokenStreamTestCase;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.StringReader;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+import static org.hamcrest.Matchers.instanceOf;
+
+public class NoriAnalysisTests extends ESTokenStreamTestCase {
+ public void testDefaultsNoriAnalysis() throws IOException {
+ TestAnalysis analysis = createTestAnalysis(Settings.EMPTY);
+
+ TokenizerFactory tokenizerFactory = analysis.tokenizer.get("nori_tokenizer");
+ assertThat(tokenizerFactory, instanceOf(NoriTokenizerFactory.class));
+
+ TokenFilterFactory filterFactory = analysis.tokenFilter.get("nori_part_of_speech");
+ assertThat(filterFactory, instanceOf(NoriPartOfSpeechStopFilterFactory.class));
+
+ filterFactory = analysis.tokenFilter.get("nori_readingform");
+ assertThat(filterFactory, instanceOf(NoriReadingFormFilterFactory.class));
+
+ IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers;
+ NamedAnalyzer analyzer = indexAnalyzers.get("nori");
+ assertThat(analyzer.analyzer(), instanceOf(KoreanAnalyzer.class));
+ }
+
+ public void testNoriAnalyzer() throws Exception {
+ Settings settings = Settings.builder()
+ .put("index.analysis.analyzer.my_analyzer.type", "nori")
+ .put("index.analysis.analyzer.my_analyzer.stoptags", "NR, SP")
+ .put("index.analysis.analyzer.my_analyzer.decompound_mode", "mixed")
+ .build();
+ TestAnalysis analysis = createTestAnalysis(settings);
+ Analyzer analyzer = analysis.indexAnalyzers.get("my_analyzer");
+ try (TokenStream stream = analyzer.tokenStream("", "여섯 용이" )) {
+ assertTokenStreamContents(stream, new String[] {"용", "이"});
+ }
+
+ try (TokenStream stream = analyzer.tokenStream("", "가늠표")) {
+ assertTokenStreamContents(stream, new String[] {"가늠표", "가늠", "표"});
+ }
+ }
+
+ public void testNoriAnalyzerUserDict() throws Exception {
+ Settings settings = Settings.builder()
+ .put("index.analysis.analyzer.my_analyzer.type", "nori")
+ .put("index.analysis.analyzer.my_analyzer.user_dictionary", "user_dict.txt")
+ .build();
+ TestAnalysis analysis = createTestAnalysis(settings);
+ Analyzer analyzer = analysis.indexAnalyzers.get("my_analyzer");
+ try (TokenStream stream = analyzer.tokenStream("", "세종시" )) {
+ assertTokenStreamContents(stream, new String[] {"세종", "시"});
+ }
+
+ try (TokenStream stream = analyzer.tokenStream("", "c++world")) {
+ assertTokenStreamContents(stream, new String[] {"c++", "world"});
+ }
+ }
+
+ public void testNoriTokenizer() throws Exception {
+ Settings settings = Settings.builder()
+ .put("index.analysis.tokenizer.my_tokenizer.type", "nori_tokenizer")
+ .put("index.analysis.tokenizer.my_tokenizer.decompound_mode", "mixed")
+ .build();
+ TestAnalysis analysis = createTestAnalysis(settings);
+ Tokenizer tokenizer = analysis.tokenizer.get("my_tokenizer").create();
+ tokenizer.setReader(new StringReader("뿌리가 깊은 나무"));
+ assertTokenStreamContents(tokenizer, new String[] {"뿌리", "가", "깊", "은", "나무"});
+ tokenizer.setReader(new StringReader("가늠표"));
+ assertTokenStreamContents(tokenizer, new String[] {"가늠표", "가늠", "표"});
+ }
+
+ public void testNoriPartOfSpeech() throws IOException {
+ Settings settings = Settings.builder()
+ .put("index.analysis.filter.my_filter.type", "nori_part_of_speech")
+ .put("index.analysis.filter.my_filter.stoptags", "NR, SP")
+ .build();
+ TestAnalysis analysis = createTestAnalysis(settings);
+ TokenFilterFactory factory = analysis.tokenFilter.get("my_filter");
+ Tokenizer tokenizer = new KoreanTokenizer();
+ tokenizer.setReader(new StringReader("여섯 용이"));
+ TokenStream stream = factory.create(tokenizer);
+ assertTokenStreamContents(stream, new String[] {"용", "이"});
+ }
+
+ public void testNoriReadingForm() throws IOException {
+ Settings settings = Settings.builder()
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .put("index.analysis.filter.my_filter.type", "nori_readingform")
+ .build();
+ TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new AnalysisNoriPlugin());
+ TokenFilterFactory factory = analysis.tokenFilter.get("my_filter");
+ Tokenizer tokenizer = new KoreanTokenizer();
+ tokenizer.setReader(new StringReader("鄕歌"));
+ TokenStream stream = factory.create(tokenizer);
+ assertTokenStreamContents(stream, new String[] {"향가"});
+ }
+
+ private TestAnalysis createTestAnalysis(Settings analysisSettings) throws IOException {
+ InputStream dict = NoriAnalysisTests.class.getResourceAsStream("user_dict.txt");
+ Path home = createTempDir();
+ Path config = home.resolve("config");
+ Files.createDirectory(config);
+ Files.copy(dict, config.resolve("user_dict.txt"));
+ Settings settings = Settings.builder()
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
+ .put(Environment.PATH_HOME_SETTING.getKey(), home)
+ .put(analysisSettings)
+ .build();
+ return AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new AnalysisNoriPlugin());
+ }
+}
diff --git a/plugins/analysis-nori/src/test/java/org/elasticsearch/index/analysis/NoriClientYamlTestSuiteIT.java b/plugins/analysis-nori/src/test/java/org/elasticsearch/index/analysis/NoriClientYamlTestSuiteIT.java
new file mode 100644
index 00000000000..5c393f617a8
--- /dev/null
+++ b/plugins/analysis-nori/src/test/java/org/elasticsearch/index/analysis/NoriClientYamlTestSuiteIT.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.analysis;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
+
+public class NoriClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
+
+ public NoriClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
+ super(testCandidate);
+ }
+
+ @ParametersFactory
+ public static Iterable<Object[]> parameters() throws Exception {
+ return ESClientYamlSuiteTestCase.createParameters();
+ }
+}
+
diff --git a/plugins/analysis-nori/src/test/resources/org/elasticsearch/index/analysis/user_dict.txt b/plugins/analysis-nori/src/test/resources/org/elasticsearch/index/analysis/user_dict.txt
new file mode 100644
index 00000000000..63c1c3a1e22
--- /dev/null
+++ b/plugins/analysis-nori/src/test/resources/org/elasticsearch/index/analysis/user_dict.txt
@@ -0,0 +1,5 @@
+# Additional nouns
+c++
+C샤프
+세종
+세종시 세종 시
\ No newline at end of file
diff --git a/plugins/analysis-nori/src/test/resources/rest-api-spec/test/analysis_nori/10_basic.yml b/plugins/analysis-nori/src/test/resources/rest-api-spec/test/analysis_nori/10_basic.yml
new file mode 100644
index 00000000000..a5aa9998da6
--- /dev/null
+++ b/plugins/analysis-nori/src/test/resources/rest-api-spec/test/analysis_nori/10_basic.yml
@@ -0,0 +1,48 @@
+# Integration tests for Korean analysis components
+#
+---
+"Analyzer":
+ - do:
+ indices.analyze:
+ body:
+ text: 뿌리가 깊은 나무
+ analyzer: nori
+ - length: { tokens: 3 }
+ - match: { tokens.0.token: 뿌리 }
+ - match: { tokens.1.token: 깊 }
+ - match: { tokens.2.token: 나무 }
+---
+"Tokenizer":
+ - do:
+ indices.analyze:
+ body:
+ text: 뿌리가 깊은 나무
+ tokenizer: nori_tokenizer
+ - length: { tokens: 5 }
+ - match: { tokens.0.token: 뿌리 }
+ - match: { tokens.1.token: 가 }
+ - match: { tokens.2.token: 깊 }
+ - match: { tokens.3.token: 은 }
+ - match: { tokens.4.token: 나무 }
+---
+"Part of speech filter":
+ - do:
+ indices.analyze:
+ body:
+ text: 뿌리가 깊은 나무
+ tokenizer: nori_tokenizer
+ filter: [nori_part_of_speech]
+ - length: { tokens: 3 }
+ - match: { tokens.0.token: 뿌리 }
+ - match: { tokens.1.token: 깊 }
+ - match: { tokens.2.token: 나무 }
+---
+"Reading filter":
+ - do:
+ indices.analyze:
+ body:
+ text: 鄕歌
+ tokenizer: nori_tokenizer
+ filter: [nori_readingform]
+ - length: { tokens: 1 }
+ - match: { tokens.0.token: 향가 }
diff --git a/plugins/analysis-nori/src/test/resources/rest-api-spec/test/analysis_nori/20_search.yml b/plugins/analysis-nori/src/test/resources/rest-api-spec/test/analysis_nori/20_search.yml
new file mode 100644
index 00000000000..cfb0ec5ee94
--- /dev/null
+++ b/plugins/analysis-nori/src/test/resources/rest-api-spec/test/analysis_nori/20_search.yml
@@ -0,0 +1,32 @@
+# Integration tests for Korean analysis components
+#
+---
+"Index Korean content":
+ - do:
+ indices.create:
+ index: test
+ body:
+ mappings:
+ type:
+ properties:
+ text:
+ type: text
+ analyzer: nori
+
+ - do:
+ index:
+ index: test
+ type: type
+ id: 1
+ body: { "text": "뿌리가 깊은 나무는" }
+ - do:
+ indices.refresh: {}
+
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ match:
+ text: 나무
+ - match: { hits.total: 1 }
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.3.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.3.0.jar.sha1
deleted file mode 100644
index 9442635addd..00000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4e6c63fa8ae005d81d12f0d88ffa98346b443ac4
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-1ed95c097b.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..76728498136
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+25c93466d0a2c41df0cf98de77d632f3f02fa98d
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.3.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.3.0.jar.sha1
deleted file mode 100644
index 780824c4d45..00000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-37b7ff0a6493f139cb77f5bda965ac0189c8efd1
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-1ed95c097b.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..509f08ed310
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+4688aaa48607ac26f6bf2567052019ab3fb2ff5e
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.3.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.3.0.jar.sha1
deleted file mode 100644
index ba241e6a099..00000000000
--- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d189185da23b2221c4d532da5e2cacce735f8a0c
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-1ed95c097b.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..a6dc434b03a
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+ad71de632c9363c3f200cd5a240686256c7db431
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.3.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.3.0.jar.sha1
deleted file mode 100644
index fb7e5befe47..00000000000
--- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-74462b51de45afe708f1042cc901fe7370413871
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-1ed95c097b.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..967957ac0ff
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+96a630a7c4916358f129f6bac8718108811efe1a
\ No newline at end of file
diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/NoHandlerIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/NoHandlerIT.java
new file mode 100644
index 00000000000..0a2d7ed9b06
--- /dev/null
+++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/NoHandlerIT.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.http;
+
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+import org.elasticsearch.client.Response;
+import org.elasticsearch.client.ResponseException;
+
+import java.io.IOException;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+
+public class NoHandlerIT extends HttpSmokeTestCase {
+
+ public void testNoHandlerRespectsAcceptHeader() throws IOException {
+ runTestNoHandlerRespectsAcceptHeader(
+ "application/json",
+ "application/json; charset=UTF-8",
+ "\"error\":\"no handler found for uri [/foo/bar/baz/qux/quux] and method [GET]\"");
+ runTestNoHandlerRespectsAcceptHeader(
+ "application/yaml",
+ "application/yaml",
+ "error: \"no handler found for uri [/foo/bar/baz/qux/quux] and method [GET]\"");
+ }
+
+ private void runTestNoHandlerRespectsAcceptHeader(
+ final String accept, final String contentType, final String expect) throws IOException {
+ final ResponseException e =
+ expectThrows(
+ ResponseException.class,
+ () -> getRestClient().performRequest("GET", "/foo/bar/baz/qux/quux", new BasicHeader("Accept", accept)));
+
+ final Response response = e.getResponse();
+ assertThat(response.getHeader("Content-Type"), equalTo(contentType));
+ assertThat(EntityUtils.toString(e.getResponse().getEntity()), containsString(expect));
+ assertThat(response.getStatusLine().getStatusCode(), is(400));
+ }
+
+}
diff --git a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash
index a62d690897e..79228adfee5 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash
+++ b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash
@@ -188,6 +188,10 @@ fi
install_and_check_plugin analysis kuromoji
}
+@test "[$GROUP] install nori plugin" {
+ install_and_check_plugin analysis nori
+}
+
@test "[$GROUP] install phonetic plugin" {
install_and_check_plugin analysis phonetic commons-codec-*.jar
}
@@ -320,6 +324,10 @@ fi
remove_plugin analysis-kuromoji
}
+@test "[$GROUP] remove nori plugin" {
+ remove_plugin analysis-nori
+}
+
@test "[$GROUP] remove phonetic plugin" {
remove_plugin analysis-phonetic
}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json
index 706cce5277a..ed22cc837d6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json
@@ -16,6 +16,10 @@
}
},
"params": {
+ "master_timeout": {
+ "type": "time",
+ "description": "Specify timeout for connection to master"
+ },
"ignore_unavailable": {
"type" : "boolean",
"description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)"
diff --git a/server/licenses/lucene-analyzers-common-7.3.0.jar.sha1 b/server/licenses/lucene-analyzers-common-7.3.0.jar.sha1
deleted file mode 100644
index 5a50f9dd77f..00000000000
--- a/server/licenses/lucene-analyzers-common-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4325a5cdf8d3fa23f326cd86a2297fee2bc844f5
\ No newline at end of file
diff --git a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..ad3125768ab
--- /dev/null
+++ b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+2b2be48f6622c150496e755497e7bdb8daa46030
\ No newline at end of file
diff --git a/server/licenses/lucene-backward-codecs-7.3.0.jar.sha1 b/server/licenses/lucene-backward-codecs-7.3.0.jar.sha1
deleted file mode 100644
index 309f301ad8c..00000000000
--- a/server/licenses/lucene-backward-codecs-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3b618a21a924cb35ac1f27d3ca47d9ed04f43588
\ No newline at end of file
diff --git a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..083cc9b1e39
--- /dev/null
+++ b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+6cbafc48e8ac4966377665eb3bbe93f9addf04a5
\ No newline at end of file
diff --git a/server/licenses/lucene-core-7.3.0.jar.sha1 b/server/licenses/lucene-core-7.3.0.jar.sha1
deleted file mode 100644
index e12c932b38d..00000000000
--- a/server/licenses/lucene-core-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-040e2de30c5e6bad868b144e371730200719ceb3
\ No newline at end of file
diff --git a/server/licenses/lucene-core-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-core-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..20da7d0f78e
--- /dev/null
+++ b/server/licenses/lucene-core-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+0b06e4f6514256a3f187a9892e520638b9c59e63
\ No newline at end of file
diff --git a/server/licenses/lucene-grouping-7.3.0.jar.sha1 b/server/licenses/lucene-grouping-7.3.0.jar.sha1
deleted file mode 100644
index 703384a64de..00000000000
--- a/server/licenses/lucene-grouping-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-20a5c472a8be9bec7aa40472791389e875b9e1f2
\ No newline at end of file
diff --git a/server/licenses/lucene-grouping-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-grouping-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..cb3b6eab5db
--- /dev/null
+++ b/server/licenses/lucene-grouping-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+4c71cef87fe513a7a96c2a7980ed6f7c2b015763
\ No newline at end of file
diff --git a/server/licenses/lucene-highlighter-7.3.0.jar.sha1 b/server/licenses/lucene-highlighter-7.3.0.jar.sha1
deleted file mode 100644
index 6e38e256063..00000000000
--- a/server/licenses/lucene-highlighter-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1f92c7d3d9bc2765fe6195bcc4fcb160d11175cc
\ No newline at end of file
diff --git a/server/licenses/lucene-highlighter-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..5ef97ed311d
--- /dev/null
+++ b/server/licenses/lucene-highlighter-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+665e044d1180100940bccd7e8e41dde48e342da3
\ No newline at end of file
diff --git a/server/licenses/lucene-join-7.3.0.jar.sha1 b/server/licenses/lucene-join-7.3.0.jar.sha1
deleted file mode 100644
index d7213d76a62..00000000000
--- a/server/licenses/lucene-join-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-da4af75a7e4fe7843fbfa4b58e6a238b6b706d64
\ No newline at end of file
diff --git a/server/licenses/lucene-join-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-join-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..62afc759b6d
--- /dev/null
+++ b/server/licenses/lucene-join-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+d343bbf5792f5969288b59b51179acd29d04f4ee
\ No newline at end of file
diff --git a/server/licenses/lucene-memory-7.3.0.jar.sha1 b/server/licenses/lucene-memory-7.3.0.jar.sha1
deleted file mode 100644
index 6bb4a4d832d..00000000000
--- a/server/licenses/lucene-memory-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fc45b02a5086ec454e6d6ae81fc2cbe7be1c0902
\ No newline at end of file
diff --git a/server/licenses/lucene-memory-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-memory-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..1c142661864
--- /dev/null
+++ b/server/licenses/lucene-memory-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+8915f3c93af3348655bcc204289f9011835738a2
\ No newline at end of file
diff --git a/server/licenses/lucene-misc-7.3.0.jar.sha1 b/server/licenses/lucene-misc-7.3.0.jar.sha1
deleted file mode 100644
index 43c777150a3..00000000000
--- a/server/licenses/lucene-misc-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b6a2418a94b84c29c4b9fcfe4381f2cc1aa4c214
\ No newline at end of file
diff --git a/server/licenses/lucene-misc-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-misc-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..de39af3dcef
--- /dev/null
+++ b/server/licenses/lucene-misc-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+e7dc67b42eca3b1546a36370b6dcda0f83b2eb7d
\ No newline at end of file
diff --git a/server/licenses/lucene-queries-7.3.0.jar.sha1 b/server/licenses/lucene-queries-7.3.0.jar.sha1
deleted file mode 100644
index b0ef2b4d0eb..00000000000
--- a/server/licenses/lucene-queries-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6292a5579a6ab3423ceca60d2ea41cd86481e7c0
\ No newline at end of file
diff --git a/server/licenses/lucene-queries-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-queries-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..6d1e73a5abc
--- /dev/null
+++ b/server/licenses/lucene-queries-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+5946d5e2be276f66e9ff6d6111acabb03a9330d9
\ No newline at end of file
diff --git a/server/licenses/lucene-queryparser-7.3.0.jar.sha1 b/server/licenses/lucene-queryparser-7.3.0.jar.sha1
deleted file mode 100644
index 87a1d74498d..00000000000
--- a/server/licenses/lucene-queryparser-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-95b2563e5337377dde2eb987b3fce144be5e7a77
\ No newline at end of file
diff --git a/server/licenses/lucene-queryparser-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..ae79d443ff5
--- /dev/null
+++ b/server/licenses/lucene-queryparser-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+d9fc5fc63f3d861e5af72e11373368e8a4c6bba6
\ No newline at end of file
diff --git a/server/licenses/lucene-sandbox-7.3.0.jar.sha1 b/server/licenses/lucene-sandbox-7.3.0.jar.sha1
deleted file mode 100644
index 605263a2296..00000000000
--- a/server/licenses/lucene-sandbox-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1efd2fa7cba1e359e3fbb8b4c11cab37024b2178
\ No newline at end of file
diff --git a/server/licenses/lucene-sandbox-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..50022d2e6de
--- /dev/null
+++ b/server/licenses/lucene-sandbox-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+ee283c0a1a717f3e0915de75864a93d043efaee3
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial-7.3.0.jar.sha1 b/server/licenses/lucene-spatial-7.3.0.jar.sha1
deleted file mode 100644
index 4fcd32b5d29..00000000000
--- a/server/licenses/lucene-spatial-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-93512c2160bdc3e602141329e5945a91918b6752
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-spatial-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..4371f75994f
--- /dev/null
+++ b/server/licenses/lucene-spatial-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+e1adf0220a7c052ac81e2919ffac24ac0e5b007c
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial-extras-7.3.0.jar.sha1 b/server/licenses/lucene-spatial-extras-7.3.0.jar.sha1
deleted file mode 100644
index 0f078420cdb..00000000000
--- a/server/licenses/lucene-spatial-extras-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-47090d8ddf99f6bbb64ee8ab7a76c3cd3165b88f
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..a27beb76ff6
--- /dev/null
+++ b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+6d9306053942c48f43392a634f11a95462b5996e
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial3d-7.3.0.jar.sha1 b/server/licenses/lucene-spatial3d-7.3.0.jar.sha1
deleted file mode 100644
index 268ed39a784..00000000000
--- a/server/licenses/lucene-spatial3d-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ed8f07d67445d5acde6597996461640b2d92fa08
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial3d-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..8db76c8605c
--- /dev/null
+++ b/server/licenses/lucene-spatial3d-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+2334e8c5f4d0f98659b30e0c2035296e4aae8ff5
\ No newline at end of file
diff --git a/server/licenses/lucene-suggest-7.3.0.jar.sha1 b/server/licenses/lucene-suggest-7.3.0.jar.sha1
deleted file mode 100644
index 798238ce58b..00000000000
--- a/server/licenses/lucene-suggest-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6034ccf6b27c659ab7a2678680bae8390fbfc40a
\ No newline at end of file
diff --git a/server/licenses/lucene-suggest-7.4.0-snapshot-1ed95c097b.jar.sha1 b/server/licenses/lucene-suggest-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..84acdbd0d0b
--- /dev/null
+++ b/server/licenses/lucene-suggest-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+f2b2c454eb7b5d73b9df1390ea4730ce3dd4e463
\ No newline at end of file
diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java
index 5142f1cb84b..896d7ab690e 100644
--- a/server/src/main/java/org/elasticsearch/Version.java
+++ b/server/src/main/java/org/elasticsearch/Version.java
@@ -171,10 +171,10 @@ public class Version implements Comparable, ToXContentFragment {
public static final int V_6_3_0_ID = 6030099;
public static final Version V_6_3_0 = new Version(V_6_3_0_ID, org.apache.lucene.util.Version.LUCENE_7_3_0);
public static final int V_6_4_0_ID = 6040099;
- public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_3_0);
+ public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final int V_7_0_0_alpha1_ID = 7000001;
public static final Version V_7_0_0_alpha1 =
- new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_3_0);
+ new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final Version CURRENT = V_7_0_0_alpha1;
static {
diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java
index 392b307a8aa..42ff4322403 100644
--- a/server/src/main/java/org/elasticsearch/action/ActionModule.java
+++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java
@@ -576,7 +576,7 @@ public class ActionModule extends AbstractModule {
registerHandler.accept(new RestOpenIndexAction(settings, restController));
registerHandler.accept(new RestUpdateSettingsAction(settings, restController));
- registerHandler.accept(new RestGetSettingsAction(settings, restController, indexScopedSettings, settingsFilter));
+ registerHandler.accept(new RestGetSettingsAction(settings, restController));
registerHandler.accept(new RestAnalyzeAction(settings, restController));
registerHandler.accept(new RestGetIndexTemplateAction(settings, restController));
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesResponse.java
index 6d4cb839345..a71f9fd8fd7 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesResponse.java
@@ -20,13 +20,14 @@
package org.elasticsearch.action.admin.cluster.repositories.get;
import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.cluster.metadata.RepositoriesMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
-import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
@@ -34,15 +35,15 @@ import java.util.List;
/**
* Get repositories response
*/
-public class GetRepositoriesResponse extends ActionResponse implements Iterable {
-
- private List repositories = Collections.emptyList();
+public class GetRepositoriesResponse extends ActionResponse implements ToXContentObject {
+ private RepositoriesMetaData repositories;
GetRepositoriesResponse() {
+ repositories = new RepositoriesMetaData(Collections.emptyList());
}
- GetRepositoriesResponse(List repositories) {
+ GetRepositoriesResponse(RepositoriesMetaData repositories) {
this.repositories = repositories;
}
@@ -52,43 +53,25 @@ public class GetRepositoriesResponse extends ActionResponse implements Iterable<
* @return list or repositories
*/
public List repositories() {
- return repositories;
+ return repositories.repositories();
}
@Override
public void readFrom(StreamInput in) throws IOException {
- super.readFrom(in);
- int size = in.readVInt();
- List repositoryListBuilder = new ArrayList<>(size);
- for (int j = 0; j < size; j++) {
- repositoryListBuilder.add(new RepositoryMetaData(
- in.readString(),
- in.readString(),
- Settings.readSettingsFromStream(in))
- );
- }
- repositories = Collections.unmodifiableList(repositoryListBuilder);
+ repositories = new RepositoriesMetaData(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
- super.writeTo(out);
- out.writeVInt(repositories.size());
- for (RepositoryMetaData repository : repositories) {
- out.writeString(repository.name());
- out.writeString(repository.type());
- Settings.writeSettingsToStream(repository.settings(), out);
- }
+ repositories.writeTo(out);
}
- /**
- * Iterator over the repositories data
- *
- * @return iterator over the repositories data
- */
@Override
- public Iterator iterator() {
- return repositories.iterator();
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ repositories.toXContent(builder, params);
+ builder.endObject();
+ return builder;
}
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java
index 6e7a0ca5cf4..c7474fc28cc 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java
@@ -75,9 +75,9 @@ public class TransportGetRepositoriesAction extends TransportMasterNodeReadActio
RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE);
if (request.repositories().length == 0 || (request.repositories().length == 1 && "_all".equals(request.repositories()[0]))) {
if (repositories != null) {
- listener.onResponse(new GetRepositoriesResponse(repositories.repositories()));
+ listener.onResponse(new GetRepositoriesResponse(repositories));
} else {
- listener.onResponse(new GetRepositoriesResponse(Collections.emptyList()));
+ listener.onResponse(new GetRepositoriesResponse(new RepositoriesMetaData(Collections.emptyList())));
}
} else {
if (repositories != null) {
@@ -102,7 +102,7 @@ public class TransportGetRepositoriesAction extends TransportMasterNodeReadActio
}
repositoryListBuilder.add(repositoryMetaData);
}
- listener.onResponse(new GetRepositoriesResponse(Collections.unmodifiableList(repositoryListBuilder)));
+ listener.onResponse(new GetRepositoriesResponse(new RepositoriesMetaData(repositoryListBuilder)));
} else {
listener.onFailure(new RepositoryMissingException(request.repositories()[0]));
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java
index 3a84543f340..0c4f63b71fb 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.indices.settings.get;
+import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.ValidateActions;
@@ -29,6 +30,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
+import java.util.Arrays;
+import java.util.Objects;
public class GetSettingsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable {
@@ -36,6 +39,7 @@ public class GetSettingsRequest extends MasterNodeReadRequest indexToSettings = ImmutableOpenMap.of();
+ private ImmutableOpenMap indexToDefaultSettings = ImmutableOpenMap.of();
- public GetSettingsResponse(ImmutableOpenMap indexToSettings) {
+ public GetSettingsResponse(ImmutableOpenMap indexToSettings,
+ ImmutableOpenMap indexToDefaultSettings) {
this.indexToSettings = indexToSettings;
+ this.indexToDefaultSettings = indexToDefaultSettings;
}
GetSettingsResponse() {
}
+ /**
+ * Returns a map of index name to {@link Settings} object. The returned {@link Settings}
+ * objects contain only those settings explicitly set on a given index. Any settings
+ * taking effect as defaults must be accessed via {@link #getIndexToDefaultSettings()}.
+ */
public ImmutableOpenMap getIndexToSettings() {
return indexToSettings;
}
+ /**
+ * If the originating {@link GetSettingsRequest} object was configured to include
+ * defaults, this will contain a mapping of index name to {@link Settings} objects.
+ * The returned {@link Settings} objects will contain only those settings taking
+ * effect as defaults. Any settings explicitly set on the index will be available
+ * via {@link #getIndexToSettings()}.
+ * See also {@link GetSettingsRequest#includeDefaults(boolean)}
+ */
+ public ImmutableOpenMap getIndexToDefaultSettings() {
+ return indexToDefaultSettings;
+ }
+
+ /**
+ * Returns the string value for the specified index and setting. If the includeDefaults
+ * flag was not set or set to false on the GetSettingsRequest, this method will only
+ * return a value where the setting was explicitly set on the index. If the includeDefaults
+ * flag was set to true on the GetSettingsRequest, this method will fall back to return the default
+ * value if the setting was not explicitly set.
+ */
public String getSetting(String index, String setting) {
Settings settings = indexToSettings.get(index);
if (setting != null) {
- return settings.get(setting);
+ if (settings != null && settings.hasValue(setting)) {
+ return settings.get(setting);
+ } else {
+ Settings defaultSettings = indexToDefaultSettings.get(index);
+ if (defaultSettings != null) {
+ return defaultSettings.get(setting);
+ } else {
+ return null;
+ }
+ }
} else {
return null;
}
@@ -55,12 +106,22 @@ public class GetSettingsResponse extends ActionResponse {
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
- int size = in.readVInt();
- ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder();
- for (int i = 0; i < size; i++) {
- builder.put(in.readString(), Settings.readSettingsFromStream(in));
+
+ int settingsSize = in.readVInt();
+ ImmutableOpenMap.Builder settingsBuilder = ImmutableOpenMap.builder();
+ for (int i = 0; i < settingsSize; i++) {
+ settingsBuilder.put(in.readString(), Settings.readSettingsFromStream(in));
}
- indexToSettings = builder.build();
+ ImmutableOpenMap.Builder defaultSettingsBuilder = ImmutableOpenMap.builder();
+
+ if (in.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
+ int defaultSettingsSize = in.readVInt();
+ for (int i = 0; i < defaultSettingsSize ; i++) {
+ defaultSettingsBuilder.put(in.readString(), Settings.readSettingsFromStream(in));
+ }
+ }
+ indexToSettings = settingsBuilder.build();
+ indexToDefaultSettings = defaultSettingsBuilder.build();
}
@Override
@@ -71,5 +132,121 @@ public class GetSettingsResponse extends ActionResponse {
out.writeString(cursor.key);
Settings.writeSettingsToStream(cursor.value, out);
}
+ if (out.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
+ out.writeVInt(indexToDefaultSettings.size());
+ for (ObjectObjectCursor cursor : indexToDefaultSettings) {
+ out.writeString(cursor.key);
+ Settings.writeSettingsToStream(cursor.value, out);
+ }
+ }
+ }
+
+ private static void parseSettingsField(XContentParser parser, String currentIndexName, Map indexToSettings,
+ Map indexToDefaultSettings) throws IOException {
+
+ if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+ switch (parser.currentName()) {
+ case "settings":
+ indexToSettings.put(currentIndexName, Settings.fromXContent(parser));
+ break;
+ case "defaults":
+ indexToDefaultSettings.put(currentIndexName, Settings.fromXContent(parser));
+ break;
+ default:
+ parser.skipChildren();
+ }
+ } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
+ parser.skipChildren();
+ }
+ parser.nextToken();
+ }
+
+ private static void parseIndexEntry(XContentParser parser, Map indexToSettings,
+ Map indexToDefaultSettings) throws IOException {
+ String indexName = parser.currentName();
+ parser.nextToken();
+ while (!parser.isClosed() && parser.currentToken() != XContentParser.Token.END_OBJECT) {
+ parseSettingsField(parser, indexName, indexToSettings, indexToDefaultSettings);
+ }
+ }
+ public static GetSettingsResponse fromXContent(XContentParser parser) throws IOException {
+ HashMap indexToSettings = new HashMap<>();
+ HashMap indexToDefaultSettings = new HashMap<>();
+
+ if (parser.currentToken() == null) {
+ parser.nextToken();
+ }
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
+ parser.nextToken();
+
+ while (!parser.isClosed()) {
+ if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+ //we must assume this is an index entry
+ parseIndexEntry(parser, indexToSettings, indexToDefaultSettings);
+ } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
+ parser.skipChildren();
+ } else {
+ parser.nextToken();
+ }
+ }
+
+ ImmutableOpenMap settingsMap = ImmutableOpenMap.builder().putAll(indexToSettings).build();
+ ImmutableOpenMap defaultSettingsMap =
+ ImmutableOpenMap.builder().putAll(indexToDefaultSettings).build();
+
+ return new GetSettingsResponse(settingsMap, defaultSettingsMap);
+ }
+
+ @Override
+ public String toString() {
+ try {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ XContentBuilder builder = new XContentBuilder(JsonXContent.jsonXContent, baos);
+ toXContent(builder, ToXContent.EMPTY_PARAMS, false);
+ return Strings.toString(builder);
+ } catch (IOException e) {
+ throw new IllegalStateException(e); //should not be possible here
+ }
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ return toXContent(builder, params, indexToDefaultSettings.isEmpty());
+ }
+
+ private XContentBuilder toXContent(XContentBuilder builder, Params params, boolean omitEmptySettings) throws IOException {
+ builder.startObject();
+ for (ObjectObjectCursor cursor : getIndexToSettings()) {
+ // no settings, jump over it to shorten the response data
+ if (omitEmptySettings && cursor.value.isEmpty()) {
+ continue;
+ }
+ builder.startObject(cursor.key);
+ builder.startObject("settings");
+ cursor.value.toXContent(builder, params);
+ builder.endObject();
+ if (indexToDefaultSettings.isEmpty() == false) {
+ builder.startObject("defaults");
+ indexToDefaultSettings.get(cursor.key).toXContent(builder, params);
+ builder.endObject();
+ }
+ builder.endObject();
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetSettingsResponse that = (GetSettingsResponse) o;
+ return Objects.equals(indexToSettings, that.indexToSettings) &&
+ Objects.equals(indexToDefaultSettings, that.indexToDefaultSettings);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(indexToSettings, indexToDefaultSettings);
}
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java
index 3109fa4d405..9ce3ab17d33 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java
@@ -37,19 +37,23 @@ import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.common.settings.IndexScopedSettings;
-import java.util.Map;
+import java.util.Arrays;
public class TransportGetSettingsAction extends TransportMasterNodeReadAction {
private final SettingsFilter settingsFilter;
+ private final IndexScopedSettings indexScopedSettings;
+
@Inject
public TransportGetSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, SettingsFilter settingsFilter, ActionFilters actionFilters,
- IndexNameExpressionResolver indexNameExpressionResolver) {
+ IndexNameExpressionResolver indexNameExpressionResolver, IndexScopedSettings indexedScopedSettings) {
super(settings, GetSettingsAction.NAME, transportService, clusterService, threadPool, actionFilters, GetSettingsRequest::new, indexNameExpressionResolver);
this.settingsFilter = settingsFilter;
+ this.indexScopedSettings = indexedScopedSettings;
}
@Override
@@ -69,25 +73,39 @@ public class TransportGetSettingsAction extends TransportMasterNodeReadAction listener) {
Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
ImmutableOpenMap.Builder indexToSettingsBuilder = ImmutableOpenMap.builder();
+ ImmutableOpenMap.Builder indexToDefaultSettingsBuilder = ImmutableOpenMap.builder();
for (Index concreteIndex : concreteIndices) {
IndexMetaData indexMetaData = state.getMetaData().index(concreteIndex);
if (indexMetaData == null) {
continue;
}
- Settings settings = settingsFilter.filter(indexMetaData.getSettings());
+ Settings indexSettings = settingsFilter.filter(indexMetaData.getSettings());
if (request.humanReadable()) {
- settings = IndexMetaData.addHumanReadableSettings(settings);
+ indexSettings = IndexMetaData.addHumanReadableSettings(indexSettings);
}
- if (CollectionUtils.isEmpty(request.names()) == false) {
- settings = settings.filter(k -> Regex.simpleMatch(request.names(), k));
+
+ if (isFilteredRequest(request)) {
+ indexSettings = indexSettings.filter(k -> Regex.simpleMatch(request.names(), k));
+ }
+
+ indexToSettingsBuilder.put(concreteIndex.getName(), indexSettings);
+ if (request.includeDefaults()) {
+ Settings defaultSettings = settingsFilter.filter(indexScopedSettings.diff(indexSettings, Settings.EMPTY));
+ if (isFilteredRequest(request)) {
+ defaultSettings = defaultSettings.filter(k -> Regex.simpleMatch(request.names(), k));
+ }
+ indexToDefaultSettingsBuilder.put(concreteIndex.getName(), defaultSettings);
}
- indexToSettingsBuilder.put(concreteIndex.getName(), settings);
}
- listener.onResponse(new GetSettingsResponse(indexToSettingsBuilder.build()));
+ listener.onResponse(new GetSettingsResponse(indexToSettingsBuilder.build(), indexToDefaultSettingsBuilder.build()));
}
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java
index 8cd1fac6f6f..b018e24a565 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java
@@ -39,6 +39,7 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
@@ -58,14 +59,14 @@ import java.util.Set;
import java.util.stream.Collectors;
import static org.elasticsearch.action.ValidateActions.addValidationError;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
/**
* A request to create an index template.
*/
-public class PutIndexTemplateRequest extends MasterNodeRequest implements IndicesRequest {
+public class PutIndexTemplateRequest extends MasterNodeRequest implements IndicesRequest, ToXContent {
private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(PutIndexTemplateRequest.class));
@@ -539,4 +540,34 @@ public class PutIndexTemplateRequest extends MasterNodeRequest entry : mappings.entrySet()) {
+ Map mapping = XContentHelper.convertToMap(new BytesArray(entry.getValue()), false).v2();
+ builder.field(entry.getKey(), mapping);
+ }
+ builder.endObject();
+
+ builder.startObject("aliases");
+ for (Alias alias : aliases) {
+ alias.toXContent(builder, params);
+ }
+ builder.endObject();
+ return builder;
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateResponse.java
index bf6e05a6c7b..6c8a5291b12 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateResponse.java
@@ -21,6 +21,8 @@ package org.elasticsearch.action.admin.indices.template.put;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
@@ -47,4 +49,14 @@ public class PutIndexTemplateResponse extends AcknowledgedResponse {
super.writeTo(out);
writeAcknowledged(out);
}
+
+ private static final ConstructingObjectParser PARSER;
+ static {
+ PARSER = new ConstructingObjectParser<>("put_index_template", true, args -> new PutIndexTemplateResponse((boolean) args[0]));
+ declareAcknowledgedField(PARSER);
+ }
+
+ public static PutIndexTemplateResponse fromXContent(XContentParser parser) {
+ return PARSER.apply(parser, null);
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoriesMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoriesMetaData.java
index 7a0b9285896..c813ba76e82 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoriesMetaData.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoriesMetaData.java
@@ -33,6 +33,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
@@ -51,7 +52,7 @@ public class RepositoriesMetaData extends AbstractNamedDiffable implemen
* @param repositories list of repositories
*/
public RepositoriesMetaData(List repositories) {
- this.repositories = repositories;
+ this.repositories = Collections.unmodifiableList(repositories);
}
/**
@@ -107,7 +108,7 @@ public class RepositoriesMetaData extends AbstractNamedDiffable implemen
for (int i = 0; i < repository.length; i++) {
repository[i] = new RepositoryMetaData(in);
}
- this.repositories = Arrays.asList(repository);
+ this.repositories = Collections.unmodifiableList(Arrays.asList(repository));
}
public static NamedDiff readDiffFrom(StreamInput in) throws IOException {
diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java
index 9b994089be0..04bbb9279da 100644
--- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java
+++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java
@@ -31,6 +31,7 @@ import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
+import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
@@ -317,26 +318,24 @@ public class KeyStoreWrapper implements SecureSettings {
DataInputStream input = new DataInputStream(bytesStream)) {
int saltLen = input.readInt();
salt = new byte[saltLen];
- if (input.read(salt) != saltLen) {
- throw new SecurityException("Keystore has been corrupted or tampered with");
- }
+ input.readFully(salt);
int ivLen = input.readInt();
iv = new byte[ivLen];
- if (input.read(iv) != ivLen) {
- throw new SecurityException("Keystore has been corrupted or tampered with");
- }
+ input.readFully(iv);
int encryptedLen = input.readInt();
encryptedBytes = new byte[encryptedLen];
- if (input.read(encryptedBytes) != encryptedLen) {
+ input.readFully(encryptedBytes);
+ if (input.read() != -1) {
throw new SecurityException("Keystore has been corrupted or tampered with");
}
+ } catch (EOFException e) {
+ throw new SecurityException("Keystore has been corrupted or tampered with", e);
}
Cipher cipher = createCipher(Cipher.DECRYPT_MODE, password, salt, iv);
try (ByteArrayInputStream bytesStream = new ByteArrayInputStream(encryptedBytes);
CipherInputStream cipherStream = new CipherInputStream(bytesStream, cipher);
DataInputStream input = new DataInputStream(cipherStream)) {
-
entries.set(new HashMap<>());
int numEntries = input.readInt();
while (numEntries-- > 0) {
@@ -344,11 +343,14 @@ public class KeyStoreWrapper implements SecureSettings {
EntryType entryType = EntryType.valueOf(input.readUTF());
int entrySize = input.readInt();
byte[] entryBytes = new byte[entrySize];
- if (input.read(entryBytes) != entrySize) {
- throw new SecurityException("Keystore has been corrupted or tampered with");
- }
+ input.readFully(entryBytes);
entries.get().put(setting, new Entry(entryType, entryBytes));
}
+ if (input.read() != -1) {
+ throw new SecurityException("Keystore has been corrupted or tampered with");
+ }
+ } catch (EOFException e) {
+ throw new SecurityException("Keystore has been corrupted or tampered with", e);
}
}
@@ -360,7 +362,6 @@ public class KeyStoreWrapper implements SecureSettings {
Cipher cipher = createCipher(Cipher.ENCRYPT_MODE, password, salt, iv);
try (CipherOutputStream cipherStream = new CipherOutputStream(bytes, cipher);
DataOutputStream output = new DataOutputStream(cipherStream)) {
-
output.writeInt(entries.get().size());
for (Map.Entry mapEntry : entries.get().entrySet()) {
output.writeUTF(mapEntry.getKey());
@@ -370,7 +371,6 @@ public class KeyStoreWrapper implements SecureSettings {
output.write(entry.bytes);
}
}
-
return bytes.toByteArray();
}
diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java
index 111663497d7..aae63f041fa 100644
--- a/server/src/main/java/org/elasticsearch/rest/RestController.java
+++ b/server/src/main/java/org/elasticsearch/rest/RestController.java
@@ -401,9 +401,15 @@ public class RestController extends AbstractComponent implements HttpServerTrans
* Handle a requests with no candidate handlers (return a 400 Bad Request
* error).
*/
- private void handleBadRequest(RestRequest request, RestChannel channel) {
- channel.sendResponse(new BytesRestResponse(BAD_REQUEST,
- "No handler found for uri [" + request.uri() + "] and method [" + request.method() + "]"));
+ private void handleBadRequest(RestRequest request, RestChannel channel) throws IOException {
+ try (XContentBuilder builder = channel.newErrorBuilder()) {
+ builder.startObject();
+ {
+ builder.field("error", "no handler found for uri [" + request.uri() + "] and method [" + request.method() + "]");
+ }
+ builder.endObject();
+ channel.sendResponse(new BytesRestResponse(BAD_REQUEST, builder));
+ }
}
/**
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java
index ed6fe2f95f4..4e06497c7fe 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java
@@ -22,25 +22,19 @@ package org.elasticsearch.rest.action.admin.cluster;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse;
import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.cluster.metadata.RepositoriesMetaData;
-import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
-import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.BaseRestHandler;
-import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
-import org.elasticsearch.rest.RestResponse;
-import org.elasticsearch.rest.action.RestBuilderListener;
+import org.elasticsearch.rest.action.RestToXContentListener;
import java.io.IOException;
import java.util.Set;
import static org.elasticsearch.client.Requests.getRepositoryRequest;
import static org.elasticsearch.rest.RestRequest.Method.GET;
-import static org.elasticsearch.rest.RestStatus.OK;
/**
* Returns repository information
@@ -69,18 +63,8 @@ public class RestGetRepositoriesAction extends BaseRestHandler {
getRepositoriesRequest.local(request.paramAsBoolean("local", getRepositoriesRequest.local()));
settingsFilter.addFilterSettingParams(request);
return channel ->
- client.admin().cluster().getRepositories(getRepositoriesRequest, new RestBuilderListener<GetRepositoriesResponse>(channel) {
- @Override
- public RestResponse buildResponse(GetRepositoriesResponse response, XContentBuilder builder) throws Exception {
- builder.startObject();
- for (RepositoryMetaData repositoryMetaData : response.repositories()) {
- RepositoriesMetaData.toXContent(repositoryMetaData, builder, request);
- }
- builder.endObject();
-
- return new BytesRestResponse(OK, builder);
- }
- });
+ client.admin().cluster().getRepositories(getRepositoriesRequest,
+ new RestToXContentListener<>(channel));
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java
index 8ac7f12312a..9791994c773 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java
@@ -44,18 +44,12 @@ import static org.elasticsearch.rest.RestStatus.OK;
public class RestGetSettingsAction extends BaseRestHandler {
- private final IndexScopedSettings indexScopedSettings;
- private final SettingsFilter settingsFilter;
-
- public RestGetSettingsAction(Settings settings, RestController controller, IndexScopedSettings indexScopedSettings,
- final SettingsFilter settingsFilter) {
+ public RestGetSettingsAction(Settings settings, RestController controller) {
super(settings);
- this.indexScopedSettings = indexScopedSettings;
controller.registerHandler(GET, "/_settings/{name}", this);
controller.registerHandler(GET, "/{index}/_settings", this);
controller.registerHandler(GET, "/{index}/_settings/{name}", this);
controller.registerHandler(GET, "/{index}/_setting/{name}", this);
- this.settingsFilter = settingsFilter;
}
@Override
@@ -73,31 +67,16 @@ public class RestGetSettingsAction extends BaseRestHandler {
.indices(Strings.splitStringByCommaToArray(request.param("index")))
.indicesOptions(IndicesOptions.fromRequest(request, IndicesOptions.strictExpandOpen()))
.humanReadable(request.hasParam("human"))
+ .includeDefaults(renderDefaults)
.names(names);
getSettingsRequest.local(request.paramAsBoolean("local", getSettingsRequest.local()));
+ getSettingsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getSettingsRequest.masterNodeTimeout()));
return channel -> client.admin().indices().getSettings(getSettingsRequest, new RestBuilderListener<GetSettingsResponse>(channel) {
@Override
public RestResponse buildResponse(GetSettingsResponse getSettingsResponse, XContentBuilder builder) throws Exception {
- builder.startObject();
- for (ObjectObjectCursor<String, Settings> cursor : getSettingsResponse.getIndexToSettings()) {
- // no settings, jump over it to shorten the response data
- if (cursor.value.isEmpty()) {
- continue;
- }
- builder.startObject(cursor.key);
- builder.startObject("settings");
- cursor.value.toXContent(builder, request);
- builder.endObject();
- if (renderDefaults) {
- builder.startObject("defaults");
- settingsFilter.filter(indexScopedSettings.diff(cursor.value, settings)).toXContent(builder, request);
- builder.endObject();
- }
- builder.endObject();
- }
- builder.endObject();
+ getSettingsResponse.toXContent(builder, request);
return new BytesRestResponse(OK, builder);
}
});
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java
new file mode 100644
index 00000000000..11f0188c8c0
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.indices.settings.get;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.IndicesRequest;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.replication.ClusterStateCreationUtils;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.settings.IndexScopedSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsFilter;
+import org.elasticsearch.common.settings.SettingsModule;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.transport.CapturingTransport;
+import org.elasticsearch.threadpool.TestThreadPool;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+import org.junit.After;
+import org.junit.Before;
+
+import java.util.Collections;
+import java.util.concurrent.TimeUnit;
+
+import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
+
+public class GetSettingsActionTests extends ESTestCase {
+
+ private TransportService transportService;
+ private ClusterService clusterService;
+ private ThreadPool threadPool;
+ private SettingsFilter settingsFilter;
+ private final String indexName = "test_index";
+
+ private TestTransportGetSettingsAction getSettingsAction;
+
+ class TestTransportGetSettingsAction extends TransportGetSettingsAction {
+ TestTransportGetSettingsAction() {
+ super(Settings.EMPTY, GetSettingsActionTests.this.transportService, GetSettingsActionTests.this.clusterService,
+ GetSettingsActionTests.this.threadPool, settingsFilter, new ActionFilters(Collections.emptySet()),
+ new Resolver(Settings.EMPTY), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS);
+ }
+ @Override
+ protected void masterOperation(GetSettingsRequest request, ClusterState state, ActionListener<GetSettingsResponse> listener) {
+ ClusterState stateWithIndex = ClusterStateCreationUtils.state(indexName, 1, 1);
+ super.masterOperation(request, stateWithIndex, listener);
+ }
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+
+ settingsFilter = new SettingsModule(Settings.EMPTY, Collections.emptyList(), Collections.emptyList()).getSettingsFilter();
+ threadPool = new TestThreadPool("GetSettingsActionTests");
+ clusterService = createClusterService(threadPool);
+ CapturingTransport capturingTransport = new CapturingTransport();
+ transportService = new TransportService(clusterService.getSettings(), capturingTransport, threadPool,
+ TransportService.NOOP_TRANSPORT_INTERCEPTOR,
+ boundAddress -> clusterService.localNode(), null, Collections.emptySet());
+ transportService.start();
+ transportService.acceptIncomingRequests();
+ getSettingsAction = new GetSettingsActionTests.TestTransportGetSettingsAction();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
+ threadPool = null;
+ clusterService.close();
+ super.tearDown();
+ }
+
+ public void testIncludeDefaults() {
+ GetSettingsRequest noDefaultsRequest = new GetSettingsRequest().indices(indexName);
+ getSettingsAction.execute(null, noDefaultsRequest, ActionListener.wrap(noDefaultsResponse -> {
+ assertNull("index.refresh_interval should be null as it was never set", noDefaultsResponse.getSetting(indexName,
+ "index.refresh_interval"));
+ }, exception -> {
+ throw new AssertionError(exception);
+ }));
+
+ GetSettingsRequest defaultsRequest = new GetSettingsRequest().indices(indexName).includeDefaults(true);
+
+ getSettingsAction.execute(null, defaultsRequest, ActionListener.wrap(defaultsResponse -> {
+ assertNotNull("index.refresh_interval should be set as we are including defaults", defaultsResponse.getSetting(indexName,
+ "index.refresh_interval"));
+ }, exception -> {
+ throw new AssertionError(exception);
+ }));
+
+ }
+
+ public void testIncludeDefaultsWithFiltering() {
+ GetSettingsRequest defaultsRequest = new GetSettingsRequest().indices(indexName).includeDefaults(true)
+ .names("index.refresh_interval");
+ getSettingsAction.execute(null, defaultsRequest, ActionListener.wrap(defaultsResponse -> {
+ assertNotNull("index.refresh_interval should be set as we are including defaults", defaultsResponse.getSetting(indexName,
+ "index.refresh_interval"));
+ assertNull("index.number_of_shards should be null as this query is filtered",
+ defaultsResponse.getSetting(indexName, "index.number_of_shards"));
+ assertNull("index.warmer.enabled should be null as this query is filtered",
+ defaultsResponse.getSetting(indexName, "index.warmer.enabled"));
+ }, exception -> {
+ throw new AssertionError(exception);
+ }));
+ }
+
+ static class Resolver extends IndexNameExpressionResolver {
+ Resolver(Settings settings) {
+ super(settings);
+ }
+
+ @Override
+ public String[] concreteIndexNames(ClusterState state, IndicesRequest request) {
+ return request.indices();
+ }
+
+ @Override
+ public Index[] concreteIndices(ClusterState state, IndicesRequest request) {
+ Index[] out = new Index[request.indices().length];
+ for (int x = 0; x < out.length; x++) {
+ out[x] = new Index(request.indices()[x], "_na_");
+ }
+ return out;
+ }
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequestTests.java
new file mode 100644
index 00000000000..d70c7702991
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequestTests.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.indices.settings.get;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.util.Base64;
+
+public class GetSettingsRequestTests extends ESTestCase {
+ private static final String TEST_622_REQUEST_BYTES = "ADwDAAEKdGVzdF9pbmRleA4BEHRlc3Rfc2V0dGluZ19rZXkB";
+ private static final GetSettingsRequest TEST_622_REQUEST = new GetSettingsRequest()
+ .indices("test_index")
+ .names("test_setting_key")
+ .humanReadable(true);
+ private static final GetSettingsRequest TEST_700_REQUEST = new GetSettingsRequest()
+ .includeDefaults(true)
+ .humanReadable(true)
+ .indices("test_index")
+ .names("test_setting_key");
+
+ public void testSerdeRoundTrip() throws IOException {
+ BytesStreamOutput bso = new BytesStreamOutput();
+ TEST_700_REQUEST.writeTo(bso);
+
+ byte[] responseBytes = BytesReference.toBytes(bso.bytes());
+ StreamInput si = StreamInput.wrap(responseBytes);
+ GetSettingsRequest deserialized = new GetSettingsRequest(si);
+ assertEquals(TEST_700_REQUEST, deserialized);
+ }
+
+ public void testSerializeBackwardsCompatibility() throws IOException {
+ BytesStreamOutput bso = new BytesStreamOutput();
+ bso.setVersion(Version.V_6_2_2);
+ TEST_700_REQUEST.writeTo(bso);
+
+ byte[] responseBytes = BytesReference.toBytes(bso.bytes());
+ assertEquals(TEST_622_REQUEST_BYTES, Base64.getEncoder().encodeToString(responseBytes));
+ }
+
+ public void testDeserializeBackwardsCompatibility() throws IOException {
+ StreamInput si = StreamInput.wrap(Base64.getDecoder().decode(TEST_622_REQUEST_BYTES));
+ si.setVersion(Version.V_6_2_2);
+ GetSettingsRequest deserialized = new GetSettingsRequest(si);
+ assertEquals(TEST_622_REQUEST, deserialized);
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java
new file mode 100644
index 00000000000..cf125257c36
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.indices.settings.get;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.settings.IndexScopedSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.RandomCreateIndexGenerator;
+import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+import org.junit.Assert;
+
+import java.io.IOException;
+import java.util.Base64;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.function.Predicate;
+
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
+import static org.elasticsearch.index.IndexSettings.INDEX_REFRESH_INTERVAL_SETTING;
+
+public class GetSettingsResponseTests extends AbstractStreamableXContentTestCase<GetSettingsResponse> {
+
+ /*
+ index.number_of_shards=2,index.number_of_replicas=1. The below base64'd bytes were generated by
+ code from the 6.2.2 tag.
+ */
+ private static final String TEST_6_2_2_RESPONSE_BYTES =
+ "AQppbmRleF9uYW1lAhhpbmRleC5udW1iZXJfb2ZfcmVwbGljYXMAATEWaW5kZXgubnVtYmVyX29mX3NoYXJkcwABMg==";
+
+ /* This response object was generated using similar code to the code used to create the above bytes */
+ private static final GetSettingsResponse TEST_6_2_2_RESPONSE_INSTANCE = getExpectedTest622Response();
+
+ @Override
+ protected GetSettingsResponse createBlankInstance() {
+ return new GetSettingsResponse();
+ }
+
+ @Override
+ protected GetSettingsResponse createTestInstance() {
+ HashMap<String, Settings> indexToSettings = new HashMap<>();
+ HashMap<String, Settings> indexToDefaultSettings = new HashMap<>();
+
+ IndexScopedSettings indexScopedSettings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS;
+
+ Set<String> indexNames = new HashSet<String>();
+ int numIndices = randomIntBetween(1, 5);
+ for (int x=0;x<numIndices;x++) {
+ String indexName = randomAlphaOfLength(5);
+ indexNames.add(indexName);
+ }
+
+ for (String indexName : indexNames) {
+ Settings.Builder builder = Settings.builder();
+ builder.put(RandomCreateIndexGenerator.randomIndexSettings());
+ indexToSettings.put(indexName, builder.build());
+ }
+ ImmutableOpenMap<String, Settings> immutableIndexToSettings =
+ ImmutableOpenMap.<String, Settings>builder().putAll(indexToSettings).build();
+
+
+ if (randomBoolean()) {
+ for (String indexName : indexToSettings.keySet()) {
+ Settings defaultSettings = indexScopedSettings.diff(indexToSettings.get(indexName), Settings.EMPTY);
+ indexToDefaultSettings.put(indexName, defaultSettings);
+ }
+ }
+
+ ImmutableOpenMap<String, Settings> immutableIndexToDefaultSettings =
+ ImmutableOpenMap.<String, Settings>builder().putAll(indexToDefaultSettings).build();
+
+ return new GetSettingsResponse(immutableIndexToSettings, immutableIndexToDefaultSettings);
+ }
+
+ @Override
+ protected GetSettingsResponse doParseInstance(XContentParser parser) throws IOException {
+ return GetSettingsResponse.fromXContent(parser);
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ //we do not want to add new fields at the root (index-level), or inside settings blocks
+ return f -> f.equals("") || f.contains(".settings") || f.contains(".defaults");
+ }
+
+ private static GetSettingsResponse getExpectedTest622Response() {
+ /* This is a fairly direct copy of the code used to generate the base64'd response above -- with the caveat that the constructor
+ has been modified so that the code compiles on this version of elasticsearch
+ */
+ HashMap<String, Settings> indexToSettings = new HashMap<>();
+ Settings.Builder builder = Settings.builder();
+
+ builder.put(SETTING_NUMBER_OF_SHARDS, 2);
+ builder.put(SETTING_NUMBER_OF_REPLICAS, 1);
+ indexToSettings.put("index_name", builder.build());
+ GetSettingsResponse response = new GetSettingsResponse(ImmutableOpenMap.<String, Settings>builder().putAll(indexToSettings).build
+ (), ImmutableOpenMap.of());
+ return response;
+ }
+
+ private static GetSettingsResponse getResponseWithNewFields() {
+ HashMap<String, Settings> indexToDefaultSettings = new HashMap<>();
+ Settings.Builder builder = Settings.builder();
+
+ builder.put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1s");
+ indexToDefaultSettings.put("index_name", builder.build());
+ ImmutableOpenMap<String, Settings> defaultsMap = ImmutableOpenMap.<String, Settings>builder().putAll(indexToDefaultSettings)
+ .build();
+ return new GetSettingsResponse(getExpectedTest622Response().getIndexToSettings(), defaultsMap);
+ }
+
+ public void testCanDecode622Response() throws IOException {
+ StreamInput si = StreamInput.wrap(Base64.getDecoder().decode(TEST_6_2_2_RESPONSE_BYTES));
+ si.setVersion(Version.V_6_2_2);
+ GetSettingsResponse response = new GetSettingsResponse();
+ response.readFrom(si);
+
+ Assert.assertEquals(TEST_6_2_2_RESPONSE_INSTANCE, response);
+ }
+
+ public void testCanOutput622Response() throws IOException {
+ GetSettingsResponse responseWithExtraFields = getResponseWithNewFields();
+ BytesStreamOutput bso = new BytesStreamOutput();
+ bso.setVersion(Version.V_6_2_2);
+ responseWithExtraFields.writeTo(bso);
+
+ String base64OfResponse = Base64.getEncoder().encodeToString(BytesReference.toBytes(bso.bytes()));
+
+ Assert.assertEquals(TEST_6_2_2_RESPONSE_BYTES, base64OfResponse);
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java
index 72cbe2bd9ec..29421345259 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java
@@ -20,10 +20,15 @@ package org.elasticsearch.action.admin.indices.template.put;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.yaml.YamlXContent;
@@ -35,6 +40,7 @@ import java.util.Base64;
import java.util.Collections;
import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;
@@ -131,4 +137,52 @@ public class PutIndexTemplateRequestTests extends ESTestCase {
assertThat(noError, is(nullValue()));
}
+ private PutIndexTemplateRequest randomPutIndexTemplateRequest() throws IOException {
+ PutIndexTemplateRequest request = new PutIndexTemplateRequest();
+ request.name("test");
+ if (randomBoolean()){
+ request.version(randomInt());
+ }
+ if (randomBoolean()){
+ request.order(randomInt());
+ }
+ request.patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false)));
+ int numAlias = between(0, 5);
+ for (int i = 0; i < numAlias; i++) {
+ Alias alias = new Alias(randomRealisticUnicodeOfLengthBetween(1, 10));
+ if (randomBoolean()) {
+ alias.indexRouting(randomRealisticUnicodeOfLengthBetween(1, 10));
+ }
+ if (randomBoolean()) {
+ alias.searchRouting(randomRealisticUnicodeOfLengthBetween(1, 10));
+ }
+ request.alias(alias);
+ }
+ if (randomBoolean()) {
+ request.mapping("doc", XContentFactory.jsonBuilder().startObject()
+ .startObject("doc").startObject("properties")
+ .startObject("field-" + randomInt()).field("type", randomFrom("keyword", "text")).endObject()
+ .endObject().endObject().endObject());
+ }
+ if (randomBoolean()){
+ request.settings(Settings.builder().put("setting1", randomLong()).put("setting2", randomTimeValue()).build());
+ }
+ return request;
+ }
+
+ public void testFromToXContentPutTemplateRequest() throws Exception {
+ for (int i = 0; i < 10; i++) {
+ PutIndexTemplateRequest expected = randomPutIndexTemplateRequest();
+ XContentType xContentType = randomFrom(XContentType.values());
+ BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean());
+ PutIndexTemplateRequest parsed = new PutIndexTemplateRequest().source(shuffled, xContentType);
+ assertNotSame(expected, parsed);
+ assertThat(parsed.version(), equalTo(expected.version()));
+ assertThat(parsed.order(), equalTo(expected.order()));
+ assertThat(parsed.patterns(), equalTo(expected.patterns()));
+ assertThat(parsed.aliases(), equalTo(expected.aliases()));
+ assertThat(parsed.mappings(), equalTo(expected.mappings()));
+ assertThat(parsed.settings(), equalTo(expected.settings()));
+ }
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java
index a59cdf13c13..e2283608736 100644
--- a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java
+++ b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java
@@ -19,36 +19,41 @@
package org.elasticsearch.common.settings;
+import javax.crypto.Cipher;
+import javax.crypto.CipherOutputStream;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
+import javax.crypto.spec.GCMParameterSpec;
import javax.crypto.spec.PBEKeySpec;
+import javax.crypto.spec.SecretKeySpec;
import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
+import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
-import java.nio.CharBuffer;
-import java.nio.charset.CharsetEncoder;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystem;
import java.nio.file.Path;
import java.security.KeyStore;
+import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.SimpleFSDirectory;
+import org.elasticsearch.common.Randomness;
import org.elasticsearch.core.internal.io.IOUtils;
-import org.elasticsearch.bootstrap.BootstrapSettings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
+import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
public class KeyStoreWrapperTests extends ESTestCase {
@@ -104,6 +109,149 @@ public class KeyStoreWrapperTests extends ESTestCase {
assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString());
}
+ public void testFailWhenCannotConsumeSecretStream() throws Exception {
+ Path configDir = env.configFile();
+ SimpleFSDirectory directory = new SimpleFSDirectory(configDir);
+ try (IndexOutput indexOutput = directory.createOutput("elasticsearch.keystore", IOContext.DEFAULT)) {
+ CodecUtil.writeHeader(indexOutput, "elasticsearch.keystore", 3);
+ indexOutput.writeByte((byte) 0); // No password
+ SecureRandom random = Randomness.createSecure();
+ byte[] salt = new byte[64];
+ random.nextBytes(salt);
+ byte[] iv = new byte[12];
+ random.nextBytes(iv);
+ ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+ CipherOutputStream cipherStream = getCipherStream(bytes, salt, iv);
+ DataOutputStream output = new DataOutputStream(cipherStream);
+ // Indicate that the secret string is longer than it is so readFully() fails
+ possiblyAlterSecretString(output, -4);
+ cipherStream.close();
+ final byte[] encryptedBytes = bytes.toByteArray();
+ possiblyAlterEncryptedBytes(indexOutput, salt, iv, encryptedBytes, 0);
+ CodecUtil.writeFooter(indexOutput);
+ }
+
+ KeyStoreWrapper keystore = KeyStoreWrapper.load(configDir);
+ SecurityException e = expectThrows(SecurityException.class, () -> keystore.decrypt(new char[0]));
+ assertThat(e.getMessage(), containsString("Keystore has been corrupted or tampered with"));
+ assertThat(e.getCause(), instanceOf(EOFException.class));
+ }
+
+ public void testFailWhenCannotConsumeEncryptedBytesStream() throws Exception {
+ Path configDir = env.configFile();
+ SimpleFSDirectory directory = new SimpleFSDirectory(configDir);
+ try (IndexOutput indexOutput = directory.createOutput("elasticsearch.keystore", IOContext.DEFAULT)) {
+ CodecUtil.writeHeader(indexOutput, "elasticsearch.keystore", 3);
+ indexOutput.writeByte((byte) 0); // No password
+ SecureRandom random = Randomness.createSecure();
+ byte[] salt = new byte[64];
+ random.nextBytes(salt);
+ byte[] iv = new byte[12];
+ random.nextBytes(iv);
+ ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+ CipherOutputStream cipherStream = getCipherStream(bytes, salt, iv);
+ DataOutputStream output = new DataOutputStream(cipherStream);
+
+ possiblyAlterSecretString(output, 0);
+ cipherStream.close();
+ final byte[] encryptedBytes = bytes.toByteArray();
+ // Indicate that the encryptedBytes is larger than it is so readFully() fails
+ possiblyAlterEncryptedBytes(indexOutput, salt, iv, encryptedBytes, -12);
+ CodecUtil.writeFooter(indexOutput);
+ }
+
+ KeyStoreWrapper keystore = KeyStoreWrapper.load(configDir);
+ SecurityException e = expectThrows(SecurityException.class, () -> keystore.decrypt(new char[0]));
+ assertThat(e.getMessage(), containsString("Keystore has been corrupted or tampered with"));
+ assertThat(e.getCause(), instanceOf(EOFException.class));
+ }
+
+ public void testFailWhenSecretStreamNotConsumed() throws Exception {
+ Path configDir = env.configFile();
+ SimpleFSDirectory directory = new SimpleFSDirectory(configDir);
+ try (IndexOutput indexOutput = directory.createOutput("elasticsearch.keystore", IOContext.DEFAULT)) {
+ CodecUtil.writeHeader(indexOutput, "elasticsearch.keystore", 3);
+ indexOutput.writeByte((byte) 0); // No password
+ SecureRandom random = Randomness.createSecure();
+ byte[] salt = new byte[64];
+ random.nextBytes(salt);
+ byte[] iv = new byte[12];
+ random.nextBytes(iv);
+ ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+ CipherOutputStream cipherStream = getCipherStream(bytes, salt, iv);
+ DataOutputStream output = new DataOutputStream(cipherStream);
+ // So that readFully during decryption will not consume the entire stream
+ possiblyAlterSecretString(output, 4);
+ cipherStream.close();
+ final byte[] encryptedBytes = bytes.toByteArray();
+ possiblyAlterEncryptedBytes(indexOutput, salt, iv, encryptedBytes, 0);
+ CodecUtil.writeFooter(indexOutput);
+ }
+
+ KeyStoreWrapper keystore = KeyStoreWrapper.load(configDir);
+ SecurityException e = expectThrows(SecurityException.class, () -> keystore.decrypt(new char[0]));
+ assertThat(e.getMessage(), containsString("Keystore has been corrupted or tampered with"));
+ }
+
+ public void testFailWhenEncryptedBytesStreamIsNotConsumed() throws Exception {
+ Path configDir = env.configFile();
+ SimpleFSDirectory directory = new SimpleFSDirectory(configDir);
+ try (IndexOutput indexOutput = directory.createOutput("elasticsearch.keystore", IOContext.DEFAULT)) {
+ CodecUtil.writeHeader(indexOutput, "elasticsearch.keystore", 3);
+ indexOutput.writeByte((byte) 0); // No password
+ SecureRandom random = Randomness.createSecure();
+ byte[] salt = new byte[64];
+ random.nextBytes(salt);
+ byte[] iv = new byte[12];
+ random.nextBytes(iv);
+ ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+ CipherOutputStream cipherStream = getCipherStream(bytes, salt, iv);
+ DataOutputStream output = new DataOutputStream(cipherStream);
+ possiblyAlterSecretString(output, 0);
+ cipherStream.close();
+ final byte[] encryptedBytes = bytes.toByteArray();
+ possiblyAlterEncryptedBytes(indexOutput, salt, iv, encryptedBytes, randomIntBetween(2, encryptedBytes.length));
+ CodecUtil.writeFooter(indexOutput);
+ }
+
+ KeyStoreWrapper keystore = KeyStoreWrapper.load(configDir);
+ SecurityException e = expectThrows(SecurityException.class, () -> keystore.decrypt(new char[0]));
+ assertThat(e.getMessage(), containsString("Keystore has been corrupted or tampered with"));
+ }
+
+ private CipherOutputStream getCipherStream(ByteArrayOutputStream bytes, byte[] salt, byte[] iv) throws Exception {
+ PBEKeySpec keySpec = new PBEKeySpec(new char[0], salt, 10000, 128);
+ SecretKeyFactory keyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512");
+ SecretKey secretKey = keyFactory.generateSecret(keySpec);
+ SecretKeySpec secret = new SecretKeySpec(secretKey.getEncoded(), "AES");
+ GCMParameterSpec spec = new GCMParameterSpec(128, iv);
+ Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
+ cipher.init(Cipher.ENCRYPT_MODE, secret, spec);
+ cipher.updateAAD(salt);
+ return new CipherOutputStream(bytes, cipher);
+ }
+
+ private void possiblyAlterSecretString(DataOutputStream output, int truncLength) throws Exception {
+ byte[] secret_value = "super_secret_value".getBytes(StandardCharsets.UTF_8);
+ output.writeInt(1); // One entry
+ output.writeUTF("string_setting");
+ output.writeUTF("STRING");
+ output.writeInt(secret_value.length - truncLength);
+ output.write(secret_value);
+ }
+
+ private void possiblyAlterEncryptedBytes(IndexOutput indexOutput, byte[] salt, byte[] iv, byte[] encryptedBytes, int
+ truncEncryptedDataLength)
+ throws Exception {
+ indexOutput.writeInt(4 + salt.length + 4 + iv.length + 4 + encryptedBytes.length);
+ indexOutput.writeInt(salt.length);
+ indexOutput.writeBytes(salt, salt.length);
+ indexOutput.writeInt(iv.length);
+ indexOutput.writeBytes(iv, iv.length);
+ indexOutput.writeInt(encryptedBytes.length - truncEncryptedDataLength);
+ indexOutput.writeBytes(encryptedBytes, encryptedBytes.length);
+ }
+
public void testUpgradeAddsSeed() throws Exception {
KeyStoreWrapper keystore = KeyStoreWrapper.create();
keystore.remove(KeyStoreWrapper.SEED_SETTING.getKey());
diff --git a/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java
index 2998ec8a6ba..fab38a2b73b 100644
--- a/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java
+++ b/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java
@@ -22,7 +22,6 @@ package org.elasticsearch.discovery;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.CorruptIndexException;
import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.NoShardAvailableActionException;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
@@ -61,10 +60,13 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
+import static org.elasticsearch.action.DocWriteResponse.Result.CREATED;
+import static org.elasticsearch.action.DocWriteResponse.Result.UPDATED;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.isOneOf;
import static org.hamcrest.Matchers.not;
/**
@@ -135,7 +137,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase {
.setSource("{}", XContentType.JSON)
.setTimeout(timeout)
.get(timeout);
- assertEquals(DocWriteResponse.Result.CREATED, response.getResult());
+ assertThat(response.getResult(), isOneOf(CREATED, UPDATED));
ackedDocs.put(id, node);
logger.trace("[{}] indexed id [{}] through node [{}], response [{}]", name, id, node, response);
} catch (ElasticsearchException e) {
diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index e399c02d6cc..979c44dd5fc 100644
--- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -133,6 +133,7 @@ import java.util.Base64;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -1385,18 +1386,13 @@ public class InternalEngineTests extends EngineTestCase {
}
protected List generateSingleDocHistory(boolean forReplica, VersionType versionType,
- boolean partialOldPrimary, long primaryTerm,
- int minOpCount, int maxOpCount) {
+ long primaryTerm,
+ int minOpCount, int maxOpCount, String docId) {
final int numOfOps = randomIntBetween(minOpCount, maxOpCount);
final List ops = new ArrayList<>();
- final Term id = newUid("1");
- final int startWithSeqNo;
- if (partialOldPrimary) {
- startWithSeqNo = randomBoolean() ? numOfOps - 1 : randomIntBetween(0, numOfOps - 1);
- } else {
- startWithSeqNo = 0;
- }
- final String valuePrefix = forReplica ? "r_" : "p_";
+ final Term id = newUid(docId);
+ final int startWithSeqNo = 0;
+ final String valuePrefix = (forReplica ? "r_" : "p_" ) + docId + "_";
final boolean incrementTermWhenIntroducingSeqNo = randomBoolean();
for (int i = 0; i < numOfOps; i++) {
final Engine.Operation op;
@@ -1418,7 +1414,7 @@ public class InternalEngineTests extends EngineTestCase {
throw new UnsupportedOperationException("unknown version type: " + versionType);
}
if (randomBoolean()) {
- op = new Engine.Index(id, testParsedDocument("1", null, testDocumentWithTextField(valuePrefix + i), B_1, null),
+ op = new Engine.Index(id, testParsedDocument(docId, null, testDocumentWithTextField(valuePrefix + i), B_1, null),
forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO,
forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm,
version,
@@ -1427,7 +1423,7 @@ public class InternalEngineTests extends EngineTestCase {
System.currentTimeMillis(), -1, false
);
} else {
- op = new Engine.Delete("test", "1", id,
+ op = new Engine.Delete("test", docId, id,
forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO,
forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm,
version,
@@ -1442,7 +1438,7 @@ public class InternalEngineTests extends EngineTestCase {
public void testOutOfOrderDocsOnReplica() throws IOException {
final List ops = generateSingleDocHistory(true,
- randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL, VersionType.EXTERNAL_GTE, VersionType.FORCE), false, 2, 2, 20);
+ randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL, VersionType.EXTERNAL_GTE, VersionType.FORCE), 2, 2, 20, "1");
assertOpsOnReplica(ops, replicaEngine, true);
}
@@ -1511,28 +1507,83 @@ public class InternalEngineTests extends EngineTestCase {
}
}
- public void testConcurrentOutOfDocsOnReplica() throws IOException, InterruptedException {
- final List ops = generateSingleDocHistory(true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 100, 300);
- final Engine.Operation lastOp = ops.get(ops.size() - 1);
- final String lastFieldValue;
- if (lastOp instanceof Engine.Index) {
- Engine.Index index = (Engine.Index) lastOp;
- lastFieldValue = index.docs().get(0).get("value");
+ public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, InterruptedException {
+ final List opsDoc1 =
+ generateSingleDocHistory(true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), 2, 100, 300, "1");
+ final Engine.Operation lastOpDoc1 = opsDoc1.get(opsDoc1.size() - 1);
+ final String lastFieldValueDoc1;
+ if (lastOpDoc1 instanceof Engine.Index) {
+ Engine.Index index = (Engine.Index) lastOpDoc1;
+ lastFieldValueDoc1 = index.docs().get(0).get("value");
} else {
// delete
- lastFieldValue = null;
+ lastFieldValueDoc1 = null;
}
- shuffle(ops, random());
- concurrentlyApplyOps(ops, engine);
+ final List opsDoc2 =
+ generateSingleDocHistory(true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), 2, 100, 300, "2");
+ final Engine.Operation lastOpDoc2 = opsDoc2.get(opsDoc2.size() - 1);
+ final String lastFieldValueDoc2;
+ if (lastOpDoc2 instanceof Engine.Index) {
+ Engine.Index index = (Engine.Index) lastOpDoc2;
+ lastFieldValueDoc2 = index.docs().get(0).get("value");
+ } else {
+ // delete
+ lastFieldValueDoc2 = null;
+ }
+ // randomly interleave
+ final AtomicLong seqNoGenerator = new AtomicLong();
+ Function seqNoUpdater = operation -> {
+ final long newSeqNo = seqNoGenerator.getAndIncrement();
+ if (operation instanceof Engine.Index) {
+ Engine.Index index = (Engine.Index) operation;
+ return new Engine.Index(index.uid(), index.parsedDoc(), newSeqNo, index.primaryTerm(), index.version(),
+ index.versionType(), index.origin(), index.startTime(), index.getAutoGeneratedIdTimestamp(), index.isRetry());
+ } else {
+ Engine.Delete delete = (Engine.Delete) operation;
+ return new Engine.Delete(delete.type(), delete.id(), delete.uid(), newSeqNo, delete.primaryTerm(),
+ delete.version(), delete.versionType(), delete.origin(), delete.startTime());
+ }
+ };
+ final List allOps = new ArrayList<>();
+ Iterator iter1 = opsDoc1.iterator();
+ Iterator iter2 = opsDoc2.iterator();
+ while (iter1.hasNext() && iter2.hasNext()) {
+ final Engine.Operation next = randomBoolean() ? iter1.next() : iter2.next();
+ allOps.add(seqNoUpdater.apply(next));
+ }
+ iter1.forEachRemaining(o -> allOps.add(seqNoUpdater.apply(o)));
+ iter2.forEachRemaining(o -> allOps.add(seqNoUpdater.apply(o)));
+ // insert some duplicates
+ allOps.addAll(randomSubsetOf(allOps));
- assertVisibleCount(engine, lastFieldValue == null ? 0 : 1);
- if (lastFieldValue != null) {
+ shuffle(allOps, random());
+ concurrentlyApplyOps(allOps, engine);
+
+ engine.refresh("test");
+
+ if (lastFieldValueDoc1 != null) {
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
- searcher.searcher().search(new TermQuery(new Term("value", lastFieldValue)), collector);
+ searcher.searcher().search(new TermQuery(new Term("value", lastFieldValueDoc1)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
+ if (lastFieldValueDoc2 != null) {
+ try (Searcher searcher = engine.acquireSearcher("test")) {
+ final TotalHitCountCollector collector = new TotalHitCountCollector();
+ searcher.searcher().search(new TermQuery(new Term("value", lastFieldValueDoc2)), collector);
+ assertThat(collector.getTotalHits(), equalTo(1));
+ }
+ }
+
+ int totalExpectedOps = 0;
+ if (lastFieldValueDoc1 != null) {
+ totalExpectedOps++;
+ }
+ if (lastFieldValueDoc2 != null) {
+ totalExpectedOps++;
+ }
+ assertVisibleCount(engine, totalExpectedOps);
}
private void concurrentlyApplyOps(List ops, InternalEngine engine) throws InterruptedException {
@@ -1572,12 +1623,12 @@ public class InternalEngineTests extends EngineTestCase {
}
public void testInternalVersioningOnPrimary() throws IOException {
- final List ops = generateSingleDocHistory(false, VersionType.INTERNAL, false, 2, 2, 20);
+ final List ops = generateSingleDocHistory(false, VersionType.INTERNAL, 2, 2, 20, "1");
assertOpsOnPrimary(ops, Versions.NOT_FOUND, true, engine);
}
public void testVersionOnPrimaryWithConcurrentRefresh() throws Exception {
- List ops = generateSingleDocHistory(false, VersionType.INTERNAL, false, 2, 10, 100);
+ List ops = generateSingleDocHistory(false, VersionType.INTERNAL, 2, 10, 100, "1");
CountDownLatch latch = new CountDownLatch(1);
AtomicBoolean running = new AtomicBoolean(true);
Thread refreshThread = new Thread(() -> {
@@ -1697,7 +1748,7 @@ public class InternalEngineTests extends EngineTestCase {
final Set nonInternalVersioning = new HashSet<>(Arrays.asList(VersionType.values()));
nonInternalVersioning.remove(VersionType.INTERNAL);
final VersionType versionType = randomFrom(nonInternalVersioning);
- final List ops = generateSingleDocHistory(false, versionType, false, 2, 2, 20);
+ final List ops = generateSingleDocHistory(false, versionType, 2, 2, 20, "1");
final Engine.Operation lastOp = ops.get(ops.size() - 1);
final String lastFieldValue;
if (lastOp instanceof Engine.Index) {
@@ -1775,8 +1826,8 @@ public class InternalEngineTests extends EngineTestCase {
}
public void testVersioningPromotedReplica() throws IOException {
- final List replicaOps = generateSingleDocHistory(true, VersionType.INTERNAL, false, 1, 2, 20);
- List primaryOps = generateSingleDocHistory(false, VersionType.INTERNAL, false, 2, 2, 20);
+ final List replicaOps = generateSingleDocHistory(true, VersionType.INTERNAL, 1, 2, 20, "1");
+ List primaryOps = generateSingleDocHistory(false, VersionType.INTERNAL, 2, 2, 20, "1");
Engine.Operation lastReplicaOp = replicaOps.get(replicaOps.size() - 1);
final boolean deletedOnReplica = lastReplicaOp instanceof Engine.Delete;
final long finalReplicaVersion = lastReplicaOp.version();
@@ -1796,7 +1847,7 @@ public class InternalEngineTests extends EngineTestCase {
}
public void testConcurrentExternalVersioningOnPrimary() throws IOException, InterruptedException {
- final List ops = generateSingleDocHistory(false, VersionType.EXTERNAL, false, 2, 100, 300);
+ final List ops = generateSingleDocHistory(false, VersionType.EXTERNAL, 2, 100, 300, "1");
final Engine.Operation lastOp = ops.get(ops.size() - 1);
final String lastFieldValue;
if (lastOp instanceof Engine.Index) {
diff --git a/server/src/test/java/org/elasticsearch/snapshots/RepositoriesMetaDataSerializationTests.java b/server/src/test/java/org/elasticsearch/snapshots/RepositoriesMetaDataSerializationTests.java
index 7627dafa5a9..17ae1def235 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/RepositoriesMetaDataSerializationTests.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/RepositoriesMetaDataSerializationTests.java
@@ -112,7 +112,7 @@ public class RepositoriesMetaDataSerializationTests extends AbstractDiffableSeri
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
RepositoriesMetaData repositoriesMetaData = RepositoriesMetaData.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
- List repos = repositoriesMetaData.repositories();
+ List repos = new ArrayList<>(repositoriesMetaData.repositories());
repos.sort(Comparator.comparing(RepositoryMetaData::name));
return new RepositoriesMetaData(repos);
}
diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java
index 232ad14aabc..656ec8c1fb0 100644
--- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java
@@ -131,6 +131,7 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase {
.put("englishminimalstem", MovedToAnalysisCommon.class)
.put("englishpossessive", MovedToAnalysisCommon.class)
.put("finnishlightstem", MovedToAnalysisCommon.class)
+ .put("fixedshingle", MovedToAnalysisCommon.class)
.put("frenchlightstem", MovedToAnalysisCommon.class)
.put("frenchminimalstem", MovedToAnalysisCommon.class)
.put("galicianminimalstem", MovedToAnalysisCommon.class)
diff --git a/x-pack/docs/en/security/ccs-clients-integrations/cross-cluster.asciidoc b/x-pack/docs/en/security/ccs-clients-integrations/cross-cluster.asciidoc
index 1cbcf623a5f..bf4800d50d2 100644
--- a/x-pack/docs/en/security/ccs-clients-integrations/cross-cluster.asciidoc
+++ b/x-pack/docs/en/security/ccs-clients-integrations/cross-cluster.asciidoc
@@ -108,9 +108,7 @@ On cluster `two`, this role allows the user to query local indices called
-----------------------------------------------------------
POST /_xpack/security/role/cluster_two_logs
{
- "cluster": [
- "transport_client"
- ],
+ "cluster": [],
"indices": [
{
"names": [
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfig.java
index bde19aa2786..d0995d22c7e 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/ModelPlotConfig.java
@@ -53,10 +53,6 @@ public class ModelPlotConfig implements ToXContentObject, Writeable {
this(true, null);
}
- public ModelPlotConfig(boolean enabled) {
- this(false, null);
- }
-
public ModelPlotConfig(boolean enabled, String terms) {
this.enabled = enabled;
this.terms = terms;
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java
index 80223027e8e..d906ccf2f7a 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java
@@ -82,7 +82,7 @@ public class DataCountsReporter extends AbstractComponent {
totalRecordStats = counts;
incrementalRecordStats = new DataCounts(job.getId());
- diagnostics = new DataStreamDiagnostics(job);
+ diagnostics = new DataStreamDiagnostics(job, counts);
acceptablePercentDateParseErrors = ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING.get(settings);
acceptablePercentOutOfOrderErrors = ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING.get(settings);
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/diagnostics/BucketDiagnostics.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/diagnostics/BucketDiagnostics.java
index c61926dfb04..a4497653497 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/diagnostics/BucketDiagnostics.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/diagnostics/BucketDiagnostics.java
@@ -6,8 +6,11 @@
package org.elasticsearch.xpack.ml.job.process.diagnostics;
import org.elasticsearch.xpack.core.ml.job.config.Job;
+import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.core.ml.utils.Intervals;
+import java.util.Date;
+
/**
* A moving window of buckets that allow keeping
* track of some statistics like the bucket count,
@@ -33,12 +36,17 @@ class BucketDiagnostics {
private long latestFlushedBucketStartMs = -1;
private final BucketFlushListener bucketFlushListener;
- BucketDiagnostics(Job job, BucketFlushListener bucketFlushListener) {
+ BucketDiagnostics(Job job, DataCounts dataCounts, BucketFlushListener bucketFlushListener) {
bucketSpanMs = job.getAnalysisConfig().getBucketSpan().millis();
latencyMs = job.getAnalysisConfig().getLatency() == null ? 0 : job.getAnalysisConfig().getLatency().millis();
maxSize = Math.max((int) (Intervals.alignToCeil(latencyMs, bucketSpanMs) / bucketSpanMs), MIN_BUCKETS);
buckets = new long[maxSize];
this.bucketFlushListener = bucketFlushListener;
+
+ Date latestRecordTimestamp = dataCounts.getLatestRecordTimeStamp();
+ if (latestRecordTimestamp != null) {
+ addRecord(latestRecordTimestamp.getTime());
+ }
}
void addRecord(long recordTimestampMs) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/diagnostics/DataStreamDiagnostics.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/diagnostics/DataStreamDiagnostics.java
index a19f6eba023..a225587d0bb 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/diagnostics/DataStreamDiagnostics.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/diagnostics/DataStreamDiagnostics.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.job.process.diagnostics;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.core.ml.job.config.Job;
+import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
import java.util.Date;
@@ -32,8 +33,8 @@ public class DataStreamDiagnostics {
private long sparseBucketCount = 0;
private long latestSparseBucketTime = -1;
- public DataStreamDiagnostics(Job job) {
- bucketDiagnostics = new BucketDiagnostics(job, createBucketFlushListener());
+ public DataStreamDiagnostics(Job job, DataCounts dataCounts) {
+ bucketDiagnostics = new BucketDiagnostics(job, dataCounts, createBucketFlushListener());
}
private BucketDiagnostics.BucketFlushListener createBucketFlushListener() {
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/diagnostics/DataStreamDiagnosticsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/diagnostics/DataStreamDiagnosticsTests.java
index 19f7f88c38f..0d9c52a28bd 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/diagnostics/DataStreamDiagnosticsTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/diagnostics/DataStreamDiagnosticsTests.java
@@ -11,6 +11,7 @@ import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job;
+import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
import org.junit.Before;
import java.util.Arrays;
@@ -20,6 +21,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
private static final long BUCKET_SPAN = 60000;
private Job job;
+ private DataCounts dataCounts;
@Before
public void setUpMocks() {
@@ -32,10 +34,11 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
builder.setAnalysisConfig(acBuilder);
builder.setDataDescription(new DataDescription.Builder());
job = createJob(TimeValue.timeValueMillis(BUCKET_SPAN), null);
+ dataCounts = new DataCounts(job.getId());
}
public void testIncompleteBuckets() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
d.checkRecord(1000);
d.checkRecord(2000);
@@ -81,7 +84,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
}
public void testSimple() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
d.checkRecord(70000);
d.checkRecord(130000);
@@ -103,7 +106,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
}
public void testSimpleReverse() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
d.checkRecord(610000);
d.checkRecord(550000);
@@ -126,7 +129,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
public void testWithLatencyLessThanTenBuckets() {
job = createJob(TimeValue.timeValueMillis(BUCKET_SPAN), TimeValue.timeValueMillis(3 * BUCKET_SPAN));
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
long timestamp = 70000;
while (timestamp < 70000 + 20 * BUCKET_SPAN) {
@@ -141,7 +144,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
public void testWithLatencyGreaterThanTenBuckets() {
job = createJob(TimeValue.timeValueMillis(BUCKET_SPAN), TimeValue.timeValueMillis(13 * BUCKET_SPAN + 10000));
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
long timestamp = 70000;
while (timestamp < 70000 + 20 * BUCKET_SPAN) {
@@ -155,7 +158,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
}
public void testEmptyBuckets() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
d.checkRecord(10000);
d.checkRecord(70000);
@@ -177,7 +180,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
}
public void testEmptyBucketsStartLater() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
d.checkRecord(1110000);
d.checkRecord(1170000);
@@ -199,7 +202,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
}
public void testSparseBuckets() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
sendManyDataPoints(d, 10000, 69000, 1000);
sendManyDataPoints(d, 70000, 129000, 1200);
@@ -227,7 +230,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
* signal
*/
public void testSparseBucketsLast() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
sendManyDataPoints(d, 10000, 69000, 1000);
sendManyDataPoints(d, 70000, 129000, 1200);
@@ -255,7 +258,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
* signal on the 2nd to last
*/
public void testSparseBucketsLastTwo() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
sendManyDataPoints(d, 10000, 69000, 1000);
sendManyDataPoints(d, 70000, 129000, 1200);
@@ -280,7 +283,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
}
public void testMixedEmptyAndSparseBuckets() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
sendManyDataPoints(d, 10000, 69000, 1000);
sendManyDataPoints(d, 70000, 129000, 1200);
@@ -308,7 +311,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
* whether counts are right.
*/
public void testEmptyBucketsLongerOutage() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
d.checkRecord(10000);
d.checkRecord(70000);
@@ -336,7 +339,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
* The number of sparse buckets should not be to much, it could be normal.
*/
public void testSparseBucketsLongerPeriod() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
sendManyDataPoints(d, 10000, 69000, 1000);
sendManyDataPoints(d, 70000, 129000, 1200);
@@ -374,7 +377,7 @@ public class DataStreamDiagnosticsTests extends ESTestCase {
}
public void testFlushAfterZeroRecords() {
- DataStreamDiagnostics d = new DataStreamDiagnostics(job);
+ DataStreamDiagnostics d = new DataStreamDiagnostics(job, dataCounts);
d.flush();
assertEquals(0, d.getBucketCount());
}
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java
index ef8c8bdb8c5..9dcc2e482d0 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java
@@ -56,10 +56,12 @@ import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.rollup.RollupField;
import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps;
import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction;
+import org.elasticsearch.xpack.core.rollup.job.DateHistoGroupConfig;
import org.elasticsearch.xpack.rollup.Rollup;
import org.elasticsearch.xpack.rollup.RollupJobIdentifierUtils;
import org.elasticsearch.xpack.rollup.RollupRequestTranslator;
import org.elasticsearch.xpack.rollup.RollupResponseTranslator;
+import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.ArrayList;
@@ -277,6 +279,7 @@ public class TransportRollupSearchAction extends TransportAction incorrectTimeZones = new ArrayList<>();
List rewrittenFieldName = jobCaps.stream()
// We only care about job caps that have the query's target field
.filter(caps -> caps.getFieldCaps().keySet().contains(fieldName))
@@ -286,6 +289,24 @@ public class TransportRollupSearchAction extends TransportAction {
String type = (String)agg.get(RollupField.AGG);
+
+ // If the cap is for a date_histo, and the query is a range, the timezones need to match
+ if (type.equals(DateHistogramAggregationBuilder.NAME) && builder instanceof RangeQueryBuilder) {
+ String timeZone = ((RangeQueryBuilder)builder).timeZone();
+
+ // Many range queries don't include the timezone because the default is UTC, but the query
+ // builder will return null so we need to set it here
+ if (timeZone == null) {
+ timeZone = DateTimeZone.UTC.toString();
+ }
+ boolean matchingTZ = ((String)agg.get(DateHistoGroupConfig.TIME_ZONE.getPreferredName()))
+ .equalsIgnoreCase(timeZone);
+ if (matchingTZ == false) {
+ incorrectTimeZones.add((String)agg.get(DateHistoGroupConfig.TIME_ZONE.getPreferredName()));
+ }
+ return matchingTZ;
+ }
+ // Otherwise just make sure it's one of the three groups
return type.equals(TermsAggregationBuilder.NAME)
|| type.equals(DateHistogramAggregationBuilder.NAME)
|| type.equals(HistogramAggregationBuilder.NAME);
@@ -304,8 +325,14 @@ public class TransportRollupSearchAction extends TransportAction 1) {
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
index 23d60ef01e4..d9d3e672a0a 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
@@ -121,16 +121,38 @@ public class SearchActionTests extends ESTestCase {
RollupJobCaps cap = new RollupJobCaps(job.build());
Set caps = new HashSet<>();
caps.add(cap);
- QueryBuilder rewritten = null;
- try {
- rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1), caps);
- } catch (Exception e) {
- fail("Should not have thrown exception when parsing query.");
- }
+ QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("UTC"), caps);
assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
assertThat(((RangeQueryBuilder)rewritten).fieldName(), equalTo("foo.date_histogram.timestamp"));
}
+ public void testRangeNullTimeZone() {
+ RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
+ GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig();
+ group.setDateHisto(new DateHistoGroupConfig.Builder().setField("foo").setInterval(new DateHistogramInterval("1h")).build());
+ job.setGroupConfig(group.build());
+ RollupJobCaps cap = new RollupJobCaps(job.build());
+ Set caps = new HashSet<>();
+ caps.add(cap);
+ QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1), caps);
+ assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
+ assertThat(((RangeQueryBuilder)rewritten).fieldName(), equalTo("foo.date_histogram.timestamp"));
+ }
+
+ public void testRangeWrongTZ() {
+ RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
+ GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig();
+ group.setDateHisto(new DateHistoGroupConfig.Builder().setField("foo").setInterval(new DateHistogramInterval("1h")).build());
+ job.setGroupConfig(group.build());
+ RollupJobCaps cap = new RollupJobCaps(job.build());
+ Set caps = new HashSet<>();
+ caps.add(cap);
+ Exception e = expectThrows(IllegalArgumentException.class,
+ () -> TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("EST"), caps));
+ assertThat(e.getMessage(), equalTo("Field [foo] in [range] query was found in rollup indices, but requested timezone is not " +
+ "compatible. Options include: [UTC]"));
+ }
+
public void testTerms() {
RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig();
@@ -139,12 +161,7 @@ public class SearchActionTests extends ESTestCase {
RollupJobCaps cap = new RollupJobCaps(job.build());
Set caps = new HashSet<>();
caps.add(cap);
- QueryBuilder rewritten = null;
- try {
- rewritten = TransportRollupSearchAction.rewriteQuery(new TermQueryBuilder("foo", "bar"), caps);
- } catch (Exception e) {
- fail("Should not have thrown exception when parsing query.");
- }
+ QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new TermQueryBuilder("foo", "bar"), caps);
assertThat(rewritten, instanceOf(TermQueryBuilder.class));
assertThat(((TermQueryBuilder)rewritten).fieldName(), equalTo("foo.terms.value"));
}
@@ -160,12 +177,7 @@ public class SearchActionTests extends ESTestCase {
BoolQueryBuilder builder = new BoolQueryBuilder();
builder.must(getQueryBuilder(2));
- QueryBuilder rewritten = null;
- try {
- rewritten = TransportRollupSearchAction.rewriteQuery(builder, caps);
- } catch (Exception e) {
- fail("Should not have thrown exception when parsing query.");
- }
+ QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(builder, caps);
assertThat(rewritten, instanceOf(BoolQueryBuilder.class));
assertThat(((BoolQueryBuilder)rewritten).must().size(), equalTo(1));
}
@@ -178,12 +190,8 @@ public class SearchActionTests extends ESTestCase {
RollupJobCaps cap = new RollupJobCaps(job.build());
Set caps = new HashSet<>();
caps.add(cap);
- try {
- QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new MatchAllQueryBuilder(), caps);
- assertThat(rewritten, instanceOf(MatchAllQueryBuilder.class));
- } catch (Exception e) {
- fail("Should not have thrown exception when parsing query.");
- }
+ QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new MatchAllQueryBuilder(), caps);
+ assertThat(rewritten, instanceOf(MatchAllQueryBuilder.class));
}
public void testAmbiguousResolution() {
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java
index 5c0000d4304..4c4d0afc10d 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java
@@ -40,10 +40,10 @@ import java.util.SortedMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.stream.Collectors;
+import static org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER;
+
public class IndicesAndAliasesResolver {
- private static final ResolvedIndices NO_INDEX_PLACEHOLDER_RESOLVED =
- ResolvedIndices.local(IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER);
//`*,-*` what we replace indices with if we need Elasticsearch to return empty responses without throwing exception
private static final String[] NO_INDICES_ARRAY = new String[] { "*", "-*" };
static final List NO_INDICES_LIST = Arrays.asList(NO_INDICES_ARRAY);
@@ -87,12 +87,14 @@ public class IndicesAndAliasesResolver {
public ResolvedIndices resolve(TransportRequest request, MetaData metaData, AuthorizedIndices authorizedIndices) {
if (request instanceof IndicesAliasesRequest) {
- ResolvedIndices indices = ResolvedIndices.empty();
+ ResolvedIndices.Builder resolvedIndicesBuilder = new ResolvedIndices.Builder();
IndicesAliasesRequest indicesAliasesRequest = (IndicesAliasesRequest) request;
for (IndicesRequest indicesRequest : indicesAliasesRequest.getAliasActions()) {
- indices = ResolvedIndices.add(indices, resolveIndicesAndAliases(indicesRequest, metaData, authorizedIndices));
+ final ResolvedIndices resolved = resolveIndicesAndAliases(indicesRequest, metaData, authorizedIndices);
+ resolvedIndicesBuilder.addLocal(resolved.getLocal());
+ resolvedIndicesBuilder.addRemote(resolved.getRemote());
}
- return indices;
+ return resolvedIndicesBuilder.build();
}
// if for some reason we are missing an action... just for safety we'll reject
@@ -102,10 +104,10 @@ public class IndicesAndAliasesResolver {
return resolveIndicesAndAliases((IndicesRequest) request, metaData, authorizedIndices);
}
- ResolvedIndices resolveIndicesAndAliases(IndicesRequest indicesRequest, MetaData metaData,
- AuthorizedIndices authorizedIndices) {
+
+ ResolvedIndices resolveIndicesAndAliases(IndicesRequest indicesRequest, MetaData metaData, AuthorizedIndices authorizedIndices) {
+ final ResolvedIndices.Builder resolvedIndicesBuilder = new ResolvedIndices.Builder();
boolean indicesReplacedWithNoIndices = false;
- final ResolvedIndices indices;
if (indicesRequest instanceof PutMappingRequest && ((PutMappingRequest) indicesRequest).getConcreteIndex() != null) {
/*
* This is a special case since PutMappingRequests from dynamic mapping updates have a concrete index
@@ -114,7 +116,7 @@ public class IndicesAndAliasesResolver {
*/
assert indicesRequest.indices() == null || indicesRequest.indices().length == 0
: "indices are: " + Arrays.toString(indicesRequest.indices()); // Arrays.toString() can handle null values - all good
- return ResolvedIndices.local(((PutMappingRequest) indicesRequest).getConcreteIndex().getName());
+ resolvedIndicesBuilder.addLocal(((PutMappingRequest) indicesRequest).getConcreteIndex().getName());
} else if (indicesRequest instanceof IndicesRequest.Replaceable) {
IndicesRequest.Replaceable replaceable = (IndicesRequest.Replaceable) indicesRequest;
final boolean replaceWildcards = indicesRequest.indicesOptions().expandWildcardsOpen()
@@ -127,13 +129,12 @@ public class IndicesAndAliasesResolver {
indicesOptions.expandWildcardsOpen(), indicesOptions.expandWildcardsClosed());
}
- ResolvedIndices result = ResolvedIndices.empty();
// check for all and return list of authorized indices
if (IndexNameExpressionResolver.isAllIndices(indicesList(indicesRequest.indices()))) {
if (replaceWildcards) {
for (String authorizedIndex : authorizedIndices.get()) {
if (isIndexVisible(authorizedIndex, indicesOptions, metaData)) {
- result = ResolvedIndices.add(result, ResolvedIndices.local(authorizedIndex));
+ resolvedIndicesBuilder.addLocal(authorizedIndex);
}
}
}
@@ -144,7 +145,7 @@ public class IndicesAndAliasesResolver {
if (allowsRemoteIndices(indicesRequest)) {
split = remoteClusterResolver.splitLocalAndRemoteIndexNames(indicesRequest.indices());
} else {
- split = ResolvedIndices.local(indicesRequest.indices());
+ split = new ResolvedIndices(Arrays.asList(indicesRequest.indices()), Collections.emptyList());
}
List replaced = replaceWildcardsWithAuthorizedIndices(split.getLocal(), indicesOptions, metaData,
authorizedIndices.get(), replaceWildcards);
@@ -153,22 +154,23 @@ public class IndicesAndAliasesResolver {
//remove all the ones that the current user is not authorized for and ignore them
replaced = replaced.stream().filter(authorizedIndices.get()::contains).collect(Collectors.toList());
}
- result = new ResolvedIndices(new ArrayList<>(replaced), split.getRemote());
+ resolvedIndicesBuilder.addLocal(replaced);
+ resolvedIndicesBuilder.addRemote(split.getRemote());
}
- if (result.isEmpty()) {
+
+ if (resolvedIndicesBuilder.isEmpty()) {
if (indicesOptions.allowNoIndices()) {
//this is how we tell es core to return an empty response, we can let the request through being sure
//that the '-*' wildcard expression will be resolved to no indices. We can't let empty indices through
//as that would be resolved to _all by es core.
replaceable.indices(NO_INDICES_ARRAY);
indicesReplacedWithNoIndices = true;
- indices = NO_INDEX_PLACEHOLDER_RESOLVED;
+ resolvedIndicesBuilder.addLocal(NO_INDEX_PLACEHOLDER);
} else {
throw new IndexNotFoundException(Arrays.toString(indicesRequest.indices()));
}
} else {
- replaceable.indices(result.toArray());
- indices = result;
+ replaceable.indices(resolvedIndicesBuilder.build().toArray());
}
} else {
if (containsWildcards(indicesRequest)) {
@@ -182,11 +184,9 @@ public class IndicesAndAliasesResolver {
//That is fine though because they never contain wildcards, as they get replaced as part of the authorization of their
//corresponding parent request on the coordinating node. Hence wildcards don't need to get replaced nor exploded for
// shard level requests.
- List resolvedNames = new ArrayList<>();
for (String name : indicesRequest.indices()) {
- resolvedNames.add(nameExpressionResolver.resolveDateMathExpression(name));
+ resolvedIndicesBuilder.addLocal(nameExpressionResolver.resolveDateMathExpression(name));
}
- indices = new ResolvedIndices(resolvedNames, new ArrayList<>());
}
if (indicesRequest instanceof AliasesRequest) {
@@ -207,10 +207,10 @@ public class IndicesAndAliasesResolver {
//if we replaced the indices with '-*' we shouldn't be adding the aliases to the list otherwise the request will
//not get authorized. Leave only '-*' and ignore the rest, result will anyway be empty.
} else {
- return ResolvedIndices.add(indices, ResolvedIndices.local(aliasesRequest.aliases()));
+ resolvedIndicesBuilder.addLocal(aliasesRequest.aliases());
}
}
- return indices;
+ return resolvedIndicesBuilder.build();
}
public static boolean allowsRemoteIndices(IndicesRequest request) {
@@ -423,24 +423,8 @@ public class IndicesAndAliasesResolver {
private final List remote;
ResolvedIndices(List local, List remote) {
- this.local = local;
- this.remote = remote;
- }
-
- /**
- * Constructs a new instance of this class where both the {@link #getLocal() local} and {@link #getRemote() remote} index lists
- * are empty.
- */
- private static ResolvedIndices empty() {
- return new ResolvedIndices(Collections.emptyList(), Collections.emptyList());
- }
-
- /**
- * Constructs a new instance of this class where both the {@link #getLocal() local} index list is populated with names
- * and the {@link #getRemote() remote} index list is empty.
- */
- private static ResolvedIndices local(String... names) {
- return new ResolvedIndices(Arrays.asList(names), Collections.emptyList());
+ this.local = Collections.unmodifiableList(local);
+ this.remote = Collections.unmodifiableList(remote);
}
/**
@@ -449,14 +433,14 @@ public class IndicesAndAliasesResolver {
* to [ "-a1", "a*" ]
. As a consequence, this list may contain duplicates .
*/
public List getLocal() {
- return Collections.unmodifiableList(local);
+ return local;
}
/**
* Returns the collection of index names that have been stored as "remote" indices.
*/
public List getRemote() {
- return Collections.unmodifiableList(remote);
+ return remote;
}
/**
@@ -471,7 +455,7 @@ public class IndicesAndAliasesResolver {
* {@link IndicesAndAliasesResolverField#NO_INDEX_PLACEHOLDER no-index-placeholder} and nothing else.
*/
public boolean isNoIndicesPlaceholder() {
- return remote.isEmpty() && local.size() == 1 && local.contains(IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER);
+ return remote.isEmpty() && local.size() == 1 && local.contains(NO_INDEX_PLACEHOLDER);
}
private String[] toArray() {
@@ -487,19 +471,43 @@ public class IndicesAndAliasesResolver {
}
/**
- * Returns a new ResolvedIndices
contains the {@link #getLocal() local} and {@link #getRemote() remote}
- * index lists from b
appended to the corresponding lists in a
.
+ * Builder class for ResolvedIndices that allows for the building of a list of indices
+ * without the need to construct new objects and merging them together
*/
- private static ResolvedIndices add(ResolvedIndices a, ResolvedIndices b) {
- List local = new ArrayList<>(a.local.size() + b.local.size());
- local.addAll(a.local);
- local.addAll(b.local);
+ private static class Builder {
- List remote = new ArrayList<>(a.remote.size() + b.remote.size());
- remote.addAll(a.remote);
- remote.addAll(b.remote);
- return new ResolvedIndices(local, remote);
+ private final List local = new ArrayList<>();
+ private final List remote = new ArrayList<>();
+
+ /** add a local index name */
+ private void addLocal(String index) {
+ local.add(index);
+ }
+
+ /** adds the array of local index names */
+ private void addLocal(String[] indices) {
+ local.addAll(Arrays.asList(indices));
+ }
+
+ /** adds the list of local index names */
+ private void addLocal(List indices) {
+ local.addAll(indices);
+ }
+
+ /** adds the list of remote index names */
+ private void addRemote(List indices) {
+ remote.addAll(indices);
+ }
+
+ /** @return true
if both the local and remote index lists are empty. */
+ private boolean isEmpty() {
+ return local.isEmpty() && remote.isEmpty();
+ }
+
+ /** @return a immutable ResolvedIndices instance with the local and remote index lists */
+ private ResolvedIndices build() {
+ return new ResolvedIndices(local, remote);
+ }
}
-
}
}
diff --git a/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.3.0.jar.sha1 b/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.3.0.jar.sha1
deleted file mode 100644
index e12c932b38d..00000000000
--- a/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-040e2de30c5e6bad868b144e371730200719ceb3
\ No newline at end of file
diff --git a/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-1ed95c097b.jar.sha1 b/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..20da7d0f78e
--- /dev/null
+++ b/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+0b06e4f6514256a3f187a9892e520638b9c59e63
\ No newline at end of file
diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.3.0.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.3.0.jar.sha1
deleted file mode 100644
index e12c932b38d..00000000000
--- a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-040e2de30c5e6bad868b144e371730200719ceb3
\ No newline at end of file
diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-1ed95c097b.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-1ed95c097b.jar.sha1
new file mode 100644
index 00000000000..20da7d0f78e
--- /dev/null
+++ b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-1ed95c097b.jar.sha1
@@ -0,0 +1 @@
+0b06e4f6514256a3f187a9892e520638b9c59e63
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java
index fe9479f3c1a..31d933f9f59 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java
@@ -113,12 +113,36 @@ public class CompositeAggregationCursor implements Cursor {
SearchRequest search = Querier.prepareRequest(client, query, cfg.pageTimeout(), indices);
- client.search(search, ActionListener.wrap(r -> {
- updateCompositeAfterKey(r, query);
- CompositeAggsRowSet rowSet = new CompositeAggsRowSet(extractors, r, limit,
- serializeQuery(query), indices);
- listener.onResponse(rowSet);
- }, listener::onFailure));
+ client.search(search, new ActionListener() {
+ @Override
+ public void onResponse(SearchResponse r) {
+ try {
+ // retry
+ if (shouldRetryDueToEmptyPage(r)) {
+ CompositeAggregationCursor.updateCompositeAfterKey(r, search.source());
+ client.search(search, this);
+ return;
+ }
+
+ updateCompositeAfterKey(r, query);
+ CompositeAggsRowSet rowSet = new CompositeAggsRowSet(extractors, r, limit, serializeQuery(query), indices);
+ listener.onResponse(rowSet);
+ } catch (Exception ex) {
+ listener.onFailure(ex);
+ }
+ }
+
+ @Override
+ public void onFailure(Exception ex) {
+ listener.onFailure(ex);
+ }
+ });
+ }
+
+ static boolean shouldRetryDueToEmptyPage(SearchResponse response) {
+ CompositeAggregation composite = getComposite(response);
+ // if there are no buckets but a next page, go fetch it instead of sending an empty response to the client
+ return composite != null && composite.getBuckets().isEmpty() && composite.afterKey() != null && !composite.afterKey().isEmpty();
}
static CompositeAggregation getComposite(SearchResponse response) {
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java
index 3c10f08c53a..62941a5b14f 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java
@@ -206,8 +206,15 @@ public class Querier {
protected void handleResponse(SearchResponse response, ActionListener listener) {
// there are some results
if (response.getAggregations().asList().size() > 0) {
- CompositeAggregationCursor.updateCompositeAfterKey(response, request.source());
+ // retry
+ if (CompositeAggregationCursor.shouldRetryDueToEmptyPage(response)) {
+ CompositeAggregationCursor.updateCompositeAfterKey(response, request.source());
+ client.search(request, this);
+ return;
+ }
+
+ CompositeAggregationCursor.updateCompositeAfterKey(response, request.source());
byte[] nextSearch = null;
try {
nextSearch = CompositeAggregationCursor.serializeQuery(request.source());
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java
index d280a150e8d..dcfb713a665 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java
@@ -167,6 +167,7 @@ public class WatcherService extends AbstractComponent {
void stopExecutor() {
ThreadPool.terminate(executor, 10L, TimeUnit.SECONDS);
}
+
/**
* Reload the watcher service, does not switch the state from stopped to started, just keep going
* @param state cluster state, which is needed to find out about local shards
@@ -231,6 +232,7 @@ public class WatcherService extends AbstractComponent {
* manual watch execution, i.e. via the execute watch API
*/
public void pauseExecution(String reason) {
+ triggerService.pauseExecution();
int cancelledTaskCount = executionService.pause();
logger.info("paused watch execution, reason [{}], cancelled [{}] queued tasks", reason, cancelledTaskCount);
}
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java
index 92726fb94cd..5f815170215 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java
@@ -43,10 +43,14 @@ import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.XPackSettings;
+import org.elasticsearch.xpack.core.watcher.trigger.Trigger;
import org.elasticsearch.xpack.core.watcher.watch.Watch;
import org.elasticsearch.xpack.core.watcher.watch.WatchStatus;
+import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition;
import org.elasticsearch.xpack.watcher.execution.ExecutionService;
import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore;
+import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput;
+import org.elasticsearch.xpack.watcher.trigger.TriggerEngine;
import org.elasticsearch.xpack.watcher.trigger.TriggerService;
import org.elasticsearch.xpack.watcher.watch.WatchParser;
import org.joda.time.DateTime;
@@ -204,6 +208,36 @@ public class WatcherServiceTests extends ESTestCase {
assertThat(watches, hasSize(activeWatchCount));
}
+ public void testPausingWatcherServiceAlsoPausesTriggerService() {
+ String engineType = "foo";
+ TriggerEngine triggerEngine = mock(TriggerEngine.class);
+ when(triggerEngine.type()).thenReturn(engineType);
+ TriggerService triggerService = new TriggerService(Settings.EMPTY, Collections.singleton(triggerEngine));
+
+ Trigger trigger = mock(Trigger.class);
+ when(trigger.type()).thenReturn(engineType);
+
+ Watch watch = mock(Watch.class);
+ when(watch.trigger()).thenReturn(trigger);
+ when(watch.condition()).thenReturn(InternalAlwaysCondition.INSTANCE);
+ ExecutableNoneInput noneInput = new ExecutableNoneInput(logger);
+ when(watch.input()).thenReturn(noneInput);
+
+ triggerService.add(watch);
+ assertThat(triggerService.count(), is(1L));
+
+ WatcherService service = new WatcherService(Settings.EMPTY, triggerService, mock(TriggeredWatchStore.class),
+ mock(ExecutionService.class), mock(WatchParser.class), mock(Client.class), executorService) {
+ @Override
+ void stopExecutor() {
+ }
+ };
+
+ service.pauseExecution("pausing");
+ assertThat(triggerService.count(), is(0L));
+ verify(triggerEngine).pauseExecution();
+ }
+
private static DiscoveryNode newNode() {
return new DiscoveryNode("node", ESTestCase.buildNewFakeTransportAddress(), Collections.emptyMap(),
new HashSet<>(asList(DiscoveryNode.Role.values())), Version.CURRENT);
diff --git a/x-pack/qa/ml-basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java b/x-pack/qa/ml-basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java
index 3b84994f5ac..e7381050260 100644
--- a/x-pack/qa/ml-basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java
+++ b/x-pack/qa/ml-basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java
@@ -241,7 +241,7 @@ public class MlBasicMultiNodeIT extends ESRestTestCase {
assertEquals(0, responseBody.get("invalid_date_count"));
assertEquals(0, responseBody.get("missing_field_count"));
assertEquals(0, responseBody.get("out_of_order_timestamp_count"));
- assertEquals(0, responseBody.get("bucket_count"));
+ assertEquals(1000, responseBody.get("bucket_count"));
// unintuitive: should return the earliest record timestamp of this feed???
assertEquals(null, responseBody.get("earliest_record_timestamp"));
@@ -266,7 +266,7 @@ public class MlBasicMultiNodeIT extends ESRestTestCase {
assertEquals(0, dataCountsDoc.get("invalid_date_count"));
assertEquals(0, dataCountsDoc.get("missing_field_count"));
assertEquals(0, dataCountsDoc.get("out_of_order_timestamp_count"));
- assertEquals(0, dataCountsDoc.get("bucket_count"));
+ assertEquals(1000, dataCountsDoc.get("bucket_count"));
assertEquals(1403481600000L, dataCountsDoc.get("earliest_record_timestamp"));
assertEquals(1407082000000L, dataCountsDoc.get("latest_record_timestamp"));
diff --git a/x-pack/qa/ml-disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java b/x-pack/qa/ml-disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java
index e7a0a6028d4..3bb9566e5bf 100644
--- a/x-pack/qa/ml-disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java
+++ b/x-pack/qa/ml-disabled/src/test/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java
@@ -51,6 +51,6 @@ public class MlPluginDisabledIT extends ESRestTestCase {
ResponseException exception = expectThrows(ResponseException.class, () -> client().performRequest("put",
MachineLearning.BASE_PATH + "anomaly_detectors/foo", Collections.emptyMap(),
new StringEntity(Strings.toString(xContentBuilder), ContentType.APPLICATION_JSON)));
- assertThat(exception.getMessage(), containsString("No handler found for uri [/_xpack/ml/anomaly_detectors/foo] and method [PUT]"));
+ assertThat(exception.getMessage(), containsString("no handler found for uri [/_xpack/ml/anomaly_detectors/foo] and method [PUT]"));
}
}
diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java
new file mode 100644
index 00000000000..eb0c125a13c
--- /dev/null
+++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java
@@ -0,0 +1,167 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.integration;
+
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.bucket.terms.Terms;
+import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
+import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
+import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
+import org.elasticsearch.xpack.core.ml.job.config.Detector;
+import org.elasticsearch.xpack.core.ml.job.config.Job;
+import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig;
+import org.junit.After;
+import org.junit.Before;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+
+public class ModelPlotsIT extends MlNativeAutodetectIntegTestCase {
+
+ private static final String DATA_INDEX = "model-plots-test-data";
+ private static final String DATA_TYPE = "doc";
+
+ @Before
+ public void setUpData() {
+ client().admin().indices().prepareCreate(DATA_INDEX)
+ .addMapping(DATA_TYPE, "time", "type=date,format=epoch_millis", "user", "type=keyword")
+ .get();
+
+ List users = Arrays.asList("user_1", "user_2", "user_3");
+
+ // We are going to create data for last day
+ long nowMillis = System.currentTimeMillis();
+ int totalBuckets = 24;
+ BulkRequestBuilder bulkRequestBuilder = client().prepareBulk();
+ for (int bucket = 0; bucket < totalBuckets; bucket++) {
+ long timestamp = nowMillis - TimeValue.timeValueHours(totalBuckets - bucket).getMillis();
+ for (String user : users) {
+ IndexRequest indexRequest = new IndexRequest(DATA_INDEX, DATA_TYPE);
+ indexRequest.source("time", timestamp, "user", user);
+ bulkRequestBuilder.add(indexRequest);
+ }
+ }
+
+ BulkResponse bulkResponse = bulkRequestBuilder
+ .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
+ .get();
+ assertThat(bulkResponse.hasFailures(), is(false));
+ }
+
+ @After
+ public void tearDownData() {
+ client().admin().indices().prepareDelete(DATA_INDEX).get();
+ cleanUp();
+ }
+
+ public void testPartitionFieldWithoutTerms() throws Exception {
+ Job.Builder job = jobWithPartitionUser("model-plots-it-test-partition-field-without-terms");
+ job.setModelPlotConfig(new ModelPlotConfig());
+ registerJob(job);
+ putJob(job);
+ String datafeedId = job.getId() + "-feed";
+ DatafeedConfig datafeed = newDatafeed(datafeedId, job.getId());
+ registerDatafeed(datafeed);
+ putDatafeed(datafeed);
+ openJob(job.getId());
+ startDatafeed(datafeedId, 0, System.currentTimeMillis());
+ waitUntilJobIsClosed(job.getId());
+
+ assertThat(getBuckets(job.getId()).size(), equalTo(23));
+ Set modelPlotTerms = modelPlotTerms(job.getId(), "partition_field_value");
+ assertThat(modelPlotTerms, containsInAnyOrder("user_1", "user_2", "user_3"));
+ }
+
+ public void testPartitionFieldWithTerms() throws Exception {
+ Job.Builder job = jobWithPartitionUser("model-plots-it-test-partition-field-with-terms");
+ job.setModelPlotConfig(new ModelPlotConfig(true, "user_2,user_3"));
+ registerJob(job);
+ putJob(job);
+ String datafeedId = job.getId() + "-feed";
+ DatafeedConfig datafeed = newDatafeed(datafeedId, job.getId());
+ registerDatafeed(datafeed);
+ putDatafeed(datafeed);
+ openJob(job.getId());
+ startDatafeed(datafeedId, 0, System.currentTimeMillis());
+ waitUntilJobIsClosed(job.getId());
+
+ assertThat(getBuckets(job.getId()).size(), equalTo(23));
+ Set modelPlotTerms = modelPlotTerms(job.getId(), "partition_field_value");
+ assertThat(modelPlotTerms, containsInAnyOrder("user_2", "user_3"));
+ }
+
+ public void testByFieldWithTerms() throws Exception {
+ Job.Builder job = jobWithByUser("model-plots-it-test-by-field-with-terms");
+ job.setModelPlotConfig(new ModelPlotConfig(true, "user_2,user_3"));
+ registerJob(job);
+ putJob(job);
+ String datafeedId = job.getId() + "-feed";
+ DatafeedConfig datafeed = newDatafeed(datafeedId, job.getId());
+ registerDatafeed(datafeed);
+ putDatafeed(datafeed);
+ openJob(job.getId());
+ startDatafeed(datafeedId, 0, System.currentTimeMillis());
+ waitUntilJobIsClosed(job.getId());
+
+ assertThat(getBuckets(job.getId()).size(), equalTo(23));
+ Set modelPlotTerms = modelPlotTerms(job.getId(), "by_field_value");
+ assertThat(modelPlotTerms, containsInAnyOrder("user_2", "user_3"));
+ }
+
+ private static Job.Builder jobWithPartitionUser(String id) {
+ Detector.Builder detector = new Detector.Builder();
+ detector.setFunction("count");
+ detector.setPartitionFieldName("user");
+ return newJobBuilder(id, detector.build());
+ }
+
+ private static Job.Builder jobWithByUser(String id) {
+ Detector.Builder detector = new Detector.Builder();
+ detector.setFunction("count");
+ detector.setByFieldName("user");
+ return newJobBuilder(id, detector.build());
+ }
+
+ private static Job.Builder newJobBuilder(String id, Detector detector) {
+ AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Arrays.asList(detector));
+ analysisConfig.setBucketSpan(TimeValue.timeValueHours(1));
+ DataDescription.Builder dataDescription = new DataDescription.Builder();
+ dataDescription.setTimeField("time");
+ Job.Builder jobBuilder = new Job.Builder(id);
+ jobBuilder.setAnalysisConfig(analysisConfig);
+ jobBuilder.setDataDescription(dataDescription);
+ return jobBuilder;
+ }
+
+ private static DatafeedConfig newDatafeed(String datafeedId, String jobId) {
+ DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, jobId);
+ datafeedConfig.setIndices(Arrays.asList(DATA_INDEX));
+ return datafeedConfig.build();
+ }
+
+ private Set modelPlotTerms(String jobId, String fieldName) {
+ SearchResponse searchResponse = client().prepareSearch(".ml-anomalies-" + jobId)
+ .setQuery(QueryBuilders.termQuery("result_type", "model_plot"))
+ .addAggregation(AggregationBuilders.terms("model_plot_terms").field(fieldName))
+ .get();
+
+ Terms aggregation = searchResponse.getAggregations().get("model_plot_terms");
+ return aggregation.getBuckets().stream().map(agg -> agg.getKeyAsString()).collect(Collectors.toSet());
+ }
+}
diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java
new file mode 100644
index 00000000000..993f3707237
--- /dev/null
+++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.integration;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.xpack.core.ml.action.GetBucketsAction;
+import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
+import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
+import org.elasticsearch.xpack.core.ml.job.config.Detector;
+import org.elasticsearch.xpack.core.ml.job.config.Job;
+import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
+import org.junit.After;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ * Tests that after reopening a job and sending more
+ * data after a gap, data counts are reported correctly.
+ */
+public class ReopenJobWithGapIT extends MlNativeAutodetectIntegTestCase {
+
+ private static final String JOB_ID = "reopen-job-with-gap-test";
+ private static final long BUCKET_SPAN_SECONDS = 3600;
+
+ @After
+ public void cleanUpTest() {
+ cleanUp();
+ }
+
+ public void test() throws Exception {
+ AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(
+ Collections.singletonList(new Detector.Builder("count", null).build()));
+ analysisConfig.setBucketSpan(TimeValue.timeValueSeconds(BUCKET_SPAN_SECONDS));
+ DataDescription.Builder dataDescription = new DataDescription.Builder();
+ dataDescription.setTimeFormat("epoch");
+ Job.Builder job = new Job.Builder(JOB_ID);
+ job.setAnalysisConfig(analysisConfig);
+ job.setDataDescription(dataDescription);
+
+ registerJob(job);
+ putJob(job);
+ openJob(job.getId());
+
+ long timestamp = 1483228800L; // 2017-01-01T00:00:00Z
+ List<String> data = new ArrayList<>();
+ for (int i = 0; i < 10; i++) {
+ data.add(createJsonRecord(createRecord(timestamp)));
+ timestamp += BUCKET_SPAN_SECONDS;
+ }
+
+ postData(job.getId(), data.stream().collect(Collectors.joining()));
+ flushJob(job.getId(), true);
+ closeJob(job.getId());
+
+ GetBucketsAction.Request request = new GetBucketsAction.Request(job.getId());
+ request.setExcludeInterim(true);
+ assertThat(client().execute(GetBucketsAction.INSTANCE, request).actionGet().getBuckets().count(), equalTo(9L));
+ assertThat(getJobStats(job.getId()).get(0).getDataCounts().getBucketCount(), equalTo(9L));
+
+ timestamp += 10 * BUCKET_SPAN_SECONDS;
+ data = new ArrayList<>();
+ for (int i = 0; i < 10; i++) {
+ data.add(createJsonRecord(createRecord(timestamp)));
+ timestamp += BUCKET_SPAN_SECONDS;
+ }
+
+ openJob(job.getId());
+ postData(job.getId(), data.stream().collect(Collectors.joining()));
+ flushJob(job.getId(), true);
+ closeJob(job.getId());
+
+ assertThat(client().execute(GetBucketsAction.INSTANCE, request).actionGet().getBuckets().count(), equalTo(29L));
+ DataCounts dataCounts = getJobStats(job.getId()).get(0).getDataCounts();
+ assertThat(dataCounts.getBucketCount(), equalTo(29L));
+ assertThat(dataCounts.getEmptyBucketCount(), equalTo(10L));
+ }
+
+ private static Map<String, Object> createRecord(long timestamp) {
+ Map<String, Object> record = new HashMap<>();
+ record.put("time", timestamp);
+ return record;
+ }
+}
diff --git a/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml b/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml
index 89db4df927e..dc18ecd8a70 100644
--- a/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml
+++ b/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml
@@ -19,7 +19,7 @@ setup:
name: "x_cluster_role"
body: >
{
- "cluster": ["all"],
+ "cluster": [],
"indices": [
{
"names": ["local_index", "my_remote_cluster:test_i*", "my_remote_cluster:aliased_test_index", "test_remote_cluster:test_i*", "my_remote_cluster:secure_alias"],
diff --git a/x-pack/qa/multi-node/build.gradle b/x-pack/qa/multi-node/build.gradle
index 577e213386d..c17398d80df 100644
--- a/x-pack/qa/multi-node/build.gradle
+++ b/x-pack/qa/multi-node/build.gradle
@@ -13,7 +13,7 @@ integTestCluster {
setting 'xpack.watcher.enabled', 'false'
setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.ml.enabled', 'false'
- setting 'logger.org.elasticsearch.xpack.security.authc', 'TRACE'
+ setting 'xpack.license.self_generated.type', 'trial'
extraConfigFile 'roles.yml', 'roles.yml'
setupCommand 'setup-test-user', 'bin/elasticsearch-users', 'useradd', 'test-user', '-p', 'x-pack-test-password', '-r', 'test'
setupCommand 'setup-super-user', 'bin/elasticsearch-users', 'useradd', 'super-user', '-p', 'x-pack-super-password', '-r', 'superuser'
diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle
index 9fd58096af0..ce3b03ebebd 100644
--- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle
+++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle
@@ -178,12 +178,13 @@ integTestCluster {
setting 'xpack.monitoring.exporters._http.auth.password', 'x-pack-test-password'
setting 'xpack.monitoring.exporters._http.ssl.verification_mode', 'full'
- setting 'xpack.index_lifecycle.enabled', 'false'
+ setting 'xpack.license.self_generated.type', 'trial'
setting 'xpack.security.enabled', 'true'
setting 'xpack.security.http.ssl.enabled', 'true'
setting 'xpack.security.http.ssl.keystore.path', nodeKeystore.name
keystoreSetting 'xpack.security.http.ssl.keystore.secure_password', 'keypass'
+ setting 'xpack.index_lifecycle.enabled', 'false'
setting 'xpack.ml.enabled', 'false'
// copy keystores into config/
diff --git a/x-pack/qa/smoke-test-plugins/build.gradle b/x-pack/qa/smoke-test-plugins/build.gradle
index 207fa8204db..4badc9d3509 100644
--- a/x-pack/qa/smoke-test-plugins/build.gradle
+++ b/x-pack/qa/smoke-test-plugins/build.gradle
@@ -1,6 +1,4 @@
import org.elasticsearch.gradle.MavenFilteringHack
-import org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin
-import org.elasticsearch.gradle.plugin.PluginBuildPlugin
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
@@ -18,6 +16,7 @@ project(':plugins').getChildProjects().each { pluginName, pluginProject ->
integTestCluster {
setting 'xpack.security.enabled', 'true'
+ setting 'xpack.license.self_generated.type', 'trial'
setupCommand 'setupDummyUser',
'bin/elasticsearch-users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
waitCondition = { node, ant ->
diff --git a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java
index 529a1aaec27..1c8204aa1ec 100644
--- a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java
+++ b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java
@@ -37,6 +37,8 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase {
private static final String TEST_ADMIN_USERNAME = "test_admin";
private static final String TEST_ADMIN_PASSWORD = "x-pack-test-password";
+ private String watchId = randomAlphaOfLength(20);
+
@Before
public void startWatcher() throws Exception {
StringEntity entity = new StringEntity("{ \"value\" : \"15\" }", ContentType.APPLICATION_JSON);
@@ -87,7 +89,6 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase {
@After
public void stopWatcher() throws Exception {
- adminClient().performRequest("DELETE", "_xpack/watcher/watch/my_watch");
assertOK(adminClient().performRequest("DELETE", "my_test_index"));
assertBusy(() -> {
@@ -147,14 +148,14 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase {
builder.startObject("condition").startObject("compare").startObject("ctx.payload.hits.total").field("gte", 1)
.endObject().endObject().endObject();
builder.startObject("actions").startObject("logging").startObject("logging")
- .field("text", "successfully ran my_watch to test for search inpput").endObject().endObject().endObject();
+ .field("text", "successfully ran " + watchId + "to test for search inpput").endObject().endObject().endObject();
builder.endObject();
- indexWatch("my_watch", builder);
+ indexWatch(watchId, builder);
}
// check history, after watch has fired
- ObjectPath objectPath = getWatchHistoryEntry("my_watch", "executed");
+ ObjectPath objectPath = getWatchHistoryEntry(watchId, "executed");
boolean conditionMet = objectPath.evaluate("hits.hits.0._source.result.condition.met");
assertThat(conditionMet, is(true));
}
@@ -174,11 +175,11 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase {
.field("text", "this should never be logged").endObject().endObject().endObject();
builder.endObject();
- indexWatch("my_watch", builder);
+ indexWatch(watchId, builder);
}
// check history, after watch has fired
- ObjectPath objectPath = getWatchHistoryEntry("my_watch");
+ ObjectPath objectPath = getWatchHistoryEntry(watchId);
String state = objectPath.evaluate("hits.hits.0._source.state");
assertThat(state, is("execution_not_needed"));
boolean conditionMet = objectPath.evaluate("hits.hits.0._source.result.condition.met");
@@ -201,11 +202,11 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase {
.endObject().endObject().endObject();
builder.endObject();
- indexWatch("my_watch", builder);
+ indexWatch(watchId, builder);
}
// check history, after watch has fired
- ObjectPath objectPath = getWatchHistoryEntry("my_watch", "executed");
+ ObjectPath objectPath = getWatchHistoryEntry(watchId, "executed");
boolean conditionMet = objectPath.evaluate("hits.hits.0._source.result.condition.met");
assertThat(conditionMet, is(true));
@@ -232,10 +233,10 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase {
.endObject().endObject().endObject();
builder.endObject();
- indexWatch("my_watch", builder);
+ indexWatch(watchId, builder);
}
- getWatchHistoryEntry("my_watch");
+ getWatchHistoryEntry(watchId);
Response response = adminClient().performRequest("GET", "my_test_index/doc/some-id",
Collections.singletonMap("ignore", "404"));
@@ -254,10 +255,10 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase {
.endObject().endObject().endObject();
builder.endObject();
- indexWatch("my_watch", builder);
+ indexWatch(watchId, builder);
}
- ObjectPath objectPath = getWatchHistoryEntry("my_watch", "executed");
+ ObjectPath objectPath = getWatchHistoryEntry(watchId, "executed");
boolean conditionMet = objectPath.evaluate("hits.hits.0._source.result.condition.met");
assertThat(conditionMet, is(true));
@@ -278,10 +279,10 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase {
.endObject().endObject().endObject();
builder.endObject();
- indexWatch("my_watch", builder);
+ indexWatch(watchId, builder);
}
- ObjectPath objectPath = getWatchHistoryEntry("my_watch", "executed");
+ ObjectPath objectPath = getWatchHistoryEntry(watchId, "executed");
boolean conditionMet = objectPath.evaluate("hits.hits.0._source.result.condition.met");
assertThat(conditionMet, is(true));
@@ -293,7 +294,7 @@ public class SmokeTestWatcherWithSecurityIT extends ESRestTestCase {
private void indexWatch(String watchId, XContentBuilder builder) throws Exception {
StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
- Response response = client().performRequest("PUT", "_xpack/watcher/watch/my_watch", Collections.emptyMap(), entity);
+ Response response = client().performRequest("PUT", "_xpack/watcher/watch/" + watchId, Collections.emptyMap(), entity);
assertOK(response);
Map<String, Object> responseMap = entityAsMap(response);
assertThat(responseMap, hasEntry("_id", watchId));
diff --git a/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java
index 35a70a0aaeb..86d97d01904 100644
--- a/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java
+++ b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java
@@ -69,9 +69,6 @@ public class SmokeTestWatcherTestSuiteIT extends ESRestTestCase {
assertThat(templateExistsResponse.getStatusLine().getStatusCode(), is(200));
}
});
-
- // TODO why does the test fail without this? relaoding isseu with the new approach? Make sure to write a unit test!
- assertOK(adminClient().performRequest("PUT", ".watches"));
}
@After
diff --git a/x-pack/qa/sql/security/build.gradle b/x-pack/qa/sql/security/build.gradle
index 35434b60c17..5c3169d9d20 100644
--- a/x-pack/qa/sql/security/build.gradle
+++ b/x-pack/qa/sql/security/build.gradle
@@ -26,6 +26,7 @@ subprojects {
setting 'xpack.security.audit.enabled', 'true'
setting 'xpack.security.audit.outputs', 'logfile'
setting 'xpack.security.enabled', 'true'
+ setting 'xpack.license.self_generated.type', 'trial'
// Setup roles used by tests
extraConfigFile 'roles.yml', '../roles.yml'
/* Setup the one admin user that we run the tests as.
@@ -45,6 +46,7 @@ subprojects {
setting 'xpack.security.audit.enabled', 'true'
setting 'xpack.security.audit.outputs', 'logfile'
setting 'xpack.security.enabled', 'true'
+ setting 'xpack.license.self_generated.type', 'trial'
// Setup roles used by tests
extraConfigFile 'roles.yml', '../roles.yml'
/* Setup the one admin user that we run the tests as.
diff --git a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/JdbcSqlSpecIT.java b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/JdbcSqlSpecIT.java
index caa06135959..609847f513e 100644
--- a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/JdbcSqlSpecIT.java
+++ b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/JdbcSqlSpecIT.java
@@ -5,13 +5,11 @@
*/
package org.elasticsearch.xpack.qa.sql.security;
-import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.qa.sql.jdbc.SqlSpecTestCase;
import java.util.Properties;
-@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30292")
public class JdbcSqlSpecIT extends SqlSpecTestCase {
public JdbcSqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, String query) {
super(fileName, groupName, testName, lineNumber, query);