Merge branch 'master' into index-lifecycle

* master: (35 commits)
  DOCS: Correct mapping tags in put-template api
  DOCS: Fix broken link in the put index template api
  Add put index template api to high level rest client (#30400)
  Relax testAckedIndexing to allow document updating
  [Docs] Add snippets for POS stop tags default value
  Move respect accept header on no handler to 6.3.1
  Respect accept header on no handler (#30383)
  [Test] Add analysis-nori plugin to the vagrant tests
  [Docs] Fix bad link
  [Docs] Fix end of section in the korean plugin docs
  Expose the Lucene Korean analyzer module in a plugin (#30397)
  Docs: remove transport_client from CCS role example (#30263)
  [Rollup] Validate timezone in range queries (#30338)
  Use readFully() to read bytes from CipherInputStream (#28515)
  Fix docs: recently merged #29229 had a doc bug that broke the doc build. This commit fixes it.
  Test: remove cluster permission from CCS user (#30262)
  Add Get Settings API support to java high-level rest client (#29229)
  Watcher: Remove unneeded index deletion in tests
  Set the new lucene version for 6.4.0
  [ML][TEST] Clean up jobs in ModelPlotIT
  ...
Jason Tedor 2018-05-06 18:17:36 -04:00
commit ec71144040
140 changed files with 4431 additions and 394 deletions

View File

@@ -1,5 +1,5 @@
elasticsearch = 7.0.0-alpha1
lucene = 7.3.0
lucene = 7.4.0-snapshot-1ed95c097b
# optional dependencies
spatial4j = 0.7

View File

@@ -43,12 +43,16 @@ import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse;
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeResponse;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import java.io.IOException;
import java.util.Collections;
@@ -265,6 +269,28 @@ public final class IndicesClient {
listener, emptySet(), headers);
}
/**
* Retrieve the settings of one or more indices
* <p>
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-settings.html">
* Indices Get Settings API on elastic.co</a>
*/
public GetSettingsResponse getSettings(GetSettingsRequest getSettingsRequest, Header... headers) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(getSettingsRequest, RequestConverters::getSettings,
GetSettingsResponse::fromXContent, emptySet(), headers);
}
/**
* Asynchronously retrieve the settings of one or more indices
* <p>
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-settings.html">
* Indices Get Settings API on elastic.co</a>
*/
public void getSettingsAsync(GetSettingsRequest getSettingsRequest, ActionListener<GetSettingsResponse> listener, Header... headers) {
restHighLevelClient.performRequestAsyncAndParseEntity(getSettingsRequest, RequestConverters::getSettings,
GetSettingsResponse::fromXContent, listener, emptySet(), headers);
}
/**
* Force merge one or more indices using the Force Merge API
* <p>
@@ -432,4 +458,26 @@ public final class IndicesClient {
UpdateSettingsResponse::fromXContent, listener, emptySet(), headers);
}
/**
* Puts an index template using the Index Templates API
* <p>
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html"> Index Templates API
* on elastic.co</a>
*/
public PutIndexTemplateResponse putTemplate(PutIndexTemplateRequest putIndexTemplateRequest, Header... headers) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(putIndexTemplateRequest, RequestConverters::putTemplate,
PutIndexTemplateResponse::fromXContent, emptySet(), headers);
}
/**
* Asynchronously puts an index template using the Index Templates API
* <p>
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html"> Index Templates API
* on elastic.co</a>
*/
public void putTemplateAsync(PutIndexTemplateRequest putIndexTemplateRequest,
ActionListener<PutIndexTemplateResponse> listener, Header... headers) {
restHighLevelClient.performRequestAsyncAndParseEntity(putIndexTemplateRequest, RequestConverters::putTemplate,
PutIndexTemplateResponse::fromXContent, listener, emptySet(), headers);
}
}
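// A minimal usage sketch of the two new APIs (hypothetical index and template names, not part
// of this commit), assuming a RestHighLevelClient named "client":
//
//     GetSettingsRequest getReq = new GetSettingsRequest().indices("logs");
//     String shards = client.indices().getSettings(getReq).getSetting("logs", "index.number_of_shards");
//
//     PutIndexTemplateRequest putReq = new PutIndexTemplateRequest("logs-template")
//             .patterns(Arrays.asList("logs-*"))
//             .settings(Settings.builder().put("index.number_of_shards", 3));
//     boolean acknowledged = client.indices().putTemplate(putReq).isAcknowledged();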

View File

@@ -44,8 +44,10 @@ import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
@@ -76,7 +78,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.rest.action.RestFieldCapabilitiesAction;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
@@ -85,10 +86,7 @@ import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.StringJoiner;
final class RequestConverters {
@@ -600,6 +598,22 @@ final class RequestConverters {
return request;
}
static Request getSettings(GetSettingsRequest getSettingsRequest) throws IOException {
String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices();
String[] names = getSettingsRequest.names() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.names();
String endpoint = endpoint(indices, "_settings", names);
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
Params params = new Params(request);
params.withIndicesOptions(getSettingsRequest.indicesOptions());
params.withLocal(getSettingsRequest.local());
params.withIncludeDefaults(getSettingsRequest.includeDefaults());
params.withMasterTimeout(getSettingsRequest.masterNodeTimeout());
return request;
}
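// For illustration (hypothetical request, not part of this commit): a GetSettingsRequest with
// indices("logs") and names("index.number_of_shards") is converted by this method into
// GET /logs/_settings/index.number_of_shards, with indices options, local, include_defaults
// and master_timeout added as query-string parameters when set.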
static Request indicesExist(GetIndexRequest getIndexRequest) {
// this can be called with no indices as argument by transport client, not via REST though
if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) {
@@ -630,6 +644,21 @@ final class RequestConverters {
return request;
}
static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException {
String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addPathPart(putIndexTemplateRequest.name()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
Params params = new Params(request);
params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
if (putIndexTemplateRequest.create()) {
params.putParam("create", Boolean.TRUE.toString());
}
if (Strings.hasText(putIndexTemplateRequest.cause())) {
params.putParam("cause", putIndexTemplateRequest.cause());
}
request.setEntity(createEntity(putIndexTemplateRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
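// For illustration (hypothetical request, not part of this commit): a PutIndexTemplateRequest
// named "logs-template" with create(true) is converted by this method into
// PUT /_template/logs-template?create=true, with the template definition serialized
// as the JSON request body.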
private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));

View File

@@ -51,14 +51,19 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeResponse;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
@@ -71,11 +76,19 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractRawValues;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
@@ -189,6 +202,108 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
}
}
public void testGetSettings() throws IOException {
String indexName = "get_settings_index";
Settings basicSettings = Settings.builder()
.put("number_of_shards", 1)
.put("number_of_replicas", 0)
.build();
createIndex(indexName, basicSettings);
GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indexName);
GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
highLevelClient().indices()::getSettingsAsync);
assertNull(getSettingsResponse.getSetting(indexName, "index.refresh_interval"));
assertEquals("1", getSettingsResponse.getSetting(indexName, "index.number_of_shards"));
updateIndexSettings(indexName, Settings.builder().put("refresh_interval", "30s"));
GetSettingsResponse updatedResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
highLevelClient().indices()::getSettingsAsync);
assertEquals("30s", updatedResponse.getSetting(indexName, "index.refresh_interval"));
}
public void testGetSettingsNonExistentIndex() throws IOException {
String nonExistentIndex = "index_that_doesnt_exist";
assertFalse(indexExists(nonExistentIndex));
GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(nonExistentIndex);
ElasticsearchException exception = expectThrows(ElasticsearchException.class,
() -> execute(getSettingsRequest, highLevelClient().indices()::getSettings, highLevelClient().indices()::getSettingsAsync));
assertEquals(RestStatus.NOT_FOUND, exception.status());
}
public void testGetSettingsFromMultipleIndices() throws IOException {
String indexName1 = "get_multiple_settings_one";
createIndex(indexName1, Settings.builder().put("number_of_shards", 2).build());
String indexName2 = "get_multiple_settings_two";
createIndex(indexName2, Settings.builder().put("number_of_shards", 3).build());
GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices("get_multiple_settings*");
GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
highLevelClient().indices()::getSettingsAsync);
assertEquals("2", getSettingsResponse.getSetting(indexName1, "index.number_of_shards"));
assertEquals("3", getSettingsResponse.getSetting(indexName2, "index.number_of_shards"));
}
public void testGetSettingsFiltered() throws IOException {
String indexName = "get_settings_index";
Settings basicSettings = Settings.builder()
.put("number_of_shards", 1)
.put("number_of_replicas", 0)
.build();
createIndex(indexName, basicSettings);
GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indexName).names("index.number_of_shards");
GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
highLevelClient().indices()::getSettingsAsync);
assertNull(getSettingsResponse.getSetting(indexName, "index.number_of_replicas"));
assertEquals("1", getSettingsResponse.getSetting(indexName, "index.number_of_shards"));
assertEquals(1, getSettingsResponse.getIndexToSettings().get("get_settings_index").size());
}
public void testGetSettingsWithDefaults() throws IOException {
String indexName = "get_settings_index";
Settings basicSettings = Settings.builder()
.put("number_of_shards", 1)
.put("number_of_replicas", 0)
.build();
createIndex(indexName, basicSettings);
GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indexName).includeDefaults(true);
GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
highLevelClient().indices()::getSettingsAsync);
assertNotNull(getSettingsResponse.getSetting(indexName, "index.refresh_interval"));
assertEquals(IndexSettings.DEFAULT_REFRESH_INTERVAL,
getSettingsResponse.getIndexToDefaultSettings().get("get_settings_index").getAsTime("index.refresh_interval", null));
assertEquals("1", getSettingsResponse.getSetting(indexName, "index.number_of_shards"));
}
public void testGetSettingsWithDefaultsFiltered() throws IOException {
String indexName = "get_settings_index";
Settings basicSettings = Settings.builder()
.put("number_of_shards", 1)
.put("number_of_replicas", 0)
.build();
createIndex(indexName, basicSettings);
GetSettingsRequest getSettingsRequest = new GetSettingsRequest()
.indices(indexName)
.names("index.refresh_interval")
.includeDefaults(true);
GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings,
highLevelClient().indices()::getSettingsAsync);
assertNull(getSettingsResponse.getSetting(indexName, "index.number_of_replicas"));
assertNull(getSettingsResponse.getSetting(indexName, "index.number_of_shards"));
assertEquals(0, getSettingsResponse.getIndexToSettings().get("get_settings_index").size());
assertEquals(1, getSettingsResponse.getIndexToDefaultSettings().get("get_settings_index").size());
}
public void testPutMapping() throws IOException {
{
// Add mappings to index
@@ -708,4 +823,59 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
+ "or check the breaking changes documentation for removed settings]"));
}
@SuppressWarnings("unchecked")
public void testPutTemplate() throws Exception {
PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest()
.name("my-template")
.patterns(Arrays.asList("pattern-1", "name-*"))
.order(10)
.create(randomBoolean())
.settings(Settings.builder().put("number_of_shards", "3").put("number_of_replicas", "0"))
.mapping("doc", "host_name", "type=keyword", "description", "type=text")
.alias(new Alias("alias-1").indexRouting("abc")).alias(new Alias("{index}-write").searchRouting("xyz"));
PutIndexTemplateResponse putTemplateResponse = execute(putTemplateRequest,
highLevelClient().indices()::putTemplate, highLevelClient().indices()::putTemplateAsync);
assertThat(putTemplateResponse.isAcknowledged(), equalTo(true));
Map<String, Object> templates = getAsMap("/_template/my-template");
assertThat(templates.keySet(), hasSize(1));
assertThat(extractValue("my-template.order", templates), equalTo(10));
assertThat(extractRawValues("my-template.index_patterns", templates), contains("pattern-1", "name-*"));
assertThat(extractValue("my-template.settings.index.number_of_shards", templates), equalTo("3"));
assertThat(extractValue("my-template.settings.index.number_of_replicas", templates), equalTo("0"));
assertThat(extractValue("my-template.mappings.doc.properties.host_name.type", templates), equalTo("keyword"));
assertThat(extractValue("my-template.mappings.doc.properties.description.type", templates), equalTo("text"));
assertThat((Map<String, String>) extractValue("my-template.aliases.alias-1", templates), hasEntry("index_routing", "abc"));
assertThat((Map<String, String>) extractValue("my-template.aliases.{index}-write", templates), hasEntry("search_routing", "xyz"));
}
public void testPutTemplateBadRequests() throws Exception {
RestHighLevelClient client = highLevelClient();
// Failed to validate because index patterns are missing
PutIndexTemplateRequest withoutPattern = new PutIndexTemplateRequest("t1");
ValidationException withoutPatternError = expectThrows(ValidationException.class,
() -> execute(withoutPattern, client.indices()::putTemplate, client.indices()::putTemplateAsync));
assertThat(withoutPatternError.validationErrors(), contains("index patterns are missing"));
// Create-only specified but a template exists already
PutIndexTemplateRequest goodTemplate = new PutIndexTemplateRequest("t2").patterns(Arrays.asList("qa-*", "prod-*"));
assertTrue(execute(goodTemplate, client.indices()::putTemplate, client.indices()::putTemplateAsync).isAcknowledged());
goodTemplate.create(true);
ElasticsearchException alreadyExistsError = expectThrows(ElasticsearchException.class,
() -> execute(goodTemplate, client.indices()::putTemplate, client.indices()::putTemplateAsync));
assertThat(alreadyExistsError.getDetailedMessage(),
containsString("[type=illegal_argument_exception, reason=index_template [t2] already exists]"));
goodTemplate.create(false);
assertTrue(execute(goodTemplate, client.indices()::putTemplate, client.indices()::putTemplateAsync).isAcknowledged());
// Rejected due to unknown settings
PutIndexTemplateRequest unknownSettingTemplate = new PutIndexTemplateRequest("t3")
.patterns(Collections.singletonList("any"))
.settings(Settings.builder().put("this-setting-does-not-exist", 100));
ElasticsearchStatusException unknownSettingError = expectThrows(ElasticsearchStatusException.class,
() -> execute(unknownSettingTemplate, client.indices()::putTemplate, client.indices()::putTemplateAsync));
assertThat(unknownSettingError.getDetailedMessage(), containsString("unknown setting [index.this-setting-does-not-exist]"));
}
}

View File

@@ -26,12 +26,11 @@ import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
@@ -46,9 +45,11 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkShardRequest;
import org.elasticsearch.action.delete.DeleteRequest;
@@ -69,6 +70,7 @@ import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.action.support.replication.ReplicationRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.RequestConverters.EndpointBuilder;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
@@ -76,14 +78,13 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.client.RequestConverters.EndpointBuilder;
import org.elasticsearch.client.RequestConverters.Params;
import org.elasticsearch.index.RandomCreateIndexGenerator;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.TermQueryBuilder;
@@ -92,7 +93,6 @@ import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.index.rankeval.RankEvalSpec;
import org.elasticsearch.index.rankeval.RatedRequest;
import org.elasticsearch.index.rankeval.RestRankEvalAction;
import org.elasticsearch.rest.action.RestFieldCapabilitiesAction;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
@@ -109,8 +109,6 @@ import org.elasticsearch.test.RandomObjects;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -119,7 +117,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.StringJoiner;
import java.util.function.Consumer;
import java.util.function.Function;
@@ -405,6 +402,52 @@ public class RequestConvertersTests extends ESTestCase {
assertNull(request.getEntity());
}
public void testGetSettings() throws IOException {
String[] indicesUnderTest = randomBoolean() ? null : randomIndicesNames(0, 5);
GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indicesUnderTest);
Map<String, String> expectedParams = new HashMap<>();
setRandomMasterTimeout(getSettingsRequest, expectedParams);
setRandomIndicesOptions(getSettingsRequest::indicesOptions, getSettingsRequest::indicesOptions, expectedParams);
setRandomLocal(getSettingsRequest, expectedParams);
if (randomBoolean()) {
//the request object will not have include_defaults present unless it is set to true
getSettingsRequest.includeDefaults(randomBoolean());
if (getSettingsRequest.includeDefaults()) {
expectedParams.put("include_defaults", Boolean.toString(true));
}
}
StringJoiner endpoint = new StringJoiner("/", "/", "");
if (indicesUnderTest != null && indicesUnderTest.length > 0) {
endpoint.add(String.join(",", indicesUnderTest));
}
endpoint.add("_settings");
if (randomBoolean()) {
String[] names = randomBoolean() ? null : new String[randomIntBetween(0, 3)];
if (names != null) {
for (int x = 0; x < names.length; x++) {
names[x] = randomAlphaOfLengthBetween(3, 10);
}
}
getSettingsRequest.names(names);
if (names != null && names.length > 0) {
endpoint.add(String.join(",", names));
}
}
Request request = RequestConverters.getSettings(getSettingsRequest);
assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
assertThat(request.getParameters(), equalTo(expectedParams));
assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
assertThat(request.getEntity(), nullValue());
}
public void testDeleteIndexEmptyIndices() {
String[] indices = randomBoolean() ? null : Strings.EMPTY_ARRAY;
ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate();
@@ -1384,6 +1427,48 @@ public class RequestConvertersTests extends ESTestCase {
assertEquals(expectedParams, request.getParameters());
}
public void testPutTemplateRequest() throws Exception {
Map<String, String> names = new HashMap<>();
names.put("log", "log");
names.put("template#1", "template%231");
names.put("-#template", "-%23template");
names.put("foo^bar", "foo%5Ebar");
PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest()
.name(randomFrom(names.keySet()))
.patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false)));
if (randomBoolean()) {
putTemplateRequest.order(randomInt());
}
if (randomBoolean()) {
putTemplateRequest.version(randomInt());
}
if (randomBoolean()) {
putTemplateRequest.settings(Settings.builder().put("setting-" + randomInt(), randomTimeValue()));
}
if (randomBoolean()) {
putTemplateRequest.mapping("doc-" + randomInt(), "field-" + randomInt(), "type=" + randomFrom("text", "keyword"));
}
if (randomBoolean()) {
putTemplateRequest.alias(new Alias("alias-" + randomInt()));
}
Map<String, String> expectedParams = new HashMap<>();
if (randomBoolean()) {
expectedParams.put("create", Boolean.TRUE.toString());
putTemplateRequest.create(true);
}
if (randomBoolean()) {
String cause = randomUnicodeOfCodepointLengthBetween(1, 50);
putTemplateRequest.cause(cause);
expectedParams.put("cause", cause);
}
setRandomMasterTimeout(putTemplateRequest, expectedParams);
Request request = RequestConverters.putTemplate(putTemplateRequest);
assertThat(request.getEndpoint(), equalTo("/_template/" + names.get(putTemplateRequest.name())));
assertThat(request.getParameters(), equalTo(expectedParams));
assertToXContentBody(putTemplateRequest, request.getEntity());
}
private static void assertToXContentBody(ToXContent expectedBody, HttpEntity actualEntity) throws IOException {
BytesReference expectedBytes = XContentHelper.toXContent(expectedBody, REQUEST_BODY_CONTENT_TYPE, false);
assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue());

View File

@@ -50,9 +50,15 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeResponse;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.IndicesOptions;
@@ -69,11 +75,14 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.equalTo;
/**
* This class is used to generate the Java Indices API documentation.
* You need to wrap your code between two tags like:
@@ -775,6 +784,119 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
public void testGetSettings() throws Exception {
RestHighLevelClient client = highLevelClient();
{
Settings settings = Settings.builder().put("number_of_shards", 3).build();
CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("index", settings));
assertTrue(createIndexResponse.isAcknowledged());
}
// tag::get-settings-request
GetSettingsRequest request = new GetSettingsRequest().indices("index"); // <1>
// end::get-settings-request
// tag::get-settings-request-names
request.names("index.number_of_shards"); // <1>
// end::get-settings-request-names
// tag::get-settings-request-indicesOptions
request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1>
// end::get-settings-request-indicesOptions
// tag::get-settings-execute
GetSettingsResponse getSettingsResponse = client.indices().getSettings(request);
// end::get-settings-execute
// tag::get-settings-response
String numberOfShardsString = getSettingsResponse.getSetting("index", "index.number_of_shards"); // <1>
Settings indexSettings = getSettingsResponse.getIndexToSettings().get("index"); // <2>
Integer numberOfShards = indexSettings.getAsInt("index.number_of_shards", null); // <3>
// end::get-settings-response
assertEquals("3", numberOfShardsString);
assertEquals(Integer.valueOf(3), numberOfShards);
assertNull("refresh_interval returned but was never set!",
getSettingsResponse.getSetting("index", "index.refresh_interval"));
// tag::get-settings-execute-listener
ActionListener<GetSettingsResponse> listener =
new ActionListener<GetSettingsResponse>() {
@Override
public void onResponse(GetSettingsResponse getSettingsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-settings-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-settings-execute-async
client.indices().getSettingsAsync(request, listener); // <1>
// end::get-settings-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
public void testGetSettingsWithDefaults() throws Exception {
RestHighLevelClient client = highLevelClient();
{
Settings settings = Settings.builder().put("number_of_shards", 3).build();
CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("index", settings));
assertTrue(createIndexResponse.isAcknowledged());
}
GetSettingsRequest request = new GetSettingsRequest().indices("index");
request.indicesOptions(IndicesOptions.lenientExpandOpen());
// tag::get-settings-request-include-defaults
request.includeDefaults(true); // <1>
// end::get-settings-request-include-defaults
GetSettingsResponse getSettingsResponse = client.indices().getSettings(request);
String numberOfShardsString = getSettingsResponse.getSetting("index", "index.number_of_shards");
Settings indexSettings = getSettingsResponse.getIndexToSettings().get("index");
Integer numberOfShards = indexSettings.getAsInt("index.number_of_shards", null);
// tag::get-settings-defaults-response
String refreshInterval = getSettingsResponse.getSetting("index", "index.refresh_interval"); // <1>
Settings indexDefaultSettings = getSettingsResponse.getIndexToDefaultSettings().get("index"); // <2>
// end::get-settings-defaults-response
assertEquals("3", numberOfShardsString);
assertEquals(Integer.valueOf(3), numberOfShards);
assertNotNull("with defaults enabled we should get a value for refresh_interval!", refreshInterval);
assertEquals(refreshInterval, indexDefaultSettings.get("index.refresh_interval"));
ActionListener<GetSettingsResponse> listener =
new ActionListener<GetSettingsResponse>() {
@Override
public void onResponse(GetSettingsResponse getSettingsResponse) {
}
@Override
public void onFailure(Exception e) {
}
};
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
client.indices().getSettingsAsync(request, listener);
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
public void testForceMergeIndex() throws Exception {
RestHighLevelClient client = highLevelClient();
@@ -1483,4 +1605,164 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
public void testPutTemplate() throws Exception {
RestHighLevelClient client = highLevelClient();
// tag::put-template-request
PutIndexTemplateRequest request = new PutIndexTemplateRequest("my-template"); // <1>
request.patterns(Arrays.asList("pattern-1", "log-*")); // <2>
// end::put-template-request
// tag::put-template-request-settings
request.settings(Settings.builder() // <1>
.put("index.number_of_shards", 3)
.put("index.number_of_replicas", 1)
);
// end::put-template-request-settings
{
// tag::put-template-request-mappings-json
request.mapping("tweet", // <1>
"{\n" +
" \"tweet\": {\n" +
" \"properties\": {\n" +
" \"message\": {\n" +
" \"type\": \"text\"\n" +
" }\n" +
" }\n" +
" }\n" +
"}", // <2>
XContentType.JSON);
// end::put-template-request-mappings-json
assertTrue(client.indices().putTemplate(request).isAcknowledged());
}
{
//tag::put-template-request-mappings-map
Map<String, Object> jsonMap = new HashMap<>();
Map<String, Object> message = new HashMap<>();
message.put("type", "text");
Map<String, Object> properties = new HashMap<>();
properties.put("message", message);
Map<String, Object> tweet = new HashMap<>();
tweet.put("properties", properties);
jsonMap.put("tweet", tweet);
request.mapping("tweet", jsonMap); // <1>
//end::put-template-request-mappings-map
assertTrue(client.indices().putTemplate(request).isAcknowledged());
}
{
//tag::put-template-request-mappings-xcontent
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
{
builder.startObject("tweet");
{
builder.startObject("properties");
{
builder.startObject("message");
{
builder.field("type", "text");
}
builder.endObject();
}
builder.endObject();
}
builder.endObject();
}
builder.endObject();
request.mapping("tweet", builder); // <1>
//end::put-template-request-mappings-xcontent
assertTrue(client.indices().putTemplate(request).isAcknowledged());
}
{
//tag::put-template-request-mappings-shortcut
request.mapping("tweet", "message", "type=text"); // <1>
//end::put-template-request-mappings-shortcut
assertTrue(client.indices().putTemplate(request).isAcknowledged());
}
// tag::put-template-request-aliases
request.alias(new Alias("twitter_alias").filter(QueryBuilders.termQuery("user", "kimchy"))); // <1>
request.alias(new Alias("{index}_alias").searchRouting("xyz")); // <2>
// end::put-template-request-aliases
// tag::put-template-request-order
request.order(20); // <1>
// end::put-template-request-order
// tag::put-template-request-version
request.version(4); // <1>
// end::put-template-request-version
// tag::put-template-whole-source
request.source("{\n" +
" \"index_patterns\": [\n" +
" \"log-*\",\n" +
" \"pattern-1\"\n" +
" ],\n" +
" \"order\": 1,\n" +
" \"settings\": {\n" +
" \"number_of_shards\": 1\n" +
" },\n" +
" \"mappings\": {\n" +
" \"tweet\": {\n" +
" \"properties\": {\n" +
" \"message\": {\n" +
" \"type\": \"text\"\n" +
" }\n" +
" }\n" +
" }\n" +
" },\n" +
" \"aliases\": {\n" +
" \"alias-1\": {},\n" +
" \"{index}-alias\": {}\n" +
" }\n" +
"}", XContentType.JSON); // <1>
// end::put-template-whole-source
// tag::put-template-request-create
request.create(true); // <1>
// end::put-template-request-create
// tag::put-template-request-masterTimeout
request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
request.masterNodeTimeout("1m"); // <2>
// end::put-template-request-masterTimeout
request.create(false); // make test happy
// tag::put-template-execute
PutIndexTemplateResponse putTemplateResponse = client.indices().putTemplate(request);
// end::put-template-execute
// tag::put-template-response
boolean acknowledged = putTemplateResponse.isAcknowledged(); // <1>
// end::put-template-response
assertTrue(acknowledged);
// tag::put-template-execute-listener
ActionListener<PutIndexTemplateResponse> listener =
new ActionListener<PutIndexTemplateResponse>() {
@Override
public void onResponse(PutIndexTemplateResponse putTemplateResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::put-template-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::put-template-execute-async
client.indices().putTemplateAsync(request, listener); // <1>
// end::put-template-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}

View File

@@ -3,6 +3,10 @@
[partintro]
--
// To add a release, copy and paste the template text
// and add a link to the new section. Note that release subheads must
// be floated and sections cannot be empty.
// Use these for links to issues and pulls. Note issues and pulls redirect one to
// each other on Github, so don't worry too much about using the right prefix.
:issue: https://github.com/elastic/elasticsearch/issues/
@@ -12,13 +16,52 @@ This section summarizes the changes in each release.
* <<release-notes-7.0.0>>
* <<release-notes-6.4.0>>
* <<release-notes-6.3.1>>
--
////
// To add a release, copy and paste the following text, uncomment the relevant
// sections, and add a link to the new section in the list of releases at the
// top of the page. Note that release subheads must be floated and sections
// cannot be empty.
// TEMPLATE:
// [[release-notes-n.n.n]]
// == {es} n.n.n
//[float]
[[breaking-n.n.n]]
//=== Breaking Changes
//[float]
//=== Breaking Java Changes
//[float]
//=== Deprecations
//[float]
//=== New Features
//[float]
//=== Enhancements
//[float]
//=== Bug Fixes
//[float]
//=== Regressions
//[float]
//=== Known Issues
////
[[release-notes-7.0.0]]
== {es} 7.0.0
coming[7.0.0]
[float]
[[breaking-7.0.0]]
=== Breaking Changes
@@ -37,6 +80,10 @@ Machine Learning::
* <<remove-http-enabled, Removed `http.enabled` setting>> ({pull}29601[#29601])
//[float]
//=== Breaking Java Changes
[float]
=== Deprecations
Monitoring::
* The `xpack.monitoring.collection.interval` setting can no longer be set to `-1`
@@ -48,6 +95,106 @@ Security::
mappings, get field mappings, and field capabilities API are now only the
ones that the user is authorized to access in case field level security is enabled.
//[float]
//=== New Features
//[float]
//=== Enhancements
[float]
=== Bug Fixes
Fixed the prerelease version of Elasticsearch in the `deb` package to sort before GA versions
({pull}29000[#29000])
Rollup::
* Validate timezone in range queries to ensure they match the selected job when
searching ({pull}30338[#30338])
[float]
=== Regressions
Fail snapshot operations early when creating or deleting a snapshot on a repository that has been
written to by an older Elasticsearch version after it was written to by a newer Elasticsearch version. ({pull}30140[#30140])
Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641])
//[float]
//=== Regressions
//[float]
//=== Known Issues
[[release-notes-6.4.0]]
== {es} 6.4.0
coming[6.4.0]
//[float]
[[breaking-6.4.0]]
//=== Breaking Changes
//[float]
//=== Breaking Java Changes
//[float]
//=== Deprecations
[float]
=== New Features
The new <<mapping-ignored-field,`_ignored`>> field makes it possible to know which fields
were ignored at index time because of the <<ignore-malformed,`ignore_malformed`>>
option. ({pull}29658[#29658])
A new analysis plugin called `analysis-nori` exposes the Lucene Korean
analysis module. ({pull}30397[#30397])
[float]
=== Enhancements
{ref-64}/breaking_64_api_changes.html#copy-source-settings-on-resize[Allow copying source settings on index resize operations] ({pull}30255[#30255])
Added new "Request" object flavored request methods. Prefer these instead of the
multi-argument versions. ({pull}29623[#29623])
The cluster state listener that decides whether watcher should be
stopped/started/paused now runs far less code in an executor but is more
synchronous and predictable. Also, the trigger engine thread is only started on
data nodes, and the Execute Watch API can be triggered regardless of whether
watcher is started or stopped. ({pull}30118[#30118])
Added put index template API to the high level rest client ({pull}30400[#30400])
[float]
=== Bug Fixes
Do not ignore request analysis/similarity settings on index resize operations when the source index already contains such settings ({pull}30216[#30216])
Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641])
Machine Learning::
* Account for gaps in data counts after job is reopened ({pull}30294[#30294])
Rollup::
* Validate timezone in range queries to ensure they match the selected job when
searching ({pull}30338[#30338])
//[float]
//=== Regressions
//[float]
//=== Known Issues
[[release-notes-6.3.1]]
== Elasticsearch version 6.3.1
coming[6.3.1]
//[float]
[[breaking-6.3.1]]
//=== Breaking Changes
//[float]
//=== Breaking Java Changes
@@ -63,51 +210,9 @@ ones that the user is authorized to access in case field level security is enabled
[float]
=== Bug Fixes
Fixed the prerelease version of Elasticsearch in the `deb` package to sort before GA versions
({pull}29000[#29000])
Reduce the number of object allocations made by {security} when resolving the indices and aliases for a request ({pull}30180[#30180])
=== Regressions
Fail snapshot operations early when creating or deleting a snapshot on a repository that has been
written to by an older Elasticsearch version after it was written to by a newer Elasticsearch version. ({pull}30140[#30140])
Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641])
//[float]
//=== Regressions
//[float]
//=== Known Issues
[[release-notes-6.4.0]]
== {es} 6.4.0
[float]
=== New Features
The new <<mapping-ignored-field,`_ignored`>> field makes it possible to know which fields
were ignored at index time because of the <<ignore-malformed,`ignore_malformed`>>
option. ({pull}29658[#29658])
[float]
=== Enhancements
{ref-64}/breaking_64_api_changes.html#copy-source-settings-on-resize[Allow copying source settings on index resize operations] ({pull}30255[#30255])
Added new "Request" object flavored request methods. Prefer these instead of the
multi-argument versions. ({pull}29623[#29623])
The cluster state listener that decides whether watcher should be
stopped/started/paused now runs far less code in an executor but is more
synchronous and predictable. Also, the trigger engine thread is only started on
data nodes, and the Execute Watch API can be triggered regardless of whether
watcher is started or stopped. ({pull}30118[#30118])
[float]
=== Bug Fixes
Do not ignore request analysis/similarity settings on index resize operations when the source index already contains such settings ({pull}30216[#30216])
Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641])
Respect accept header on requests with no handler ({pull}30383[#30383])
//[float]
//=== Regressions

View File

@@ -1,7 +1,7 @@
:version: 7.0.0-alpha1
:major-version: 7.x
:lucene_version: 7.3.0
:lucene_version_path: 7_3_0
:lucene_version: 7.4.0
:lucene_version_path: 7_4_0
:branch: master
:jdk: 1.8.0_131
:jdk_major: 8

View File

@@ -32,6 +32,7 @@ integTestCluster {
configFile 'analysis/synonym.txt'
configFile 'analysis/stemmer_override.txt'
configFile 'userdict_ja.txt'
configFile 'userdict_ko.txt'
configFile 'KeywordTokenizer.rbbi'
extraConfigFile 'hunspell/en_US/en_US.aff', '../server/src/test/resources/indices/analyze/conf_dir/hunspell/en_US/en_US.aff'
extraConfigFile 'hunspell/en_US/en_US.dic', '../server/src/test/resources/indices/analyze/conf_dir/hunspell/en_US/en_US.dic'

View File

@@ -0,0 +1,96 @@
[[java-rest-high-get-settings]]
=== Get Settings API
[[java-rest-high-get-settings-request]]
==== Get Settings Request
A `GetSettingsRequest` requires one or more `index` arguments:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request]
--------------------------------------------------
<1> The index whose settings we should retrieve
==== Optional arguments
The following arguments can optionally be provided:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request-names]
--------------------------------------------------
<1> One or more settings that will be the only settings retrieved. If unset, all settings will be retrieved
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request-include-defaults]
--------------------------------------------------
<1> If true, defaults will be returned for settings not explicitly set on the index
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request-indicesOptions]
--------------------------------------------------
<1> Setting `IndicesOptions` controls how unavailable indices are resolved and
how wildcard expressions are expanded
[[java-rest-high-get-settings-sync]]
==== Synchronous Execution
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-execute]
--------------------------------------------------
[[java-rest-high-get-settings-async]]
==== Asynchronous Execution
The asynchronous execution of a Get Settings request requires both the `GetSettingsRequest`
instance and an `ActionListener` instance to be passed to the asynchronous
method:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-execute-async]
--------------------------------------------------
<1> The `GetSettingsRequest` to execute and the `ActionListener` to use when
the execution completes
The asynchronous method does not block and returns immediately. Once it is
completed, the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.
A typical listener for `GetSettingsResponse` looks like:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument
[[java-rest-high-get-settings-response]]
==== Get Settings Response
The returned `GetSettingsResponse` lets you retrieve information about the
executed operation as follows:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-response]
--------------------------------------------------
<1> We can retrieve the setting value for a particular index directly from the response as a string
<2> We can also retrieve the Settings object for a particular index for further examination
<3> The returned `Settings` object provides convenience methods for non-`String` types (sketched below)
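For example, a minimal sketch of those convenience accessors (reusing the `index` example above; illustrative only, not one of the tested documentation snippets):
["source","java"]
--------------------------------------------------
Settings indexSettings = getSettingsResponse.getIndexToSettings().get("index");
// numeric accessor with a default value
Integer numberOfShards = indexSettings.getAsInt("index.number_of_shards", null);
// time-valued accessor; null here because refresh_interval was never set explicitly
TimeValue refreshInterval = indexSettings.getAsTime("index.refresh_interval", null);
--------------------------------------------------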
If the `includeDefaults` flag was set to true in the `GetSettingsRequest`, the
behavior of `GetSettingsResponse` will differ somewhat.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-defaults-response]
--------------------------------------------------
<1> Individual default setting values may be retrieved directly from the `GetSettingsResponse`
<2> We may retrieve a Settings object for an index that contains those settings with default values

View File

@@ -0,0 +1,168 @@
[[java-rest-high-put-template]]
=== Put Template API
[[java-rest-high-put-template-request]]
==== Put Index Template Request
A `PutIndexTemplateRequest` specifies the `name` of a template and the `patterns`
that control which new indices the template is applied to.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request]
--------------------------------------------------
<1> The name of the template
<2> The patterns of the template
==== Settings
The settings of the template will be applied to new indices whose names match the
template's patterns.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-settings]
--------------------------------------------------
<1> Settings for this template
[[java-rest-high-put-template-request-mappings]]
==== Mappings
The mapping of the template will be applied to new indices whose names match the
template's patterns.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-mappings-json]
--------------------------------------------------
<1> The type to define
<2> The mapping for this type, provided as a JSON string
The mapping source can be provided in different ways in addition to the
`String` example shown above:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-mappings-map]
--------------------------------------------------
<1> Mapping source provided as a `Map` which gets automatically converted
to JSON format
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-mappings-xcontent]
--------------------------------------------------
<1> Mapping source provided as an `XContentBuilder` object, using Elasticsearch's
built-in helpers to generate JSON content
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-mappings-shortcut]
--------------------------------------------------
<1> Mapping source provided as `Object` key-value pairs, which get converted to
JSON format
==== Aliases
The aliases of the template define aliases on indices whose names match the
template's patterns. The placeholder `{index}` can be used in an alias of a template.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-aliases]
--------------------------------------------------
<1> The alias to define
<2> The alias to define with placeholder
==== Order
If multiple templates match an index, their orders determine the sequence in which
the settings, mappings, and aliases of each matching template are applied.
Templates with lower orders are applied first, and higher orders override them; see the sketch below.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-order]
--------------------------------------------------
<1> The order of the template
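For example, a sketch of two overlapping templates (hypothetical names, illustrative only, not one of the tested documentation snippets). An index named `log-2018-01-01` matches both patterns and ends up with three shards, because the template with the higher order wins for overlapping settings:
["source","java"]
--------------------------------------------------
PutIndexTemplateRequest base = new PutIndexTemplateRequest("base-template")
        .patterns(Arrays.asList("log-*"))
        .order(0) // applied first
        .settings(Settings.builder().put("index.number_of_shards", 1));
PutIndexTemplateRequest override = new PutIndexTemplateRequest("override-template")
        .patterns(Arrays.asList("log-2018-*"))
        .order(20) // applied later, overrides overlapping settings
        .settings(Settings.builder().put("index.number_of_shards", 3));
--------------------------------------------------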
==== Version
A template can optionally specify a version number which can be used to simplify template
management by external systems.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-version]
--------------------------------------------------
<1> The version number of the template
==== Providing the whole source
The whole source including all of its sections (mappings, settings and aliases)
can also be provided:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-whole-source]
--------------------------------------------------
<1> The source provided as a JSON string. It can also be provided as a `Map`
or an `XContentBuilder`.
==== Optional arguments
The following arguments can optionally be provided:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-create]
--------------------------------------------------
<1> If set to `true`, only a new template is created; the request fails if a template with the same name already exists
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-request-masterTimeout]
--------------------------------------------------
<1> Timeout to connect to the master node as a `TimeValue`
<2> Timeout to connect to the master node as a `String`
[[java-rest-high-put-template-sync]]
==== Synchronous Execution
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-execute]
--------------------------------------------------
[[java-rest-high-put-template-async]]
==== Asynchronous Execution
The asynchronous execution of a put template request requires both the `PutIndexTemplateRequest`
instance and an `ActionListener` instance to be passed to the asynchronous method:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-execute-async]
--------------------------------------------------
<1> The `PutIndexTemplateRequest` to execute and the `ActionListener` to use when
the execution completes
The asynchronous method does not block and returns immediately. Once it is
completed, the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.
A typical listener for `PutIndexTemplateResponse` looks like:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument
[[java-rest-high-put-template-response]]
==== Put Index Template Response
The returned `PutIndexTemplateResponse` lets you retrieve information about the
executed operation as follows:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-template-response]
--------------------------------------------------
<1> Indicates whether all of the nodes have acknowledged the request
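For example, reading the flag is a one-liner (assuming the standard `isAcknowledged()` accessor and the hypothetical `putTemplateResponse` variable from the synchronous sketch above):

["source","java"]
--------------------------------------------------
boolean acknowledged = putTemplateResponse.isAcknowledged(); // true when all nodes acknowledged the request
--------------------------------------------------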
@ -69,6 +69,7 @@ Index Management::
* <<java-rest-high-force-merge>>
* <<java-rest-high-rollover-index>>
* <<java-rest-high-indices-put-settings>>
* <<java-rest-high-get-settings>>
Mapping Management::
* <<java-rest-high-put-mapping>>
@ -93,6 +94,8 @@ include::indices/put_mapping.asciidoc[]
include::indices/update_aliases.asciidoc[]
include::indices/exists_alias.asciidoc[]
include::indices/put_settings.asciidoc[]
include::indices/get_settings.asciidoc[]
include::indices/put_template.asciidoc[]
== Cluster APIs
@ -0,0 +1,407 @@
[[analysis-nori]]
=== Korean (nori) Analysis Plugin
The Korean (nori) Analysis plugin integrates the Lucene nori analysis
module into Elasticsearch. It uses the https://bitbucket.org/eunjeon/mecab-ko-dic[mecab-ko-dic dictionary]
to perform morphological analysis of Korean text.
:plugin_name: analysis-nori
include::install_remove.asciidoc[]
[[analysis-nori-analyzer]]
==== `nori` analyzer
The `nori` analyzer consists of the following tokenizer and token filters:
* <<analysis-nori-tokenizer,`nori_tokenizer`>>
* <<analysis-nori-speech,`nori_part_of_speech`>> token filter
* <<analysis-nori-readingform,`nori_readingform`>> token filter
* {ref}/analysis-lowercase-tokenfilter.html[`lowercase`] token filter
It supports the `decompound_mode` and `user_dictionary` settings from
<<analysis-nori-tokenizer,`nori_tokenizer`>> and the `stoptags` setting from
<<analysis-nori-speech,`nori_part_of_speech`>>.
[[analysis-nori-tokenizer]]
==== `nori_tokenizer`
The `nori_tokenizer` accepts the following settings:
`decompound_mode`::
+
--
The decompound mode determines how the tokenizer handles compound tokens.
It can be set to:
`none`::
No decomposition for compounds. Example output:
가거도항
가곡역
`discard`::
Decomposes compounds and discards the original form (*default*). Example output:
가곡역 => 가곡, 역
`mixed`::
Decomposes compounds and keeps the original form. Example output:
가곡역 => 가곡역, 가곡, 역
--
`user_dictionary`::
+
--
The Nori tokenizer uses the https://bitbucket.org/eunjeon/mecab-ko-dic[mecab-ko-dic dictionary] by default.
A `user_dictionary` with custom nouns (`NNG`) may be appended to the default dictionary.
The dictionary should have the following format:
[source,txt]
-----------------------
<token> [<token 1> ... <token n>]
-----------------------
The first token is mandatory and represents the custom noun that should be added to
the dictionary. For compound nouns, the custom segmentation can be provided
after the first token (`[<token 1> ... <token n>]`). The segmentation of the
custom compound nouns is controlled by the `decompound_mode` setting.
--
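Internally the plugin maps these settings onto Lucene's Korean analysis module, as the factory sources later in this commit show. A minimal Lucene-level sketch, with an arbitrarily chosen mode and no user dictionary:

["source","java"]
--------------------------------------------------
import org.apache.lucene.analysis.ko.KoreanTokenizer;

KoreanTokenizer tokenizer = new KoreanTokenizer(
    KoreanTokenizer.DEFAULT_TOKEN_ATTRIBUTE_FACTORY,
    null,                                 // no user dictionary
    KoreanTokenizer.DecompoundMode.MIXED, // NONE, DISCARD or MIXED
    false);                               // same final flag value as the plugin's factory passes
--------------------------------------------------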
As a demonstration of how the user dictionary can be used, save the following
dictionary to `$ES_HOME/config/userdict_ko.txt`:
[source,txt]
-----------------------
c++ <1>
C샤프
세종
세종시 세종 시 <2>
-----------------------
<1> A simple noun
<2> A compound noun (`세종시`) followed by its decomposition: `세종` and `시`.
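At the Lucene level, loading such a file boils down to `UserDictionary.open` over a character reader, exactly as the plugin's tokenizer factory (later in this commit) does. A minimal sketch with a hypothetical local path:

["source","java"]
--------------------------------------------------
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.lucene.analysis.ko.dict.UserDictionary;

try (Reader reader = Files.newBufferedReader(Paths.get("config/userdict_ko.txt"))) { // hypothetical path
    UserDictionary userDictionary = UserDictionary.open(reader); // parses the format described above
}
--------------------------------------------------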
Then create an analyzer as follows:
[source,js]
--------------------------------------------------
PUT nori_sample
{
"settings": {
"index": {
"analysis": {
"tokenizer": {
"nori_user_dict": {
"type": "nori_tokenizer",
"decompound_mode": "mixed",
"user_dictionary": "userdict_ko.txt"
}
},
"analyzer": {
"my_analyzer": {
"type": "custom",
"tokenizer": "nori_user_dict"
}
}
}
}
}
}
GET nori_sample/_analyze
{
"analyzer": "my_analyzer",
"text": "세종시" <1>
}
--------------------------------------------------
// CONSOLE
<1> Sejong city
The above `analyze` request returns the following:
[source,js]
--------------------------------------------------
{
"tokens" : [ {
"token" : "세종시",
"start_offset" : 0,
"end_offset" : 3,
"type" : "word",
"position" : 0,
"positionLength" : 2 <1>
}, {
"token" : "세종",
"start_offset" : 0,
"end_offset" : 2,
"type" : "word",
"position" : 0
}, {
"token" : "시",
"start_offset" : 2,
"end_offset" : 3,
"type" : "word",
"position" : 1
}]
}
--------------------------------------------------
// TESTRESPONSE
<1> This is a compound token that spans two positions (`mixed` mode).
The `nori_tokenizer` sets a number of additional attributes per token that are used by token filters
to modify the stream.
You can view all these additional attributes with the following request:
[source,js]
--------------------------------------------------
GET _analyze
{
"tokenizer": "nori_tokenizer",
"text": "뿌리가 깊은 나무는", <1>
"attributes" : ["posType", "leftPOS", "rightPOS", "morphemes", "reading"],
"explain": true
}
--------------------------------------------------
// CONSOLE
<1> A tree with deep roots
Which responds with:
[source,js]
--------------------------------------------------
{
"detail": {
"custom_analyzer": true,
"charfilters": [],
"tokenizer": {
"name": "nori_tokenizer",
"tokens": [
{
"token": "뿌리",
"start_offset": 0,
"end_offset": 2,
"type": "word",
"position": 0,
"leftPOS": "NNG(General Noun)",
"morphemes": null,
"posType": "MORPHEME",
"reading": null,
"rightPOS": "NNG(General Noun)"
},
{
"token": "가",
"start_offset": 2,
"end_offset": 3,
"type": "word",
"position": 1,
"leftPOS": "J(Ending Particle)",
"morphemes": null,
"posType": "MORPHEME",
"reading": null,
"rightPOS": "J(Ending Particle)"
},
{
"token": "깊",
"start_offset": 4,
"end_offset": 5,
"type": "word",
"position": 2,
"leftPOS": "VA(Adjective)",
"morphemes": null,
"posType": "MORPHEME",
"reading": null,
"rightPOS": "VA(Adjective)"
},
{
"token": "은",
"start_offset": 5,
"end_offset": 6,
"type": "word",
"position": 3,
"leftPOS": "E(Verbal endings)",
"morphemes": null,
"posType": "MORPHEME",
"reading": null,
"rightPOS": "E(Verbal endings)"
},
{
"token": "나무",
"start_offset": 7,
"end_offset": 9,
"type": "word",
"position": 4,
"leftPOS": "NNG(General Noun)",
"morphemes": null,
"posType": "MORPHEME",
"reading": null,
"rightPOS": "NNG(General Noun)"
},
{
"token": "는",
"start_offset": 9,
"end_offset": 10,
"type": "word",
"position": 5,
"leftPOS": "J(Ending Particle)",
"morphemes": null,
"posType": "MORPHEME",
"reading": null,
"rightPOS": "J(Ending Particle)"
}
]
},
"tokenfilters": []
}
}
--------------------------------------------------
// TESTRESPONSE
[[analysis-nori-speech]]
==== `nori_part_of_speech` token filter
The `nori_part_of_speech` token filter removes tokens that match a set of
part-of-speech tags. The list of supported tags and their meanings can be found here:
{lucene_version_path}/org/apache/lucene/analysis/ko/POS.Tag.html[Part of speech tags]
It accepts the following setting:
`stoptags`::
An array of part-of-speech tags that should be removed. It defaults to:
[source,js]
--------------------------------------------------
"stoptags": [
"E",
"IC",
"J",
"MAG", "MAJ", "MM",
"SP", "SSC", "SSO", "SC", "SE",
"XPN", "XSA", "XSN", "XSV",
"UNA", "NA", "VSV"
]
--------------------------------------------------
// NOTCONSOLE
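At the Lucene level, each tag name resolves to a `POS.Tag` and matching tokens are dropped, as the plugin's filter factory (later in this commit) shows. A minimal sketch, assuming an existing `TokenStream` named `tokenStream`:

["source","java"]
--------------------------------------------------
import java.util.Collections;
import java.util.Set;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ko.KoreanPartOfSpeechStopFilter;
import org.apache.lucene.analysis.ko.POS;

Set<POS.Tag> stopTags = Collections.singleton(POS.resolveTag("NR")); // drop numerals
TokenStream filtered = new KoreanPartOfSpeechStopFilter(tokenStream, stopTags);
--------------------------------------------------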
For example:
[source,js]
--------------------------------------------------
PUT nori_sample
{
"settings": {
"index": {
"analysis": {
"analyzer": {
"my_analyzer": {
"tokenizer": "nori_tokenizer",
"filter": [
"my_posfilter"
]
}
},
"filter": {
"my_posfilter": {
"type": "nori_part_of_speech",
"stoptags": [
"NR" <1>
]
}
}
}
}
}
}
GET nori_sample/_analyze
{
"analyzer": "my_analyzer",
"text": "여섯 용이" <2>
}
--------------------------------------------------
// CONSOLE
<1> Korean numerals should be removed (`NR`)
<2> Six dragons
Which responds with:
[source,js]
--------------------------------------------------
{
"tokens" : [ {
"token" : "용",
"start_offset" : 3,
"end_offset" : 4,
"type" : "word",
"position" : 1
}, {
"token" : "이",
"start_offset" : 4,
"end_offset" : 5,
"type" : "word",
"position" : 2
} ]
}
--------------------------------------------------
// TESTRESPONSE
[[analysis-nori-readingform]]
==== `nori_readingform` token filter
The `nori_readingform` token filter rewrites tokens written in Hanja to their Hangul form.
[source,js]
--------------------------------------------------
PUT nori_sample
{
"settings": {
"index":{
"analysis":{
"analyzer" : {
"my_analyzer" : {
"tokenizer" : "nori_tokenizer",
"filter" : ["nori_readingform"]
}
}
}
}
}
}
GET nori_sample/_analyze
{
"analyzer": "my_analyzer",
"text": "鄕歌" <1>
}
--------------------------------------------------
// CONSOLE
<1> A token written in Hanja: Hyangga
Which responds with:
[source,js]
--------------------------------------------------
{
"tokens" : [ {
"token" : "향가", <1>
"start_offset" : 0,
"end_offset" : 2,
"type" : "word",
"position" : 0
}]
}
--------------------------------------------------
// TESTRESPONSE
<1> The Hanja form is replaced by the Hangul translation.
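The filter takes no settings; at the Lucene level it simply wraps the stream, as in this minimal sketch (again assuming an existing `TokenStream` named `tokenStream`):

["source","java"]
--------------------------------------------------
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ko.KoreanReadingFormFilter;

TokenStream hangulForm = new KoreanReadingFormFilter(tokenStream); // rewrites Hanja tokens to their Hangul reading
--------------------------------------------------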
@ -20,6 +20,10 @@ transliteration.
Advanced analysis of Japanese using the http://www.atilika.org/[Kuromoji analyzer].
<<analysis-nori,Nori>>::
Morphological analysis of Korean using the Lucene Nori analyzer.
<<analysis-phonetic,Phonetic>>::
Analyzes tokens into their phonetic equivalent using Soundex, Metaphone,
@ -59,6 +63,8 @@ include::analysis-icu.asciidoc[]
include::analysis-kuromoji.asciidoc[]
include::analysis-nori.asciidoc[]
include::analysis-phonetic.asciidoc[]
include::analysis-smartcn.asciidoc[]
@ -16,10 +16,11 @@ Might look like:
name component version description
U7321H6 analysis-icu {version} The ICU Analysis plugin integrates Lucene ICU module into elasticsearch, adding ICU relates analysis components.
U7321H6 analysis-kuromoji {version} The Japanese (kuromoji) Analysis plugin integrates Lucene kuromoji analysis module into elasticsearch.
U7321H6 analysis-nori {version} The Korean (nori) Analysis plugin integrates Lucene nori analysis module into elasticsearch.
U7321H6 analysis-phonetic {version} The Phonetic Analysis plugin integrates phonetic token filter analysis with elasticsearch.
U7321H6 analysis-smartcn {version} Smart Chinese Analysis plugin integrates Lucene Smart Chinese analysis module into elasticsearch.
U7321H6 analysis-stempel {version} The Stempel (Polish) Analysis plugin integrates Lucene stempel (polish) analysis module into elasticsearch.
U7321H6 analysis-ukrainian {version} The Ukrainian Analysis plugin integrates the Lucene UkrainianMorfologikAnalyzer into elasticsearch.
U7321H6 discovery-azure-classic {version} The Azure Classic Discovery plugin allows to use Azure Classic API for the unicast discovery mechanism
U7321H6 discovery-ec2 {version} The EC2 discovery plugin allows to use AWS API for the unicast discovery mechanism.
U7321H6 discovery-file {version} Discovery file plugin enables unicast discovery from hosts stored in a file.
@ -7,7 +7,8 @@ produces a single token.
The `normalizer` is applied prior to indexing the keyword, as well as at
search-time when the `keyword` field is searched via a query parser such as
the <<query-dsl-match-query,`match`>> query.
the <<query-dsl-match-query,`match`>> query or via a term level query
such as the <<query-dsl-term-query,`term`>> query.
[source,js]
--------------------------------
@ -53,6 +54,15 @@ PUT index/_doc/3
POST index/_refresh
GET index/_search
{
"query": {
"term": {
"foo": "BAR"
}
}
}
GET index/_search
{
"query": {
@ -64,7 +74,7 @@ GET index/_search
--------------------------------
// CONSOLE
The above query matches documents 1 and 2 since `BÀR` is converted to `bar` at
The above queries match documents 1 and 2 since `BÀR` is converted to `bar` at
both index and query time.
[source,js]
@ -32,7 +32,7 @@ best tree_levels value to honor this precision. The value should be a
number followed by an optional distance unit. Valid distance units
include: `in`, `inch`, `yd`, `yard`, `mi`, `miles`, `km`, `kilometers`,
`m`,`meters`, `cm`,`centimeters`, `mm`, `millimeters`.
| `meters`
| `50m`
|`tree_levels` |Maximum number of layers to be used by the PrefixTree.
This can be used to control the precision of shape representations and
@ -42,7 +42,7 @@ certain level of understanding of the underlying implementation, users
may use the `precision` parameter instead. However, Elasticsearch only
uses the tree_levels parameter internally and this is what is returned
via the mapping API even if you use the precision parameter.
| `50m`
| various
|`strategy` |The strategy parameter defines the approach for how to
represent shapes at indexing and search time. It also influences the
@ -119,14 +119,14 @@ Geohashes are base32 encoded strings of the bits of the latitude and
longitude interleaved. So the longer the hash, the more precise it is.
Each character added to the geohash represents another tree level and
adds 5 bits of precision to the geohash. A geohash represents a
rectangular area and has 32 sub rectangles. The maximum amount of levels
in Elasticsearch is 24.
rectangular area and has 32 sub rectangles. The maximum number of levels
in Elasticsearch is 24; the default is 9.
* QuadPrefixTree - Uses a
http://en.wikipedia.org/wiki/Quadtree[quadtree] for grid squares.
Similar to geohash, quad trees interleave the bits of the latitude and
longitude, and the resulting hash is a bit set. A tree level in a quad tree
represents 2 bits in this bit set, one for each coordinate. The maximum
amount of levels for the quad trees in Elasticsearch is 50.
number of levels for the quad trees in Elasticsearch is 29; the default is 21.
[[spatial-strategy]]
[float]
@ -3,7 +3,8 @@
While the <<full-text-queries,full text queries>> will analyze the query
string before executing, the _term-level queries_ operate on the exact terms
that are stored in the inverted index.
that are stored in the inverted index, and will only normalize terms before
executing for <<keyword,`keyword`>> fields with a <<normalizer,`normalizer`>> property.
These queries are usually used for structured data like numbers, dates, and
enums, rather than full text fields. Alternatively, they allow you to craft
@ -0,0 +1,5 @@
# Additional nouns
c++
C샤프
세종
세종시 세종 시
@ -130,6 +130,8 @@ public class CommonAnalysisFactoryTests extends AnalysisFactoryTestCase {
filters.put("brazilianstem", BrazilianStemTokenFilterFactory.class);
filters.put("czechstem", CzechStemTokenFilterFactory.class);
filters.put("germanstem", GermanStemTokenFilterFactory.class);
// this filter is not exposed and should only be used internally
filters.put("fixedshingle", Void.class);
return filters;
}
@ -1 +0,0 @@
cb82d9db3043bbd25b4d0eb5022ed1e529c936d3
@ -0,0 +1 @@
63ff4af3504881744695f6239fcb3e9c0e3240b1
@ -30,7 +30,7 @@ forbiddenApis {
dependencies {
compile "org.apache.lucene:lucene-analyzers-icu:${versions.lucene}"
compile 'com.ibm.icu:icu4j:59.1'
compile 'com.ibm.icu:icu4j:61.1'
}
dependencyLicenses {
@ -1 +0,0 @@
6f06e820cf4c8968bbbaae66ae0b33f6a256b57f
@ -0,0 +1 @@
28d33b5e44e72edcc66a5da7a34a42147f38d987
@ -1 +0,0 @@
c09216a18658d5b2912566efff8665e45edc24b4
@ -0,0 +1 @@
5f3c053ef858c58c74a687a40f5451d19b69850b
@ -89,9 +89,9 @@ public class IcuTokenizerFactory extends AbstractTokenizerFactory {
// neither cjkAsWords nor myanmarAsWords is configurable yet.
ICUTokenizerConfig config = new DefaultICUTokenizerConfig(true, true) {
@Override
public BreakIterator getBreakIterator(int script) {
public RuleBasedBreakIterator getBreakIterator(int script) {
if (breakers[script] != null) {
return (BreakIterator) breakers[script].clone();
return (RuleBasedBreakIterator) breakers[script].clone();
} else {
return super.getBreakIterator(script);
}
@ -1 +0,0 @@
c9d5bbd0affa90b46e173c762c35419a54977c35
@ -0,0 +1 @@
a6e72085f7c2ade43ec0e5f52c227e6f715666ad
@ -0,0 +1,32 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
esplugin {
description 'The Korean (nori) Analysis plugin integrates Lucene nori analysis module into elasticsearch.'
classname 'org.elasticsearch.plugin.analysis.nori.AnalysisNoriPlugin'
}
dependencies {
compile "org.apache.lucene:lucene-analyzers-nori:${versions.lucene}"
}
dependencyLicenses {
mapping from: /lucene-.*/, to: 'lucene'
}
@ -0,0 +1,475 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was
derived from unicode conversion examples available at
http://www.unicode.org/Public/PROGRAMS/CVTUTF. Here is the copyright
from those sources:
/*
* Copyright 2001-2004 Unicode, Inc.
*
* Disclaimer
*
* This source code is provided as is by Unicode, Inc. No claims are
* made as to fitness for any particular purpose. No warranties of any
* kind are expressed or implied. The recipient agrees to determine
* applicability of information provided. If this file has been
* purchased on magnetic or optical media from Unicode, Inc., the
* sole remedy for any claim will be exchange of defective media
* within 90 days of receipt.
*
* Limitations on Rights to Redistribute This Code
*
* Unicode, Inc. hereby grants the right to freely use the information
* supplied in this file in the creation of products supporting the
* Unicode Standard, and to make copies of this file in any form
* for internal or external distribution as long as this notice
* remains attached.
*/
Some code in core/src/java/org/apache/lucene/util/ArrayUtil.java was
derived from Python 2.4.2 sources available at
http://www.python.org. Full license is here:
http://www.python.org/download/releases/2.4.2/license/
Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was
derived from Python 3.1.2 sources available at
http://www.python.org. Full license is here:
http://www.python.org/download/releases/3.1.2/license/
Some code in core/src/java/org/apache/lucene/util/automaton was
derived from Brics automaton sources available at
www.brics.dk/automaton/. Here is the copyright from those sources:
/*
* Copyright (c) 2001-2009 Anders Moeller
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
The levenshtein automata tables in core/src/java/org/apache/lucene/util/automaton
were automatically generated with the moman/finenight FSA package.
Here is the copyright for those sources:
# Copyright (c) 2010, Jean-Philippe Barrette-LaPierre, <jpb@rrette.com>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was
derived from ICU (http://www.icu-project.org)
The full license is available here:
http://source.icu-project.org/repos/icu/icu/trunk/license.html
/*
* Copyright (C) 1999-2010, International Business Machines
* Corporation and others. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, and/or sell copies of the
* Software, and to permit persons to whom the Software is furnished to do so,
* provided that the above copyright notice(s) and this permission notice appear
* in all copies of the Software and that both the above copyright notice(s) and
* this permission notice appear in supporting documentation.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE
* LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR
* ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
* IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
* OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* Except as contained in this notice, the name of a copyright holder shall not
* be used in advertising or otherwise to promote the sale, use or other
* dealings in this Software without prior written authorization of the
* copyright holder.
*/
The following license applies to the Snowball stemmers:
Copyright (c) 2001, Dr Martin Porter
Copyright (c) 2002, Richard Boulton
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* Neither the name of the copyright holders nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The following license applies to the KStemmer:
Copyright © 2003,
Center for Intelligent Information Retrieval,
University of Massachusetts, Amherst.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. The names "Center for Intelligent Information Retrieval" and
"University of Massachusetts" must not be used to endorse or promote products
derived from this software without prior written permission. To obtain
permission, contact info@ciir.cs.umass.edu.
THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF MASSACHUSETTS AND OTHER CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
The following license applies to the Morfologik project:
Copyright (c) 2006 Dawid Weiss
Copyright (c) 2007-2011 Dawid Weiss, Marcin Miłkowski
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Morfologik nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---
The dictionary comes from Morfologik project. Morfologik uses data from
Polish ispell/myspell dictionary hosted at http://www.sjp.pl/slownik/en/ and
is licenced on the terms of (inter alia) LGPL and Creative Commons
ShareAlike. The part-of-speech tags were added in Morfologik project and
are not found in the data from sjp.pl. The tagset is similar to IPI PAN
tagset.
---
The following license applies to the Morfeusz project,
used by org.apache.lucene.analysis.morfologik.
BSD-licensed dictionary of Polish (SGJP)
http://sgjp.pl/morfeusz/
Copyright © 2011 Zygmunt Saloni, Włodzimierz Gruszczyński,
Marcin Woliński, Robert Wołosz
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDERS “AS IS” AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@ -0,0 +1,204 @@
Apache Lucene
Copyright 2001-2018 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).
Includes software from other Apache Software Foundation projects,
including, but not limited to:
- Apache Ant
- Apache Jakarta Regexp
- Apache Commons
- Apache Xerces
ICU4J, (under analysis/icu) is licensed under an MIT styles license
and Copyright (c) 1995-2008 International Business Machines Corporation and others
Some data files (under analysis/icu/src/data) are derived from Unicode data such
as the Unicode Character Database. See http://unicode.org/copyright.html for more
details.
Brics Automaton (under core/src/java/org/apache/lucene/util/automaton) is
BSD-licensed, created by Anders Møller. See http://www.brics.dk/automaton/
The levenshtein automata tables (under core/src/java/org/apache/lucene/util/automaton) were
automatically generated with the moman/finenight FSA library, created by
Jean-Philippe Barrette-LaPierre. This library is available under an MIT license,
see http://sites.google.com/site/rrettesite/moman and
http://bitbucket.org/jpbarrette/moman/overview/
The class org.apache.lucene.util.WeakIdentityMap was derived from
the Apache CXF project and is Apache License 2.0.
The Google Code Prettify is Apache License 2.0.
See http://code.google.com/p/google-code-prettify/
JUnit (junit-4.10) is licensed under the Common Public License v. 1.0
See http://junit.sourceforge.net/cpl-v10.html
This product includes code (JaspellTernarySearchTrie) from Java Spelling Checking Package (jaspell): http://jaspell.sourceforge.net/
License: The BSD License (http://www.opensource.org/licenses/bsd-license.php)
The snowball stemmers in
analysis/common/src/java/net/sf/snowball
were developed by Martin Porter and Richard Boulton.
The snowball stopword lists in
analysis/common/src/resources/org/apache/lucene/analysis/snowball
were developed by Martin Porter and Richard Boulton.
The full snowball package is available from
http://snowball.tartarus.org/
The KStem stemmer in
analysis/common/src/org/apache/lucene/analysis/en
was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst)
under the BSD-license.
The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default
stopword list that is BSD-licensed created by Jacques Savoy. These files reside in:
analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt,
analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt,
analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt,
analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt,
analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt,
analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt
See http://members.unine.ch/jacques.savoy/clef/index.html.
The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers
(common) are based on BSD-licensed reference implementations created by Jacques Savoy and
Ljiljana Dolamic. These files reside in:
analysis/common/src/java/org/apache/lucene/analysis/de/GermanLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/de/GermanMinimalStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/es/SpanishLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/fi/FinnishLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchMinimalStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/hu/HungarianLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/it/ItalianLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/pt/PortugueseLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/ru/RussianLightStemmer.java
analysis/common/src/java/org/apache/lucene/analysis/sv/SwedishLightStemmer.java
The Stempel analyzer (stempel) includes BSD-licensed software developed
by the Egothor project http://egothor.sf.net/, created by Leo Galambos, Martin Kvapil,
and Edmond Nolan.
The Polish analyzer (stempel) comes with a default
stopword list that is BSD-licensed created by the Carrot2 project. The file resides
in stempel/src/resources/org/apache/lucene/analysis/pl/stopwords.txt.
See http://project.carrot2.org/license.html.
The SmartChineseAnalyzer source code (smartcn) was
provided by Xiaoping Gao and copyright 2009 by www.imdict.net.
WordBreakTestUnicode_*.java (under modules/analysis/common/src/test/)
is derived from Unicode data such as the Unicode Character Database.
See http://unicode.org/copyright.html for more details.
The Morfologik analyzer (morfologik) includes BSD-licensed software
developed by Dawid Weiss and Marcin Miłkowski (http://morfologik.blogspot.com/).
Morfologik uses data from Polish ispell/myspell dictionary
(http://www.sjp.pl/slownik/en/) licenced on the terms of (inter alia)
LGPL and Creative Commons ShareAlike.
Morfologic includes data from BSD-licensed dictionary of Polish (SGJP)
(http://sgjp.pl/morfeusz/)
Servlet-api.jar and javax.servlet-*.jar are under the CDDL license, the original
source code for this can be found at http://www.eclipse.org/jetty/downloads.php
===========================================================================
Kuromoji Japanese Morphological Analyzer - Apache Lucene Integration
===========================================================================
This software includes a binary and/or source version of data from
mecab-ipadic-2.7.0-20070801
which can be obtained from
http://atilika.com/releases/mecab-ipadic/mecab-ipadic-2.7.0-20070801.tar.gz
or
http://jaist.dl.sourceforge.net/project/mecab/mecab-ipadic/2.7.0-20070801/mecab-ipadic-2.7.0-20070801.tar.gz
===========================================================================
mecab-ipadic-2.7.0-20070801 Notice
===========================================================================
Nara Institute of Science and Technology (NAIST),
the copyright holders, disclaims all warranties with regard to this
software, including all implied warranties of merchantability and
fitness, in no event shall NAIST be liable for
any special, indirect or consequential damages or any damages
whatsoever resulting from loss of use, data or profits, whether in an
action of contract, negligence or other tortuous action, arising out
of or in connection with the use or performance of this software.
A large portion of the dictionary entries
originate from ICOT Free Software. The following conditions for ICOT
Free Software applies to the current dictionary as well.
Each User may also freely distribute the Program, whether in its
original form or modified, to any third party or parties, PROVIDED
that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear
on, or be attached to, the Program, which is distributed substantially
in the same form as set out herein and that such intended
distribution, if actually made, will neither violate or otherwise
contravene any of the laws and regulations of the countries having
jurisdiction over the User or the intended distribution itself.
NO WARRANTY
The program was produced on an experimental basis in the course of the
research and development conducted during the project and is provided
to users as so produced on an experimental basis. Accordingly, the
program is provided without any warranty whatsoever, whether express,
implied, statutory or otherwise. The term "warranty" used herein
includes, but is not limited to, any warranty of the quality,
performance, merchantability and fitness for a particular purpose of
the program and the nonexistence of any infringement or violation of
any right of any third party.
Each user of the program will agree and understand, and be deemed to
have agreed and understood, that there is no warranty whatsoever for
the program and, accordingly, the entire risk arising from or
otherwise connected with the program is assumed by the user.
Therefore, neither ICOT, the copyright holder, or any other
organization that participated in or was otherwise related to the
development of the program and their respective officials, directors,
officers and other employees shall be held liable for any and all
damages, including, without limitation, general, special, incidental
and consequential damages, arising out of or otherwise in connection
with the use or inability to use the program or any product, material
or result produced or otherwise obtained by using the program,
regardless of whether they have been advised of, or otherwise had
knowledge of, the possibility of such damages at any time during the
project or thereafter. Each user will be deemed to have agreed to the
foregoing by his or her commencement of use of the program. The term
"use" as used herein includes, but is not limited to, the use,
modification, copying and distribution of the program and the
production of secondary products from the program.
In the case where the program, whether in its original form or
modified, was distributed or delivered to or received by a user from
any person, organization or entity other than ICOT, unless it makes or
grants independently of ICOT any specific warranty to the user in
writing, such person, organization or entity, will also be exempted
from and not be held liable to the user for any such damages as noted
above as far as the program is concerned.
===========================================================================
Nori Korean Morphological Analyzer - Apache Lucene Integration
===========================================================================
This software includes a binary and/or source version of data from
mecab-ko-dic-2.0.3-20170922
which can be obtained from
https://bitbucket.org/eunjeon/mecab-ko-dic/downloads/mecab-ko-dic-2.0.3-20170922.tar.gz
@ -0,0 +1 @@
a7daed3dc3a67674862002f315cd9193944de783
@ -0,0 +1,54 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import java.util.List;
import java.util.Set;
import org.apache.lucene.analysis.ko.KoreanAnalyzer;
import org.apache.lucene.analysis.ko.KoreanPartOfSpeechStopFilter;
import org.apache.lucene.analysis.ko.KoreanTokenizer;
import org.apache.lucene.analysis.ko.dict.UserDictionary;
import org.apache.lucene.analysis.ko.POS;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import static org.elasticsearch.index.analysis.NoriPartOfSpeechStopFilterFactory.resolvePOSList;
public class NoriAnalyzerProvider extends AbstractIndexAnalyzerProvider<KoreanAnalyzer> {
private final KoreanAnalyzer analyzer;
public NoriAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
final KoreanTokenizer.DecompoundMode mode = NoriTokenizerFactory.getMode(settings);
final UserDictionary userDictionary = NoriTokenizerFactory.getUserDictionary(env, settings);
final List<String> tagList = Analysis.getWordList(env, settings, "stoptags");
final Set<POS.Tag> stopTags = tagList != null ? resolvePOSList(tagList) : KoreanPartOfSpeechStopFilter.DEFAULT_STOP_TAGS;
analyzer = new KoreanAnalyzer(userDictionary, mode, stopTags, false);
}
@Override
public KoreanAnalyzer get() {
return analyzer;
}
}
@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ko.KoreanPartOfSpeechStopFilter;
import org.apache.lucene.analysis.ko.POS;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class NoriPartOfSpeechStopFilterFactory extends AbstractTokenFilterFactory {
private final Set<POS.Tag> stopTags;
public NoriPartOfSpeechStopFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
List<String> tagList = Analysis.getWordList(env, settings, "stoptags");
this.stopTags = tagList != null ? resolvePOSList(tagList) : KoreanPartOfSpeechStopFilter.DEFAULT_STOP_TAGS;
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new KoreanPartOfSpeechStopFilter(tokenStream, stopTags);
}
static Set<POS.Tag> resolvePOSList(List<String> tagList) {
Set<POS.Tag> stopTags = new HashSet<>();
for (String tag : tagList) {
stopTags.add(POS.resolveTag(tag));
}
return stopTags;
}
}
@ -0,0 +1,37 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ko.KoreanReadingFormFilter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
public class NoriReadingFormFilterFactory extends AbstractTokenFilterFactory {
public NoriReadingFormFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings);
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new KoreanReadingFormFilter(tokenStream);
}
}
@ -0,0 +1,72 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.ko.KoreanTokenizer;
import org.apache.lucene.analysis.ko.dict.UserDictionary;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import java.io.IOException;
import java.io.Reader;
import java.util.Locale;
public class NoriTokenizerFactory extends AbstractTokenizerFactory {
private static final String USER_DICT_OPTION = "user_dictionary";
private final UserDictionary userDictionary;
private final KoreanTokenizer.DecompoundMode decompoundMode;
public NoriTokenizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
decompoundMode = getMode(settings);
userDictionary = getUserDictionary(env, settings);
}
public static UserDictionary getUserDictionary(Environment env, Settings settings) {
try (Reader reader = Analysis.getReaderFromFile(env, settings, USER_DICT_OPTION)) {
if (reader == null) {
return null;
} else {
return UserDictionary.open(reader);
}
} catch (IOException e) {
throw new ElasticsearchException("failed to load nori user dictionary", e);
}
}
public static KoreanTokenizer.DecompoundMode getMode(Settings settings) {
KoreanTokenizer.DecompoundMode mode = KoreanTokenizer.DEFAULT_DECOMPOUND;
String modeSetting = settings.get("decompound_mode", null);
if (modeSetting != null) {
mode = KoreanTokenizer.DecompoundMode.valueOf(modeSetting.toUpperCase(Locale.ENGLISH));
}
return mode;
}
@Override
public Tokenizer create() {
return new KoreanTokenizer(KoreanTokenizer.DEFAULT_TOKEN_ATTRIBUTE_FACTORY, userDictionary, decompoundMode, false);
}
}
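A configuration sketch in the style of the tests later in this diff; decompound_mode is parsed case-insensitively into Lucene's KoreanTokenizer.DecompoundMode (none, discard, or mixed, with discard assumed to be the Lucene default), and user_dictionary names a file resolved against the config directory (file name illustrative):
Settings settings = Settings.builder()
    .put("index.analysis.tokenizer.my_tokenizer.type", "nori_tokenizer")
    .put("index.analysis.tokenizer.my_tokenizer.decompound_mode", "mixed")          // none | discard | mixed
    .put("index.analysis.tokenizer.my_tokenizer.user_dictionary", "user_dict.txt")  // read from the config dir
    .build();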

View File

@ -0,0 +1,57 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugin.analysis.nori;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.analysis.AnalyzerProvider;
import org.elasticsearch.index.analysis.NoriAnalyzerProvider;
import org.elasticsearch.index.analysis.NoriPartOfSpeechStopFilterFactory;
import org.elasticsearch.index.analysis.NoriReadingFormFilterFactory;
import org.elasticsearch.index.analysis.NoriTokenizerFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;
import java.util.HashMap;
import java.util.Map;
import static java.util.Collections.singletonMap;
public class AnalysisNoriPlugin extends Plugin implements AnalysisPlugin {
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
Map<String, AnalysisProvider<TokenFilterFactory>> extra = new HashMap<>();
extra.put("nori_part_of_speech", NoriPartOfSpeechStopFilterFactory::new);
extra.put("nori_readingform", NoriReadingFormFilterFactory::new);
return extra;
}
@Override
public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
return singletonMap("nori_tokenizer", NoriTokenizerFactory::new);
}
@Override
public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
return singletonMap("nori", NoriAnalyzerProvider::new);
}
}

View File

@ -0,0 +1,48 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.ko.KoreanTokenizerFactory;
import org.elasticsearch.indices.analysis.AnalysisFactoryTestCase;
import org.elasticsearch.plugin.analysis.nori.AnalysisNoriPlugin;
import java.util.HashMap;
import java.util.Map;
public class AnalysisNoriFactoryTests extends AnalysisFactoryTestCase {
public AnalysisNoriFactoryTests() {
super(new AnalysisNoriPlugin());
}
@Override
protected Map<String, Class<?>> getTokenizers() {
Map<String, Class<?>> tokenizers = new HashMap<>(super.getTokenizers());
tokenizers.put("korean", KoreanTokenizerFactory.class);
return tokenizers;
}
@Override
protected Map<String, Class<?>> getTokenFilters() {
Map<String, Class<?>> filters = new HashMap<>(super.getTokenFilters());
filters.put("koreanpartofspeechstop", NoriPartOfSpeechStopFilterFactory.class);
filters.put("koreanreadingform", NoriReadingFormFilterFactory.class);
return filters;
}
}

View File

@ -0,0 +1,147 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.ko.KoreanAnalyzer;
import org.apache.lucene.analysis.ko.KoreanTokenizer;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugin.analysis.nori.AnalysisNoriPlugin;
import org.elasticsearch.test.ESTestCase.TestAnalysis;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.nio.file.Files;
import java.nio.file.Path;
import static org.hamcrest.Matchers.instanceOf;
public class NoriAnalysisTests extends ESTokenStreamTestCase {
public void testDefaultsNoriAnalysis() throws IOException {
TestAnalysis analysis = createTestAnalysis(Settings.EMPTY);
TokenizerFactory tokenizerFactory = analysis.tokenizer.get("nori_tokenizer");
assertThat(tokenizerFactory, instanceOf(NoriTokenizerFactory.class));
TokenFilterFactory filterFactory = analysis.tokenFilter.get("nori_part_of_speech");
assertThat(filterFactory, instanceOf(NoriPartOfSpeechStopFilterFactory.class));
filterFactory = analysis.tokenFilter.get("nori_readingform");
assertThat(filterFactory, instanceOf(NoriReadingFormFilterFactory.class));
IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers;
NamedAnalyzer analyzer = indexAnalyzers.get("nori");
assertThat(analyzer.analyzer(), instanceOf(KoreanAnalyzer.class));
}
public void testNoriAnalyzer() throws Exception {
Settings settings = Settings.builder()
.put("index.analysis.analyzer.my_analyzer.type", "nori")
.put("index.analysis.analyzer.my_analyzer.stoptags", "NR, SP")
.put("index.analysis.analyzer.my_analyzer.decompound_mode", "mixed")
.build();
TestAnalysis analysis = createTestAnalysis(settings);
Analyzer analyzer = analysis.indexAnalyzers.get("my_analyzer");
try (TokenStream stream = analyzer.tokenStream("", "여섯 용이" )) {
assertTokenStreamContents(stream, new String[] {"", ""});
}
try (TokenStream stream = analyzer.tokenStream("", "가늠표")) {
assertTokenStreamContents(stream, new String[] {"가늠표", "가늠", ""});
}
}
public void testNoriAnalyzerUserDict() throws Exception {
Settings settings = Settings.builder()
.put("index.analysis.analyzer.my_analyzer.type", "nori")
.put("index.analysis.analyzer.my_analyzer.user_dictionary", "user_dict.txt")
.build();
TestAnalysis analysis = createTestAnalysis(settings);
Analyzer analyzer = analysis.indexAnalyzers.get("my_analyzer");
try (TokenStream stream = analyzer.tokenStream("", "세종시" )) {
assertTokenStreamContents(stream, new String[] {"세종", ""});
}
try (TokenStream stream = analyzer.tokenStream("", "c++world")) {
assertTokenStreamContents(stream, new String[] {"c++", "world"});
}
}
public void testNoriTokenizer() throws Exception {
Settings settings = Settings.builder()
.put("index.analysis.tokenizer.my_tokenizer.type", "nori_tokenizer")
.put("index.analysis.tokenizer.my_tokenizer.decompound_mode", "mixed")
.build();
TestAnalysis analysis = createTestAnalysis(settings);
Tokenizer tokenizer = analysis.tokenizer.get("my_tokenizer").create();
tokenizer.setReader(new StringReader("뿌리가 깊은 나무"));
assertTokenStreamContents(tokenizer, new String[] {"뿌리", "", "", "", "나무"});
tokenizer.setReader(new StringReader("가늠표"));
assertTokenStreamContents(tokenizer, new String[] {"가늠표", "가늠", ""});
}
public void testNoriPartOfSpeech() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.filter.my_filter.type", "nori_part_of_speech")
.put("index.analysis.filter.my_filter.stoptags", "NR, SP")
.build();
TestAnalysis analysis = createTestAnalysis(settings);
TokenFilterFactory factory = analysis.tokenFilter.get("my_filter");
Tokenizer tokenizer = new KoreanTokenizer();
tokenizer.setReader(new StringReader("여섯 용이"));
TokenStream stream = factory.create(tokenizer);
assertTokenStreamContents(stream, new String[] {"", ""});
}
public void testNoriReadingForm() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_filter.type", "nori_readingform")
.build();
TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new AnalysisNoriPlugin());
TokenFilterFactory factory = analysis.tokenFilter.get("my_filter");
Tokenizer tokenizer = new KoreanTokenizer();
tokenizer.setReader(new StringReader("鄕歌"));
TokenStream stream = factory.create(tokenizer);
assertTokenStreamContents(stream, new String[] {"향가"});
}
private TestAnalysis createTestAnalysis(Settings analysisSettings) throws IOException {
InputStream dict = NoriAnalysisTests.class.getResourceAsStream("user_dict.txt");
Path home = createTempDir();
Path config = home.resolve("config");
Files.createDirectory(config);
Files.copy(dict, config.resolve("user_dict.txt"));
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(Environment.PATH_HOME_SETTING.getKey(), home)
.put(analysisSettings)
.build();
return AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new AnalysisNoriPlugin());
}
}

View File

@ -0,0 +1,39 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
public class NoriClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
public NoriClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
}

View File

@ -0,0 +1,5 @@
# Additional nouns
c++
C샤프
세종
세종시 세종 시
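# As exercised by testNoriAnalyzerUserDict above, each line is a surface form optionally
# followed by its whitespace-separated segmentation: "세종시 세종 시" makes the tokenizer
# split 세종시 into 세종 + 시, while single-token lines such as c++ are kept whole.
# A hypothetical additional entry would follow the same shape:
# 경복궁역 경복궁 역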

View File

@ -0,0 +1,48 @@
# Integration tests for Korean analysis components
#
---
"Analyzer":
- do:
indices.analyze:
body:
text: 뿌리가 깊은 나무
analyzer: nori
- length: { tokens: 3 }
- match: { tokens.0.token: 뿌리 }
- match: { tokens.1.token: 깊 }
- match: { tokens.2.token: 나무 }
---
"Tokenizer":
- do:
indices.analyze:
body:
text: 뿌리가 깊은 나무
tokenizer: nori_tokenizer
- length: { tokens: 5 }
- match: { tokens.0.token: 뿌리 }
- match: { tokens.1.token: 가 }
- match: { tokens.2.token: 깊 }
- match: { tokens.3.token: 은 }
- match: { tokens.4.token: 나무 }
---
"Part of speech filter":
- do:
indices.analyze:
body:
text: 뿌리가 깊은 나무
tokenizer: nori_tokenizer
filter: [nori_part_of_speech]
- length: { tokens: 3 }
- match: { tokens.0.token: 뿌리 }
- match: { tokens.1.token: 깊 }
- match: { tokens.2.token: 나무 }
---
"Reading filter":
- do:
indices.analyze:
body:
text: 鄕歌
tokenizer: nori_tokenizer
filter: [nori_readingform]
- length: { tokens: 1 }
- match: { tokens.0.token: 향가 }

View File

@ -0,0 +1,32 @@
# Integration tests for Korean analysis components
#
---
"Index Korean content":
- do:
indices.create:
index: test
body:
mappings:
type:
properties:
text:
type: text
analyzer: nori
- do:
index:
index: test
type: type
id: 1
body: { "text": "뿌리가 깊은 나무는" }
- do:
indices.refresh: {}
- do:
search:
index: test
body:
query:
match:
text: 나무
- match: { hits.total: 1 }

View File

@ -1 +0,0 @@
4e6c63fa8ae005d81d12f0d88ffa98346b443ac4

View File

@ -0,0 +1 @@
25c93466d0a2c41df0cf98de77d632f3f02fa98d

View File

@ -1 +0,0 @@
37b7ff0a6493f139cb77f5bda965ac0189c8efd1

View File

@ -0,0 +1 @@
4688aaa48607ac26f6bf2567052019ab3fb2ff5e

View File

@ -1 +0,0 @@
d189185da23b2221c4d532da5e2cacce735f8a0c

View File

@ -0,0 +1 @@
ad71de632c9363c3f200cd5a240686256c7db431

View File

@ -1 +0,0 @@
74462b51de45afe708f1042cc901fe7370413871

View File

@ -0,0 +1 @@
96a630a7c4916358f129f6bac8718108811efe1a

View File

@ -0,0 +1,59 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.http;
import org.apache.http.message.BasicHeader;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class NoHandlerIT extends HttpSmokeTestCase {
public void testNoHandlerRespectsAcceptHeader() throws IOException {
runTestNoHandlerRespectsAcceptHeader(
"application/json",
"application/json; charset=UTF-8",
"\"error\":\"no handler found for uri [/foo/bar/baz/qux/quux] and method [GET]\"");
runTestNoHandlerRespectsAcceptHeader(
"application/yaml",
"application/yaml",
"error: \"no handler found for uri [/foo/bar/baz/qux/quux] and method [GET]\"");
}
private void runTestNoHandlerRespectsAcceptHeader(
final String accept, final String contentType, final String expect) throws IOException {
final ResponseException e =
expectThrows(
ResponseException.class,
() -> getRestClient().performRequest("GET", "/foo/bar/baz/qux/quux", new BasicHeader("Accept", accept)));
final Response response = e.getResponse();
assertThat(response.getHeader("Content-Type"), equalTo(contentType));
assertThat(EntityUtils.toString(e.getResponse().getEntity()), containsString(expect));
assertThat(response.getStatusLine().getStatusCode(), is(400));
}
}
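The assertions above correspond to the following user-visible behavior; a sketch against a hypothetical local node:
curl -i -H 'Accept: application/yaml' 'http://localhost:9200/foo/bar/baz/qux/quux'
# HTTP/1.1 400 Bad Request
# Content-Type: application/yaml
# error: "no handler found for uri [/foo/bar/baz/qux/quux] and method [GET]"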

View File

@ -188,6 +188,10 @@ fi
install_and_check_plugin analysis kuromoji
}
@test "[$GROUP] install nori plugin" {
install_and_check_plugin analysis nori
}
@test "[$GROUP] install phonetic plugin" {
install_and_check_plugin analysis phonetic commons-codec-*.jar
}
@ -320,6 +324,10 @@ fi
remove_plugin analysis-kuromoji
}
@test "[$GROUP] remove nori plugin" {
remove_plugin analysis-nori
}
@test "[$GROUP] remove phonetic plugin" {
remove_plugin analysis-phonetic
}

View File

@ -16,6 +16,10 @@
}
},
"params": {
"master_timeout": {
"type": "time",
"description": "Specify timeout for connection to master"
},
"ignore_unavailable": {
"type" : "boolean",
"description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)"

View File

@ -1 +0,0 @@
4325a5cdf8d3fa23f326cd86a2297fee2bc844f5

View File

@ -0,0 +1 @@
2b2be48f6622c150496e755497e7bdb8daa46030

View File

@ -1 +0,0 @@
3b618a21a924cb35ac1f27d3ca47d9ed04f43588

View File

@ -0,0 +1 @@
6cbafc48e8ac4966377665eb3bbe93f9addf04a5

View File

@ -1 +0,0 @@
040e2de30c5e6bad868b144e371730200719ceb3

View File

@ -0,0 +1 @@
0b06e4f6514256a3f187a9892e520638b9c59e63

View File

@ -1 +0,0 @@
20a5c472a8be9bec7aa40472791389e875b9e1f2

View File

@ -0,0 +1 @@
4c71cef87fe513a7a96c2a7980ed6f7c2b015763

View File

@ -1 +0,0 @@
1f92c7d3d9bc2765fe6195bcc4fcb160d11175cc

View File

@ -0,0 +1 @@
665e044d1180100940bccd7e8e41dde48e342da3

View File

@ -1 +0,0 @@
da4af75a7e4fe7843fbfa4b58e6a238b6b706d64

View File

@ -0,0 +1 @@
d343bbf5792f5969288b59b51179acd29d04f4ee

View File

@ -1 +0,0 @@
fc45b02a5086ec454e6d6ae81fc2cbe7be1c0902

View File

@ -0,0 +1 @@
8915f3c93af3348655bcc204289f9011835738a2

View File

@ -1 +0,0 @@
b6a2418a94b84c29c4b9fcfe4381f2cc1aa4c214

View File

@ -0,0 +1 @@
e7dc67b42eca3b1546a36370b6dcda0f83b2eb7d

View File

@ -1 +0,0 @@
6292a5579a6ab3423ceca60d2ea41cd86481e7c0

View File

@ -0,0 +1 @@
5946d5e2be276f66e9ff6d6111acabb03a9330d9

View File

@ -1 +0,0 @@
95b2563e5337377dde2eb987b3fce144be5e7a77

View File

@ -0,0 +1 @@
d9fc5fc63f3d861e5af72e11373368e8a4c6bba6

View File

@ -1 +0,0 @@
1efd2fa7cba1e359e3fbb8b4c11cab37024b2178

View File

@ -0,0 +1 @@
ee283c0a1a717f3e0915de75864a93d043efaee3

View File

@ -1 +0,0 @@
93512c2160bdc3e602141329e5945a91918b6752

View File

@ -0,0 +1 @@
e1adf0220a7c052ac81e2919ffac24ac0e5b007c

View File

@ -1 +0,0 @@
47090d8ddf99f6bbb64ee8ab7a76c3cd3165b88f

View File

@ -0,0 +1 @@
6d9306053942c48f43392a634f11a95462b5996e

View File

@ -1 +0,0 @@
ed8f07d67445d5acde6597996461640b2d92fa08

View File

@ -0,0 +1 @@
2334e8c5f4d0f98659b30e0c2035296e4aae8ff5

View File

@ -1 +0,0 @@
6034ccf6b27c659ab7a2678680bae8390fbfc40a

View File

@ -0,0 +1 @@
f2b2c454eb7b5d73b9df1390ea4730ce3dd4e463

View File

@ -171,10 +171,10 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final int V_6_3_0_ID = 6030099;
public static final Version V_6_3_0 = new Version(V_6_3_0_ID, org.apache.lucene.util.Version.LUCENE_7_3_0);
public static final int V_6_4_0_ID = 6040099;
public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_3_0);
public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final int V_7_0_0_alpha1_ID = 7000001;
public static final Version V_7_0_0_alpha1 =
new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_3_0);
new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final Version CURRENT = V_7_0_0_alpha1;
static {

View File

@ -576,7 +576,7 @@ public class ActionModule extends AbstractModule {
registerHandler.accept(new RestOpenIndexAction(settings, restController));
registerHandler.accept(new RestUpdateSettingsAction(settings, restController));
registerHandler.accept(new RestGetSettingsAction(settings, restController, indexScopedSettings, settingsFilter));
registerHandler.accept(new RestGetSettingsAction(settings, restController));
registerHandler.accept(new RestAnalyzeAction(settings, restController));
registerHandler.accept(new RestGetIndexTemplateAction(settings, restController));

View File

@ -20,13 +20,14 @@
package org.elasticsearch.action.admin.cluster.repositories.get;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.cluster.metadata.RepositoriesMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
@ -34,15 +35,15 @@ import java.util.List;
/**
* Get repositories response
*/
public class GetRepositoriesResponse extends ActionResponse implements Iterable<RepositoryMetaData> {
private List<RepositoryMetaData> repositories = Collections.emptyList();
public class GetRepositoriesResponse extends ActionResponse implements ToXContentObject {
private RepositoriesMetaData repositories;
GetRepositoriesResponse() {
repositories = new RepositoriesMetaData(Collections.emptyList());
}
GetRepositoriesResponse(List<RepositoryMetaData> repositories) {
GetRepositoriesResponse(RepositoriesMetaData repositories) {
this.repositories = repositories;
}
@ -52,43 +53,25 @@ public class GetRepositoriesResponse extends ActionResponse implements Iterable<
* @return list of repositories
*/
public List<RepositoryMetaData> repositories() {
return repositories;
return repositories.repositories();
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
List<RepositoryMetaData> repositoryListBuilder = new ArrayList<>(size);
for (int j = 0; j < size; j++) {
repositoryListBuilder.add(new RepositoryMetaData(
in.readString(),
in.readString(),
Settings.readSettingsFromStream(in))
);
}
repositories = Collections.unmodifiableList(repositoryListBuilder);
repositories = new RepositoriesMetaData(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(repositories.size());
for (RepositoryMetaData repository : repositories) {
out.writeString(repository.name());
out.writeString(repository.type());
Settings.writeSettingsToStream(repository.settings(), out);
}
repositories.writeTo(out);
}
/**
* Iterator over the repositories data
*
* @return iterator over the repositories data
*/
@Override
public Iterator<RepositoryMetaData> iterator() {
return repositories.iterator();
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
repositories.toXContent(builder, params);
builder.endObject();
return builder;
}
}
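Since rendering is now delegated to RepositoriesMetaData, the response body is an object keyed by repository name; roughly, with illustrative repository name, type, and settings:
{
  "my_backup": {
    "type": "fs",
    "settings": { "location": "/mount/backups" }
  }
}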

View File

@ -75,9 +75,9 @@ public class TransportGetRepositoriesAction extends TransportMasterNodeReadActio
RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE);
if (request.repositories().length == 0 || (request.repositories().length == 1 && "_all".equals(request.repositories()[0]))) {
if (repositories != null) {
listener.onResponse(new GetRepositoriesResponse(repositories.repositories()));
listener.onResponse(new GetRepositoriesResponse(repositories));
} else {
listener.onResponse(new GetRepositoriesResponse(Collections.<RepositoryMetaData>emptyList()));
listener.onResponse(new GetRepositoriesResponse(new RepositoriesMetaData(Collections.emptyList())));
}
} else {
if (repositories != null) {
@ -102,7 +102,7 @@ public class TransportGetRepositoriesAction extends TransportMasterNodeReadActio
}
repositoryListBuilder.add(repositoryMetaData);
}
listener.onResponse(new GetRepositoriesResponse(Collections.unmodifiableList(repositoryListBuilder)));
listener.onResponse(new GetRepositoriesResponse(new RepositoriesMetaData(repositoryListBuilder)));
} else {
listener.onFailure(new RepositoryMissingException(request.repositories()[0]));
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.indices.settings.get;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.ValidateActions;
@ -29,6 +30,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
public class GetSettingsRequest extends MasterNodeReadRequest<GetSettingsRequest> implements IndicesRequest.Replaceable {
@ -36,6 +39,7 @@ public class GetSettingsRequest extends MasterNodeReadRequest<GetSettingsRequest
private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true);
private String[] names = Strings.EMPTY_ARRAY;
private boolean humanReadable = false;
private boolean includeDefaults = false;
@Override
public GetSettingsRequest indices(String... indices) {
@ -48,6 +52,16 @@ public class GetSettingsRequest extends MasterNodeReadRequest<GetSettingsRequest
return this;
}
/**
* When include_defaults is set, return default values which are normally suppressed.
* This flag is specific to the rest client.
*/
public GetSettingsRequest includeDefaults(boolean includeDefaults) {
this.includeDefaults = includeDefaults;
return this;
}
public GetSettingsRequest() {
}
@ -57,6 +71,9 @@ public class GetSettingsRequest extends MasterNodeReadRequest<GetSettingsRequest
indicesOptions = IndicesOptions.readIndicesOptions(in);
names = in.readStringArray();
humanReadable = in.readBoolean();
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
includeDefaults = in.readBoolean();
}
}
@Override
@ -66,6 +83,9 @@ public class GetSettingsRequest extends MasterNodeReadRequest<GetSettingsRequest
indicesOptions.writeIndicesOptions(out);
out.writeStringArray(names);
out.writeBoolean(humanReadable);
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
out.writeBoolean(includeDefaults);
}
}
@Override
@ -96,6 +116,10 @@ public class GetSettingsRequest extends MasterNodeReadRequest<GetSettingsRequest
return this;
}
public boolean includeDefaults() {
return includeDefaults;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
@ -109,4 +133,24 @@ public class GetSettingsRequest extends MasterNodeReadRequest<GetSettingsRequest
public void readFrom(StreamInput in) throws IOException {
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetSettingsRequest that = (GetSettingsRequest) o;
return humanReadable == that.humanReadable &&
includeDefaults == that.includeDefaults &&
Arrays.equals(indices, that.indices) &&
Objects.equals(indicesOptions, that.indicesOptions) &&
Arrays.equals(names, that.names);
}
@Override
public int hashCode() {
int result = Objects.hash(indicesOptions, humanReadable, includeDefaults);
result = 31 * result + Arrays.hashCode(indices);
result = 31 * result + Arrays.hashCode(names);
return result;
}
}
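A usage sketch tying this to the new high-level REST client method, assuming an existing RestHighLevelClient named client and an illustrative index name:
GetSettingsRequest request = new GetSettingsRequest()
    .indices("my-index")
    .includeDefaults(true);
GetSettingsResponse response = client.indices().getSettings(request);
// resolved from the defaults map when the setting was never set explicitly
String refreshInterval = response.getSetting("my-index", "index.refresh_interval");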

View File

@ -21,32 +21,83 @@ package org.elasticsearch.action.admin.indices.settings.get;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.CharBuffer;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
public class GetSettingsResponse extends ActionResponse {
public class GetSettingsResponse extends ActionResponse implements ToXContentObject {
private ImmutableOpenMap<String, Settings> indexToSettings = ImmutableOpenMap.of();
private ImmutableOpenMap<String, Settings> indexToDefaultSettings = ImmutableOpenMap.of();
public GetSettingsResponse(ImmutableOpenMap<String, Settings> indexToSettings) {
public GetSettingsResponse(ImmutableOpenMap<String, Settings> indexToSettings,
ImmutableOpenMap<String, Settings> indexToDefaultSettings) {
this.indexToSettings = indexToSettings;
this.indexToDefaultSettings = indexToDefaultSettings;
}
GetSettingsResponse() {
}
/**
* Returns a map of index name to {@link Settings} object. The returned {@link Settings}
* objects contain only those settings explicitly set on a given index. Any settings
* taking effect as defaults must be accessed via {@link #getIndexToDefaultSettings()}.
*/
public ImmutableOpenMap<String, Settings> getIndexToSettings() {
return indexToSettings;
}
/**
* If the originating {@link GetSettingsRequest} object was configured to include
* defaults, this will contain a mapping of index name to {@link Settings} objects.
* The returned {@link Settings} objects will contain only those settings taking
* effect as defaults. Any settings explicitly set on the index will be available
* via {@link #getIndexToSettings()}.
* See also {@link GetSettingsRequest#includeDefaults(boolean)}
*/
public ImmutableOpenMap<String, Settings> getIndexToDefaultSettings() {
return indexToDefaultSettings;
}
/**
* Returns the string value for the specified index and setting. If the includeDefaults
* flag was not set or set to false on the GetSettingsRequest, this method will only
* return a value where the setting was explicitly set on the index. If the includeDefaults
* flag was set to true on the GetSettingsRequest, this method will fall back to return the default
* value if the setting was not explicitly set.
*/
public String getSetting(String index, String setting) {
Settings settings = indexToSettings.get(index);
if (setting != null) {
return settings.get(setting);
if (settings != null && settings.hasValue(setting)) {
return settings.get(setting);
} else {
Settings defaultSettings = indexToDefaultSettings.get(index);
if (defaultSettings != null) {
return defaultSettings.get(setting);
} else {
return null;
}
}
} else {
return null;
}
@ -55,12 +106,22 @@ public class GetSettingsResponse extends ActionResponse {
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
ImmutableOpenMap.Builder<String, Settings> builder = ImmutableOpenMap.builder();
for (int i = 0; i < size; i++) {
builder.put(in.readString(), Settings.readSettingsFromStream(in));
int settingsSize = in.readVInt();
ImmutableOpenMap.Builder<String, Settings> settingsBuilder = ImmutableOpenMap.builder();
for (int i = 0; i < settingsSize; i++) {
settingsBuilder.put(in.readString(), Settings.readSettingsFromStream(in));
}
indexToSettings = builder.build();
ImmutableOpenMap.Builder<String, Settings> defaultSettingsBuilder = ImmutableOpenMap.builder();
if (in.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
int defaultSettingsSize = in.readVInt();
for (int i = 0; i < defaultSettingsSize ; i++) {
defaultSettingsBuilder.put(in.readString(), Settings.readSettingsFromStream(in));
}
}
indexToSettings = settingsBuilder.build();
indexToDefaultSettings = defaultSettingsBuilder.build();
}
@Override
@ -71,5 +132,121 @@ public class GetSettingsResponse extends ActionResponse {
out.writeString(cursor.key);
Settings.writeSettingsToStream(cursor.value, out);
}
if (out.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
out.writeVInt(indexToDefaultSettings.size());
for (ObjectObjectCursor<String, Settings> cursor : indexToDefaultSettings) {
out.writeString(cursor.key);
Settings.writeSettingsToStream(cursor.value, out);
}
}
}
private static void parseSettingsField(XContentParser parser, String currentIndexName, Map<String, Settings> indexToSettings,
Map<String, Settings> indexToDefaultSettings) throws IOException {
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
switch (parser.currentName()) {
case "settings":
indexToSettings.put(currentIndexName, Settings.fromXContent(parser));
break;
case "defaults":
indexToDefaultSettings.put(currentIndexName, Settings.fromXContent(parser));
break;
default:
parser.skipChildren();
}
} else if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
parser.skipChildren();
}
parser.nextToken();
}
private static void parseIndexEntry(XContentParser parser, Map<String, Settings> indexToSettings,
Map<String, Settings> indexToDefaultSettings) throws IOException {
String indexName = parser.currentName();
parser.nextToken();
while (!parser.isClosed() && parser.currentToken() != XContentParser.Token.END_OBJECT) {
parseSettingsField(parser, indexName, indexToSettings, indexToDefaultSettings);
}
}
public static GetSettingsResponse fromXContent(XContentParser parser) throws IOException {
HashMap<String, Settings> indexToSettings = new HashMap<>();
HashMap<String, Settings> indexToDefaultSettings = new HashMap<>();
if (parser.currentToken() == null) {
parser.nextToken();
}
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
parser.nextToken();
while (!parser.isClosed()) {
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
//we must assume this is an index entry
parseIndexEntry(parser, indexToSettings, indexToDefaultSettings);
} else if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
parser.skipChildren();
} else {
parser.nextToken();
}
}
ImmutableOpenMap<String, Settings> settingsMap = ImmutableOpenMap.<String, Settings>builder().putAll(indexToSettings).build();
ImmutableOpenMap<String, Settings> defaultSettingsMap =
ImmutableOpenMap.<String, Settings>builder().putAll(indexToDefaultSettings).build();
return new GetSettingsResponse(settingsMap, defaultSettingsMap);
}
@Override
public String toString() {
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
XContentBuilder builder = new XContentBuilder(JsonXContent.jsonXContent, baos);
toXContent(builder, ToXContent.EMPTY_PARAMS, false);
return Strings.toString(builder);
} catch (IOException e) {
throw new IllegalStateException(e); //should not be possible here
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return toXContent(builder, params, indexToDefaultSettings.isEmpty());
}
private XContentBuilder toXContent(XContentBuilder builder, Params params, boolean omitEmptySettings) throws IOException {
builder.startObject();
for (ObjectObjectCursor<String, Settings> cursor : getIndexToSettings()) {
// no settings, jump over it to shorten the response data
if (omitEmptySettings && cursor.value.isEmpty()) {
continue;
}
builder.startObject(cursor.key);
builder.startObject("settings");
cursor.value.toXContent(builder, params);
builder.endObject();
if (indexToDefaultSettings.isEmpty() == false) {
builder.startObject("defaults");
indexToDefaultSettings.get(cursor.key).toXContent(builder, params);
builder.endObject();
}
builder.endObject();
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetSettingsResponse that = (GetSettingsResponse) o;
return Objects.equals(indexToSettings, that.indexToSettings) &&
Objects.equals(indexToDefaultSettings, that.indexToDefaultSettings);
}
@Override
public int hashCode() {
return Objects.hash(indexToSettings, indexToDefaultSettings);
}
}
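For reference, the XContent shape produced by toXContent and consumed by fromXContent is one object per index, with a "defaults" object present only when defaults were requested (index name and values illustrative):
{
  "my-index": {
    "settings": { "index": { "number_of_shards": "1" } },
    "defaults": { "index": { "refresh_interval": "1s" } }
  }
}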

View File

@ -37,19 +37,23 @@ import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.common.settings.IndexScopedSettings;
import java.util.Map;
import java.util.Arrays;
public class TransportGetSettingsAction extends TransportMasterNodeReadAction<GetSettingsRequest, GetSettingsResponse> {
private final SettingsFilter settingsFilter;
private final IndexScopedSettings indexScopedSettings;
@Inject
public TransportGetSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, SettingsFilter settingsFilter, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver) {
IndexNameExpressionResolver indexNameExpressionResolver, IndexScopedSettings indexScopedSettings) {
super(settings, GetSettingsAction.NAME, transportService, clusterService, threadPool, actionFilters, GetSettingsRequest::new, indexNameExpressionResolver);
this.settingsFilter = settingsFilter;
this.indexScopedSettings = indexScopedSettings;
}
@Override
@ -69,25 +73,39 @@ public class TransportGetSettingsAction extends TransportMasterNodeReadAction<Ge
return new GetSettingsResponse();
}
private static boolean isFilteredRequest(GetSettingsRequest request) {
return CollectionUtils.isEmpty(request.names()) == false;
}
@Override
protected void masterOperation(GetSettingsRequest request, ClusterState state, ActionListener<GetSettingsResponse> listener) {
Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
ImmutableOpenMap.Builder<String, Settings> indexToSettingsBuilder = ImmutableOpenMap.builder();
ImmutableOpenMap.Builder<String, Settings> indexToDefaultSettingsBuilder = ImmutableOpenMap.builder();
for (Index concreteIndex : concreteIndices) {
IndexMetaData indexMetaData = state.getMetaData().index(concreteIndex);
if (indexMetaData == null) {
continue;
}
Settings settings = settingsFilter.filter(indexMetaData.getSettings());
Settings indexSettings = settingsFilter.filter(indexMetaData.getSettings());
if (request.humanReadable()) {
settings = IndexMetaData.addHumanReadableSettings(settings);
indexSettings = IndexMetaData.addHumanReadableSettings(indexSettings);
}
if (CollectionUtils.isEmpty(request.names()) == false) {
settings = settings.filter(k -> Regex.simpleMatch(request.names(), k));
if (isFilteredRequest(request)) {
indexSettings = indexSettings.filter(k -> Regex.simpleMatch(request.names(), k));
}
indexToSettingsBuilder.put(concreteIndex.getName(), indexSettings);
if (request.includeDefaults()) {
Settings defaultSettings = settingsFilter.filter(indexScopedSettings.diff(indexSettings, Settings.EMPTY));
if (isFilteredRequest(request)) {
defaultSettings = defaultSettings.filter(k -> Regex.simpleMatch(request.names(), k));
}
indexToDefaultSettingsBuilder.put(concreteIndex.getName(), defaultSettings);
}
indexToSettingsBuilder.put(concreteIndex.getName(), settings);
}
listener.onResponse(new GetSettingsResponse(indexToSettingsBuilder.build()));
listener.onResponse(new GetSettingsResponse(indexToSettingsBuilder.build(), indexToDefaultSettingsBuilder.build()));
}
}
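A note on the defaults computation: diffing the explicit index settings against Settings.EMPTY is assumed to return every registered index-scoped setting that is not explicitly set, at its default value, so the two maps in the response never overlap; for example, an index that only sets index.number_of_shards would see index.refresh_interval appear in its defaults map but not index.number_of_shards.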

View File

@ -39,6 +39,7 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
@ -58,14 +59,14 @@ import java.util.Set;
import java.util.stream.Collectors;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
/**
* A request to create an index template.
*/
public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateRequest> implements IndicesRequest {
public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateRequest> implements IndicesRequest, ToXContent {
private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(PutIndexTemplateRequest.class));
@ -539,4 +540,34 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
}
out.writeOptionalVInt(version);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (customs.isEmpty() == false) {
throw new IllegalArgumentException("Custom data type is no longer supported in index template [" + customs + "]");
}
builder.field("index_patterns", indexPatterns);
builder.field("order", order);
if (version != null) {
builder.field("version", version);
}
builder.startObject("settings");
settings.toXContent(builder, params);
builder.endObject();
builder.startObject("mappings");
for (Map.Entry<String, String> entry : mappings.entrySet()) {
Map<String, Object> mapping = XContentHelper.convertToMap(new BytesArray(entry.getValue()), false).v2();
builder.field(entry.getKey(), mapping);
}
builder.endObject();
builder.startObject("aliases");
for (Alias alias : aliases) {
alias.toXContent(builder, params);
}
builder.endObject();
return builder;
}
}
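The new toXContent emits the template in the same shape the put index template API accepts; roughly, with illustrative values:
{
  "index_patterns": ["te*"],
  "order": 0,
  "version": 1,
  "settings": { "index": { "number_of_shards": "1" } },
  "mappings": { "type1": { "properties": { "field1": { "type": "text" } } } },
  "aliases": { "alias1": {} }
}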

View File

@ -21,6 +21,8 @@ package org.elasticsearch.action.admin.indices.template.put;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
@ -47,4 +49,14 @@ public class PutIndexTemplateResponse extends AcknowledgedResponse {
super.writeTo(out);
writeAcknowledged(out);
}
private static final ConstructingObjectParser<PutIndexTemplateResponse, Void> PARSER;
static {
PARSER = new ConstructingObjectParser<>("put_index_template", true, args -> new PutIndexTemplateResponse((boolean) args[0]));
declareAcknowledgedField(PARSER);
}
public static PutIndexTemplateResponse fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
}

View File

@ -33,6 +33,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
@ -51,7 +52,7 @@ public class RepositoriesMetaData extends AbstractNamedDiffable<Custom> implemen
* @param repositories list of repositories
*/
public RepositoriesMetaData(List<RepositoryMetaData> repositories) {
this.repositories = repositories;
this.repositories = Collections.unmodifiableList(repositories);
}
/**
@ -107,7 +108,7 @@ public class RepositoriesMetaData extends AbstractNamedDiffable<Custom> implemen
for (int i = 0; i < repository.length; i++) {
repository[i] = new RepositoryMetaData(in);
}
this.repositories = Arrays.asList(repository);
this.repositories = Collections.unmodifiableList(Arrays.asList(repository));
}
public static NamedDiff<Custom> readDiffFrom(StreamInput in) throws IOException {

View File

@ -31,6 +31,7 @@ import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
@ -317,26 +318,24 @@ public class KeyStoreWrapper implements SecureSettings {
DataInputStream input = new DataInputStream(bytesStream)) {
int saltLen = input.readInt();
salt = new byte[saltLen];
if (input.read(salt) != saltLen) {
throw new SecurityException("Keystore has been corrupted or tampered with");
}
input.readFully(salt);
int ivLen = input.readInt();
iv = new byte[ivLen];
if (input.read(iv) != ivLen) {
throw new SecurityException("Keystore has been corrupted or tampered with");
}
input.readFully(iv);
int encryptedLen = input.readInt();
encryptedBytes = new byte[encryptedLen];
if (input.read(encryptedBytes) != encryptedLen) {
input.readFully(encryptedBytes);
if (input.read() != -1) {
throw new SecurityException("Keystore has been corrupted or tampered with");
}
} catch (EOFException e) {
throw new SecurityException("Keystore has been corrupted or tampered with", e);
}
Cipher cipher = createCipher(Cipher.DECRYPT_MODE, password, salt, iv);
try (ByteArrayInputStream bytesStream = new ByteArrayInputStream(encryptedBytes);
CipherInputStream cipherStream = new CipherInputStream(bytesStream, cipher);
DataInputStream input = new DataInputStream(cipherStream)) {
entries.set(new HashMap<>());
int numEntries = input.readInt();
while (numEntries-- > 0) {
@ -344,11 +343,14 @@ public class KeyStoreWrapper implements SecureSettings {
EntryType entryType = EntryType.valueOf(input.readUTF());
int entrySize = input.readInt();
byte[] entryBytes = new byte[entrySize];
if (input.read(entryBytes) != entrySize) {
throw new SecurityException("Keystore has been corrupted or tampered with");
}
input.readFully(entryBytes);
entries.get().put(setting, new Entry(entryType, entryBytes));
}
if (input.read() != -1) {
throw new SecurityException("Keystore has been corrupted or tampered with");
}
} catch (EOFException e) {
throw new SecurityException("Keystore has been corrupted or tampered with", e);
}
}
@ -360,7 +362,6 @@ public class KeyStoreWrapper implements SecureSettings {
Cipher cipher = createCipher(Cipher.ENCRYPT_MODE, password, salt, iv);
try (CipherOutputStream cipherStream = new CipherOutputStream(bytes, cipher);
DataOutputStream output = new DataOutputStream(cipherStream)) {
output.writeInt(entries.get().size());
for (Map.Entry<String, Entry> mapEntry : entries.get().entrySet()) {
output.writeUTF(mapEntry.getKey());
@ -370,7 +371,6 @@ public class KeyStoreWrapper implements SecureSettings {
output.write(entry.bytes);
}
}
return bytes.toByteArray();
}
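For background on the switch to readFully: InputStream#read(byte[]) may legitimately return fewer bytes than requested, and a CipherInputStream in particular tends to surface one decrypted block at a time, so a short read is not by itself evidence of tampering. DataInputStream#readFully instead loops until the buffer is full and throws EOFException on truncation. A minimal sketch of the difference (stream construction elided):
byte[] buf = new byte[expectedLen];
int n = input.read(buf);  // may fill only part of buf; n < expectedLen is not an error
input.readFully(buf);     // reads exactly buf.length bytes or throws EOFException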

View File

@ -401,9 +401,15 @@ public class RestController extends AbstractComponent implements HttpServerTrans
* Handle a requests with no candidate handlers (return a 400 Bad Request
* error).
*/
private void handleBadRequest(RestRequest request, RestChannel channel) {
channel.sendResponse(new BytesRestResponse(BAD_REQUEST,
"No handler found for uri [" + request.uri() + "] and method [" + request.method() + "]"));
private void handleBadRequest(RestRequest request, RestChannel channel) throws IOException {
try (XContentBuilder builder = channel.newErrorBuilder()) {
builder.startObject();
{
builder.field("error", "no handler found for uri [" + request.uri() + "] and method [" + request.method() + "]");
}
builder.endObject();
channel.sendResponse(new BytesRestResponse(BAD_REQUEST, builder));
}
}
/**

Some files were not shown because too many files have changed in this diff Show More