From 3d9f8ed764495ab85198f89baea9c06c72311275 Mon Sep 17 00:00:00 2001 From: Jun Ohtani Date: Fri, 2 Sep 2016 00:04:13 +0900 Subject: [PATCH 01/40] Remove `token_filter` in _analyze API Remove the param and change docs Closes #20283 --- .../action/admin/indices/RestAnalyzeAction.java | 6 +++--- .../admin/indices/RestAnalyzeActionTests.java | 15 +++++++++++++++ docs/reference/indices/analyze.asciidoc | 6 +++--- .../reference/migration/migrate_5_0/rest.asciidoc | 4 ++-- 4 files changed, 23 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java index 7801aad8e0b..04d0bf57612 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java @@ -50,7 +50,7 @@ public class RestAnalyzeAction extends BaseRestHandler { public static final ParseField TEXT = new ParseField("text"); public static final ParseField FIELD = new ParseField("field"); public static final ParseField TOKENIZER = new ParseField("tokenizer"); - public static final ParseField TOKEN_FILTERS = new ParseField("filter", "token_filter"); + public static final ParseField TOKEN_FILTERS = new ParseField("filter"); public static final ParseField CHAR_FILTERS = new ParseField("char_filter"); public static final ParseField EXPLAIN = new ParseField("explain"); public static final ParseField ATTRIBUTES = new ParseField("attributes"); @@ -77,7 +77,7 @@ public class RestAnalyzeAction extends BaseRestHandler { if (request.hasParam("tokenizer")) { analyzeRequest.tokenizer(request.param("tokenizer")); } - for (String filter : request.paramAsStringArray("filter", request.paramAsStringArray("token_filter", Strings.EMPTY_ARRAY))) { + for (String filter : request.paramAsStringArray("filter", Strings.EMPTY_ARRAY)) { analyzeRequest.addTokenFilter(filter); } for (String charFilter : request.paramAsStringArray("char_filter", Strings.EMPTY_ARRAY)) { @@ -144,7 +144,7 @@ public class RestAnalyzeAction extends BaseRestHandler { analyzeRequest.addTokenFilter(parser.map()); } else { throw new IllegalArgumentException(currentFieldName - + " array element should contain token_filter's name or setting"); + + " array element should contain filter's name or setting"); } } } else if (parseFieldMatcher.match(currentFieldName, Fields.CHAR_FILTERS) diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java index de188ba9e91..5795002892a 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java @@ -181,6 +181,21 @@ public class RestAnalyzeActionTests extends ESTestCase { assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]")); } + content = XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("token_filter", "lowercase") + .endObject().bytes(); + + analyzeRequest = new AnalyzeRequest("for test"); + + try { + RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); + } catch (Exception e) { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), 
startsWith("Unknown parameter [token_filter]")); + } } } diff --git a/docs/reference/indices/analyze.asciidoc b/docs/reference/indices/analyze.asciidoc index ee8b856ef41..e2a5cf1661b 100644 --- a/docs/reference/indices/analyze.asciidoc +++ b/docs/reference/indices/analyze.asciidoc @@ -43,13 +43,13 @@ curl -XGET 'localhost:9200/_analyze' -d ' curl -XGET 'localhost:9200/_analyze' -d ' { "tokenizer" : "keyword", - "token_filter" : ["lowercase"], + "filter" : ["lowercase"], "char_filter" : ["html_strip"], "text" : "this is a test" }' -------------------------------------------------- -deprecated[5.0.0, Use `filter`/`token_filter`/`char_filter` instead of `filters`/`token_filters`/`char_filters`] +deprecated[5.0.0, Use `filter`/`char_filter` instead of `filters`/`char_filters` and `token_filters` has been removed] Custom tokenizers, token filters, and character filters can be specified in the request body as follows: @@ -112,7 +112,7 @@ provided it doesn't start with `{` : [source,js] -------------------------------------------------- -curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&token_filter=lowercase&char_filter=html_strip' -d 'this is a test' +curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&filter=lowercase&char_filter=html_strip' -d 'this is a test' -------------------------------------------------- === Explain Analyze diff --git a/docs/reference/migration/migrate_5_0/rest.asciidoc b/docs/reference/migration/migrate_5_0/rest.asciidoc index 278acd52c43..c17effeb04a 100644 --- a/docs/reference/migration/migrate_5_0/rest.asciidoc +++ b/docs/reference/migration/migrate_5_0/rest.asciidoc @@ -67,8 +67,8 @@ removed in Elasticsearch 6.0.0. ==== Analyze API changes -The deprecated `filters`/`token_filters`/`char_filters` parameter has been -renamed `filter`/`token_filter`/`char_filter`. +The deprecated `filters`/`char_filters` parameter has been +renamed `filter`/`char_filter` and `token_filters` parameter has been removed. ==== `DELETE /_query` endpoint removed From c869ca18ebc4ecfda3eb8e63adbc354600934b55 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Thu, 25 Aug 2016 15:42:55 -0400 Subject: [PATCH 02/40] Update breaking changes for completion suggester To indicate we removed completion payload option in favour of returning document source with completion suggestions --- .../migration/migrate_5_0/suggest.asciidoc | 61 +++++++++++-------- 1 file changed, 36 insertions(+), 25 deletions(-) diff --git a/docs/reference/migration/migrate_5_0/suggest.asciidoc b/docs/reference/migration/migrate_5_0/suggest.asciidoc index 0b67711fe00..822dcd45700 100644 --- a/docs/reference/migration/migrate_5_0/suggest.asciidoc +++ b/docs/reference/migration/migrate_5_0/suggest.asciidoc @@ -25,35 +25,46 @@ to suggestion entries for both context and completion suggesters. ==== Completion suggester is document-oriented -Suggestions are aware of the document they belong to. This enables -retrieving any field value from the document. This is exposed -through the query-time `payload` option in `completion` and `context` -suggesters: +Suggestions are aware of the document they belong to. Now, associated +documents (`_source`) are returned as part of `completion` suggestions. 
-[source,sh]
---------------
-GET /my_index/_search
-{
-  "suggest": {
-    "fooSuggestion": {
-       "text": "f"
-       "completion": {
-         "field": "fooSuggest",
-         "payload": ["field1", "field2"]
-       }
-    }
-  }
-}
---------------
+WARNING: The `_source` meta-field must be enabled, which is the default behavior,
+for `_source` to be returned with suggestions.
 
 Previously, `context` and `completion` suggesters supported an index-time
 `payloads` option, which was used to store and return metadata with suggestions.
-Now metadata can be stored as a field in the same document as the
-suggestion for enabling retrieval at query-time. The support for
-index-time `payloads` has been removed to avoid bloating the in-memory
-index with suggestion metadata. The time that it takes to retrieve payloads
-depends heavily on the size of the `_source` field. The smaller the `_source`,
-the faster the retrieval.
+Now metadata can be stored as part of the same document as the
+suggestion for retrieval at query-time. The support for index-time `payloads`
+has been removed to avoid bloating the in-memory index with suggestion metadata.
+
+When a completion query spans more than one shard, the suggest is executed in
+two phases, where the last phase fetches the relevant documents from the shards.
+Because of this document fetch overhead, completion requests that execute against
+a single shard are more performant than requests that span multiple shards.
+To get the best performance for completions, it is recommended to index completions
+into a single-shard index. If shard size leads to high heap usage, it is still
+recommended to break the index into multiple shards instead of optimizing for
+completion performance.
+
+Completion results return the full document `_source` by default. The size of
+the `_source` can impact performance due to disk fetch and network transport overhead.
+For best performance, filter out unnecessary fields from the `_source` using
+<> to minimize `_source` size.
+The following demonstrates an example completion query with source filtering: + +[source,js] +-------------------------------------------------- +POST music/_suggest +{ + "_source": "completion.*", + "song-suggest" : { + "prefix" : "nir", + "completion" : { + "field" : "suggest" + } + } +} +-------------------------------------------------- ==== Simpler completion indexing From aef2e5d90ef99b4ede25804ef9b3b024146f5d88 Mon Sep 17 00:00:00 2001 From: Jun Ohtani Date: Fri, 2 Sep 2016 11:47:29 +0900 Subject: [PATCH 03/40] Remove `token_filter` in _analyze API Fix wording in docs Refactoring RestAnalyzeActionTests using expectThrows() Closes #20283 --- .../admin/indices/RestAnalyzeActionTests.java | 132 +++++++----------- .../migration/migrate_5_0/rest.asciidoc | 4 +- 2 files changed, 52 insertions(+), 84 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java index 5795002892a..9b7d4073d0d 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; @@ -90,14 +89,10 @@ public class RestAnalyzeActionTests extends ESTestCase { public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - - try { - RestAnalyzeAction.buildFromContent(new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - fail("shouldn't get here"); - } catch (Exception e) { - assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), equalTo("Failed to parse request body")); - } + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent( + new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY))); + assertThat(e.getMessage(), equalTo("Failed to parse request body")); } public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception { @@ -107,14 +102,9 @@ public class RestAnalyzeActionTests extends ESTestCase { .field("text", "THIS IS A TEST") .field("unknown", "keyword") .endObject().bytes(); - - try { - RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - fail("shouldn't get here"); - } catch (Exception e) { - assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); - } + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY))); + assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); } public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception { @@ -123,79 +113,57 @@ public class RestAnalyzeActionTests extends ESTestCase { .startObject() .field("explain", "fals") .endObject().bytes(); - try { - 
RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - fail("shouldn't get here"); - } catch (Exception e) { - assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); - } + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY))); + assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); } public void testDeprecatedParamException() throws Exception { - BytesReference content = XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("filters", "lowercase") - .endObject().bytes(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent( + XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("filters", "lowercase") + .endObject().bytes(), + new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY))); + assertThat(e.getMessage(), startsWith("Unknown parameter [filters]")); - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - try { - RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - } catch (Exception e) { - assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), startsWith("Unknown parameter [filters]")); - } + e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent( + XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("token_filters", "lowercase") + .endObject().bytes(), + new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY))); + assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]")); - content = XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("token_filters", "lowercase") - .endObject().bytes(); - analyzeRequest = new AnalyzeRequest("for test"); + e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent( + XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("char_filters", "lowercase") + .endObject().bytes(), + new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY))); + assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]")); - try { - RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - } catch (Exception e) { - assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]")); - } - - content = XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("char_filters", "lowercase") - .endObject().bytes(); - - analyzeRequest = new AnalyzeRequest("for test"); - - try { - RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - } catch (Exception e) { - assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]")); - } - - content = 
XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("token_filter", "lowercase") - .endObject().bytes(); - - analyzeRequest = new AnalyzeRequest("for test"); - - try { - RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); - } catch (Exception e) { - assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]")); - } + e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent( + XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("token_filter", "lowercase") + .endObject().bytes() + , new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY))); + assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]")); } } diff --git a/docs/reference/migration/migrate_5_0/rest.asciidoc b/docs/reference/migration/migrate_5_0/rest.asciidoc index c17effeb04a..6b4b4bb66c5 100644 --- a/docs/reference/migration/migrate_5_0/rest.asciidoc +++ b/docs/reference/migration/migrate_5_0/rest.asciidoc @@ -67,8 +67,8 @@ removed in Elasticsearch 6.0.0. ==== Analyze API changes -The deprecated `filters`/`char_filters` parameter has been -renamed `filter`/`char_filter` and `token_filters` parameter has been removed. +The `filters` and `char_filters` parameters have been renamed `filter` and `char_filter`. +The `token_filters` parameter has been removed. Use `filter` instead. ==== `DELETE /_query` endpoint removed From f6ab4e107870ce2dbed4847f5eb90a5edf314455 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 13:55:03 +0200 Subject: [PATCH 04/40] ByteSizeValue to implement Writeable rather than Streamable With this we can make ByteSizeValue immutable for real. 
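The pattern introduced here is repeated by the following patches for `OsInfo`,
`JvmInfo`, `HttpInfo`, `ThreadPoolInfo` and `ProcessInfo`: replace `Streamable`,
whose no-arg constructor plus `readFrom()` forces mutable fields, with `Writeable`,
whose `StreamInput` constructor permits `final` fields. A minimal sketch of the
new style, using a hypothetical `Example` class rather than any real Elasticsearch
type:

[source,java]
--------------------------------------------------
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;

// Hypothetical Writeable implementation: deserialization happens in a
// constructor, so the field can be final and no blank mutable instance
// is ever created.
public class Example implements Writeable {

    private final long value;

    public Example(long value) {
        this.value = value;
    }

    // Reads fields in the same order that writeTo writes them.
    public Example(StreamInput in) throws IOException {
        this.value = in.readVLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(value);
    }
}
--------------------------------------------------

Round-tripping an instance through `BytesStreamOutput` and `out.bytes().streamInput()`,
as the serialization tests in these patches do, exercises both sides of this contract.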
--- .../common/unit/ByteSizeValue.java | 34 ++++++------------- .../RecoverFilesRecoveryException.java | 2 +- .../common/unit/ByteSizeValueTests.java | 20 +++++++++++ 3 files changed, 32 insertions(+), 24 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java b/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java index 32df65850a8..16f07926583 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java @@ -23,20 +23,25 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import java.io.IOException; import java.util.Locale; import java.util.Objects; -public class ByteSizeValue implements Streamable { +public class ByteSizeValue implements Writeable { - private long size; + private final long size; + private final ByteSizeUnit sizeUnit; - private ByteSizeUnit sizeUnit; - - private ByteSizeValue() { + public ByteSizeValue(StreamInput in) throws IOException { + size = in.readVLong(); + sizeUnit = ByteSizeUnit.BYTES; + } + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVLong(bytes()); } public ByteSizeValue(long bytes) { @@ -218,23 +223,6 @@ public class ByteSizeValue implements Streamable { return new ByteSizeValue(bytes, ByteSizeUnit.BYTES); } - public static ByteSizeValue readBytesSizeValue(StreamInput in) throws IOException { - ByteSizeValue sizeValue = new ByteSizeValue(); - sizeValue.readFrom(in); - return sizeValue; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - size = in.readVLong(); - sizeUnit = ByteSizeUnit.BYTES; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVLong(bytes()); - } - @Override public boolean equals(Object o) { if (this == o) { diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java index 69a55e03c9a..d2e07bd9e4c 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java @@ -57,7 +57,7 @@ public class RecoverFilesRecoveryException extends ElasticsearchException implem public RecoverFilesRecoveryException(StreamInput in) throws IOException{ super(in); numberOfFiles = in.readInt(); - totalFilesSize = ByteSizeValue.readBytesSizeValue(in); + totalFilesSize = new ByteSizeValue(in); } @Override diff --git a/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java index b075e9d56d7..0eada4f2cde 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java @@ -20,9 +20,13 @@ package org.elasticsearch.common.unit; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; import org.hamcrest.MatcherAssert; +import java.io.IOException; + import static 
org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -165,4 +169,20 @@ public class ByteSizeValueTests extends ESTestCase {
             assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
         }
     }
+
+    public void testSerialization() throws IOException {
+        //negative values cannot be serialized at the moment, we do abs but that is not enough with Long.MIN_VALUE
+        long l = Long.MIN_VALUE;
+        while (l == Long.MIN_VALUE) {
+            l = randomLong();
+        }
+        ByteSizeValue byteSizeValue = new ByteSizeValue(Math.abs(l), randomFrom(ByteSizeUnit.values()));
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
+            byteSizeValue.writeTo(out);
+            try (StreamInput in = out.bytes().streamInput()) {
+                ByteSizeValue deserializedByteSizeValue = new ByteSizeValue(in);
+                assertEquals(byteSizeValue, deserializedByteSizeValue);
+            }
+        }
+    }
 }

From bea863c6608d2b27367828f2d961559241345825 Mon Sep 17 00:00:00 2001
From: javanna
Date: Thu, 1 Sep 2016 14:30:17 +0200
Subject: [PATCH 05/40] OsInfo to implement Writeable rather than Streamable

This allows making all instance members final. Also added serialization
tests and sorted out initialization that was scattered in two places.

---
 .../admin/cluster/node/info/NodeInfo.java     |  6 +-
 .../elasticsearch/monitor/os/DummyOsInfo.java |  9 +--
 .../org/elasticsearch/monitor/os/OsInfo.java  | 68 +++++++++----------
 .../org/elasticsearch/monitor/os/OsProbe.java | 10 +--
 .../elasticsearch/monitor/os/OsService.java   | 18 ++---
 .../elasticsearch/monitor/os/OsInfoTests.java | 60 ++++++++++++++++
 .../monitor/os/OsProbeTests.java              | 26 +++++--
 7 files changed, 121 insertions(+), 76 deletions(-)
 create mode 100644 core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java

diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
index d7ce899792f..f8bb1b422aa 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
@@ -37,10 +37,6 @@ import org.elasticsearch.threadpool.ThreadPoolInfo;
 import org.elasticsearch.transport.TransportInfo;
 
 import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import static java.util.Collections.unmodifiableMap;
 
 /**
  * Node information (static, does not change over time).
@@ -206,7 +202,7 @@ public class NodeInfo extends BaseNodeResponse { settings = Settings.readSettingsFromStream(in); } if (in.readBoolean()) { - os = OsInfo.readOsInfo(in); + os = new OsInfo(in); } if (in.readBoolean()) { process = ProcessInfo.readProcessInfo(in); diff --git a/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java b/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java index 599755e78a4..af6ea851803 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java @@ -21,13 +21,8 @@ package org.elasticsearch.monitor.os; public class DummyOsInfo extends OsInfo { - DummyOsInfo() { - refreshInterval = 0; - availableProcessors = 0; - allocatedProcessors = 0; - name = "dummy_name"; - arch = "dummy_arch"; - version = "dummy_version"; + private DummyOsInfo() { + super(0, 0, 0, "dummy_name", "dummy_arch", "dummy_version"); } public static final DummyOsInfo INSTANCE = new DummyOsInfo(); diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java b/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java index f0520358524..7a0175c31d1 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java @@ -21,25 +21,47 @@ package org.elasticsearch.monitor.os; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -public class OsInfo implements Streamable, ToXContent { +public class OsInfo implements Writeable, ToXContent { - long refreshInterval; + private final long refreshInterval; + private final int availableProcessors; + private final int allocatedProcessors; + private final String name; + private final String arch; + private final String version; - int availableProcessors; + public OsInfo(long refreshInterval, int availableProcessors, int allocatedProcessors, String name, String arch, String version) { + this.refreshInterval = refreshInterval; + this.availableProcessors = availableProcessors; + this.allocatedProcessors = allocatedProcessors; + this.name = name; + this.arch = arch; + this.version = version; + } - int allocatedProcessors; + public OsInfo(StreamInput in) throws IOException { + this.refreshInterval = in.readLong(); + this.availableProcessors = in.readInt(); + this.allocatedProcessors = in.readInt(); + this.name = in.readOptionalString(); + this.arch = in.readOptionalString(); + this.version = in.readOptionalString(); + } - String name = null; - String arch = null; - String version = null; - - OsInfo() { + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(refreshInterval); + out.writeInt(availableProcessors); + out.writeInt(allocatedProcessors); + out.writeOptionalString(name); + out.writeOptionalString(arch); + out.writeOptionalString(version); } public long getRefreshInterval() { @@ -95,30 +117,4 @@ public class OsInfo implements Streamable, ToXContent { builder.endObject(); return builder; } - - public static OsInfo readOsInfo(StreamInput in) throws IOException { - OsInfo info = new OsInfo(); - info.readFrom(in); - return info; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - refreshInterval = in.readLong(); - 
availableProcessors = in.readInt(); - allocatedProcessors = in.readInt(); - name = in.readOptionalString(); - arch = in.readOptionalString(); - version = in.readOptionalString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeLong(refreshInterval); - out.writeInt(availableProcessors); - out.writeInt(allocatedProcessors); - out.writeOptionalString(name); - out.writeOptionalString(arch); - out.writeOptionalString(version); - } } diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java index 03720658e24..08abfc05f1d 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java @@ -163,13 +163,9 @@ public class OsProbe { private OsProbe() { } - public OsInfo osInfo() { - OsInfo info = new OsInfo(); - info.availableProcessors = Runtime.getRuntime().availableProcessors(); - info.name = Constants.OS_NAME; - info.arch = Constants.OS_ARCH; - info.version = Constants.OS_VERSION; - return info; + public OsInfo osInfo(long refreshInterval, int allocatedProcessors) { + return new OsInfo(refreshInterval, Runtime.getRuntime().availableProcessors(), + allocatedProcessors, Constants.OS_NAME, Constants.OS_ARCH, Constants.OS_VERSION); } public OsStats osStats() { diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java index 248b49f21cb..cb67eef852c 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java @@ -27,32 +27,22 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.SingleObjectCache; import org.elasticsearch.common.util.concurrent.EsExecutors; -/** - * - */ public class OsService extends AbstractComponent { private final OsProbe probe; - private final OsInfo info; - - private SingleObjectCache osStatsCache; + private final SingleObjectCache osStatsCache; public static final Setting REFRESH_INTERVAL_SETTING = Setting.timeSetting("monitor.os.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), - Property.NodeScope); + Property.NodeScope); public OsService(Settings settings) { super(settings); this.probe = OsProbe.getInstance(); - TimeValue refreshInterval = REFRESH_INTERVAL_SETTING.get(settings); - - this.info = probe.osInfo(); - this.info.refreshInterval = refreshInterval.millis(); - this.info.allocatedProcessors = EsExecutors.boundedNumberOfProcessors(settings); - - osStatsCache = new OsStatsCache(refreshInterval, probe.osStats()); + this.info = probe.osInfo(refreshInterval.millis(), EsExecutors.boundedNumberOfProcessors(settings)); + this.osStatsCache = new OsStatsCache(refreshInterval, probe.osStats()); logger.debug("using refresh_interval [{}]", refreshInterval); } diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java new file mode 100644 index 00000000000..ed549b38e13 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.monitor.os; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class OsInfoTests extends ESTestCase { + + public void testSerialization() throws IOException { + int availableProcessors = randomIntBetween(1, 64); + int allocatedProcessors = randomIntBetween(1, availableProcessors); + long refreshInterval; + if (randomBoolean()) { + refreshInterval = -1; + } else { + refreshInterval = randomLong(); + while (refreshInterval == Long.MIN_VALUE) { + refreshInterval = randomLong(); + } + refreshInterval = Math.abs(refreshInterval); + } + String name = randomAsciiOfLengthBetween(3, 10); + String arch = randomAsciiOfLengthBetween(3, 10); + String version = randomAsciiOfLengthBetween(3, 10); + OsInfo osInfo = new OsInfo(refreshInterval, availableProcessors, allocatedProcessors, name, arch, version); + try (BytesStreamOutput out = new BytesStreamOutput()) { + osInfo.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + OsInfo deserializedOsInfo = new OsInfo(in); + assertEquals(osInfo.getRefreshInterval(), deserializedOsInfo.getRefreshInterval()); + assertEquals(osInfo.getAvailableProcessors(), deserializedOsInfo.getAvailableProcessors()); + assertEquals(osInfo.getAllocatedProcessors(), deserializedOsInfo.getAllocatedProcessors()); + assertEquals(osInfo.getName(), deserializedOsInfo.getName()); + assertEquals(osInfo.getArch(), deserializedOsInfo.getArch()); + assertEquals(osInfo.getVersion(), deserializedOsInfo.getVersion()); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index 2a80fe39732..ea15487a3b4 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -32,16 +32,28 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; public class OsProbeTests extends ESTestCase { - OsProbe probe = OsProbe.getInstance(); + private OsProbe probe = OsProbe.getInstance(); public void testOsInfo() { - OsInfo info = probe.osInfo(); + int allocatedProcessors = randomIntBetween(1, Runtime.getRuntime().availableProcessors()); + long refreshInterval; + if (randomBoolean()) { + refreshInterval = -1; + } else { + refreshInterval = randomLong(); + while (refreshInterval == Long.MIN_VALUE) { + refreshInterval = randomLong(); + } + refreshInterval = Math.abs(refreshInterval); + } + OsInfo info = probe.osInfo(refreshInterval, allocatedProcessors); assertNotNull(info); - assertThat(info.getRefreshInterval(), anyOf(equalTo(-1L), greaterThanOrEqualTo(0L))); - assertThat(info.getName(), equalTo(Constants.OS_NAME)); - assertThat(info.getArch(), equalTo(Constants.OS_ARCH)); - 
assertThat(info.getVersion(), equalTo(Constants.OS_VERSION)); - assertThat(info.getAvailableProcessors(), equalTo(Runtime.getRuntime().availableProcessors())); + assertEquals(refreshInterval, info.getRefreshInterval()); + assertEquals(Constants.OS_NAME, info.getName()); + assertEquals(Constants.OS_ARCH, info.getArch()); + assertEquals(Constants.OS_VERSION, info.getVersion()); + assertEquals(allocatedProcessors, info.getAllocatedProcessors()); + assertEquals(Runtime.getRuntime().availableProcessors(), info.getAvailableProcessors()); } public void testOsStats() { From 279f8b27e30979908e1110078e901779f38f491e Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 17:14:08 +0200 Subject: [PATCH 06/40] JvmInfo to implement Writeable rather than Streamable --- .../admin/cluster/node/info/NodeInfo.java | 2 +- .../elasticsearch/monitor/jvm/JvmInfo.java | 331 ++++++++++-------- .../monitor/jvm/JvmInfoTests.java | 38 ++ 3 files changed, 215 insertions(+), 156 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index f8bb1b422aa..93141b74f9f 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -208,7 +208,7 @@ public class NodeInfo extends BaseNodeResponse { process = ProcessInfo.readProcessInfo(in); } if (in.readBoolean()) { - jvm = JvmInfo.readJvmInfo(in); + jvm = new JvmInfo(in); } if (in.readBoolean()) { threadPool = ThreadPoolInfo.readThreadPoolInfo(in); diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java index 1619ecee23a..d370a06cdc8 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java @@ -19,10 +19,9 @@ package org.elasticsearch.monitor.jvm; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -41,10 +40,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -/** - * - */ -public class JvmInfo implements Streamable, ToXContent { +public class JvmInfo implements Writeable, ToXContent { private static JvmInfo INSTANCE; @@ -61,100 +57,106 @@ public class JvmInfo implements Streamable, ToXContent { } catch (Exception e) { pid = -1; } - JvmInfo info = new JvmInfo(); - info.pid = pid; - info.startTime = runtimeMXBean.getStartTime(); - info.version = System.getProperty("java.version"); - info.vmName = runtimeMXBean.getVmName(); - info.vmVendor = runtimeMXBean.getVmVendor(); - info.vmVersion = runtimeMXBean.getVmVersion(); - info.mem = new Mem(); - info.mem.heapInit = memoryMXBean.getHeapMemoryUsage().getInit() < 0 ? 0 : memoryMXBean.getHeapMemoryUsage().getInit(); - info.mem.heapMax = memoryMXBean.getHeapMemoryUsage().getMax() < 0 ? 0 : memoryMXBean.getHeapMemoryUsage().getMax(); - info.mem.nonHeapInit = memoryMXBean.getNonHeapMemoryUsage().getInit() < 0 ? 
0 : memoryMXBean.getNonHeapMemoryUsage().getInit(); - info.mem.nonHeapMax = memoryMXBean.getNonHeapMemoryUsage().getMax() < 0 ? 0 : memoryMXBean.getNonHeapMemoryUsage().getMax(); + + long heapInit = memoryMXBean.getHeapMemoryUsage().getInit() < 0 ? 0 : memoryMXBean.getHeapMemoryUsage().getInit(); + long heapMax = memoryMXBean.getHeapMemoryUsage().getMax() < 0 ? 0 : memoryMXBean.getHeapMemoryUsage().getMax(); + long nonHeapInit = memoryMXBean.getNonHeapMemoryUsage().getInit() < 0 ? 0 : memoryMXBean.getNonHeapMemoryUsage().getInit(); + long nonHeapMax = memoryMXBean.getNonHeapMemoryUsage().getMax() < 0 ? 0 : memoryMXBean.getNonHeapMemoryUsage().getMax(); + long directMemoryMax = 0; try { Class vmClass = Class.forName("sun.misc.VM"); - info.mem.directMemoryMax = (Long) vmClass.getMethod("maxDirectMemory").invoke(null); + directMemoryMax = (Long) vmClass.getMethod("maxDirectMemory").invoke(null); } catch (Exception t) { // ignore } - info.inputArguments = runtimeMXBean.getInputArguments().toArray(new String[runtimeMXBean.getInputArguments().size()]); + String[] inputArguments = runtimeMXBean.getInputArguments().toArray(new String[runtimeMXBean.getInputArguments().size()]); + Mem mem = new Mem(heapInit, heapMax, nonHeapInit, nonHeapMax, directMemoryMax); + + String bootClassPath; try { - info.bootClassPath = runtimeMXBean.getBootClassPath(); + bootClassPath = runtimeMXBean.getBootClassPath(); } catch (UnsupportedOperationException e) { // oracle java 9 - info.bootClassPath = System.getProperty("sun.boot.class.path"); - if (info.bootClassPath == null) { + bootClassPath = System.getProperty("sun.boot.class.path"); + if (bootClassPath == null) { // something else - info.bootClassPath = ""; + bootClassPath = ""; } } - info.classPath = runtimeMXBean.getClassPath(); - info.systemProperties = Collections.unmodifiableMap(runtimeMXBean.getSystemProperties()); + String classPath = runtimeMXBean.getClassPath(); + Map systemProperties = Collections.unmodifiableMap(runtimeMXBean.getSystemProperties()); List gcMxBeans = ManagementFactory.getGarbageCollectorMXBeans(); - info.gcCollectors = new String[gcMxBeans.size()]; + String[] gcCollectors = new String[gcMxBeans.size()]; for (int i = 0; i < gcMxBeans.size(); i++) { GarbageCollectorMXBean gcMxBean = gcMxBeans.get(i); - info.gcCollectors[i] = gcMxBean.getName(); + gcCollectors[i] = gcMxBean.getName(); } List memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans(); - info.memoryPools = new String[memoryPoolMXBeans.size()]; + String[] memoryPools = new String[memoryPoolMXBeans.size()]; for (int i = 0; i < memoryPoolMXBeans.size(); i++) { MemoryPoolMXBean memoryPoolMXBean = memoryPoolMXBeans.get(i); - info.memoryPools[i] = memoryPoolMXBean.getName(); + memoryPools[i] = memoryPoolMXBean.getName(); } + String onError = null; + String onOutOfMemoryError = null; + String useCompressedOops = "unknown"; + String useG1GC = "unknown"; + long configuredInitialHeapSize = -1; + long configuredMaxHeapSize = -1; try { @SuppressWarnings("unchecked") Class clazz = - (Class)Class.forName("com.sun.management.HotSpotDiagnosticMXBean"); + (Class)Class.forName("com.sun.management.HotSpotDiagnosticMXBean"); Class vmOptionClazz = Class.forName("com.sun.management.VMOption"); PlatformManagedObject hotSpotDiagnosticMXBean = ManagementFactory.getPlatformMXBean(clazz); Method vmOptionMethod = clazz.getMethod("getVMOption", String.class); Method valueMethod = vmOptionClazz.getMethod("getValue"); try { - Object onError = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "OnError"); - 
info.onError = (String) valueMethod.invoke(onError); + Object onErrorObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "OnError"); + onError = (String) valueMethod.invoke(onErrorObject); } catch (Exception ignored) { } try { - Object onOutOfMemoryError = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "OnOutOfMemoryError"); - info.onOutOfMemoryError = (String) valueMethod.invoke(onOutOfMemoryError); + Object onOutOfMemoryErrorObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "OnOutOfMemoryError"); + onOutOfMemoryError = (String) valueMethod.invoke(onOutOfMemoryErrorObject); } catch (Exception ignored) { } try { - Object useCompressedOopsVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseCompressedOops"); - info.useCompressedOops = (String) valueMethod.invoke(useCompressedOopsVmOption); + Object useCompressedOopsVmOptionObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseCompressedOops"); + useCompressedOops = (String) valueMethod.invoke(useCompressedOopsVmOptionObject); } catch (Exception ignored) { } try { - Object useG1GCVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseG1GC"); - info.useG1GC = (String) valueMethod.invoke(useG1GCVmOption); + Object useG1GCVmOptionObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseG1GC"); + useG1GC = (String) valueMethod.invoke(useG1GCVmOptionObject); } catch (Exception ignored) { } try { - Object initialHeapSizeVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "InitialHeapSize"); - info.configuredInitialHeapSize = Long.parseLong((String) valueMethod.invoke(initialHeapSizeVmOption)); + Object initialHeapSizeVmOptionObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "InitialHeapSize"); + configuredInitialHeapSize = Long.parseLong((String) valueMethod.invoke(initialHeapSizeVmOptionObject)); } catch (Exception ignored) { } try { - Object maxHeapSizeVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "MaxHeapSize"); - info.configuredMaxHeapSize = Long.parseLong((String) valueMethod.invoke(maxHeapSizeVmOption)); + Object maxHeapSizeVmOptionObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "MaxHeapSize"); + configuredMaxHeapSize = Long.parseLong((String) valueMethod.invoke(maxHeapSizeVmOptionObject)); } catch (Exception ignored) { } } catch (Exception ignored) { } - INSTANCE = info; + INSTANCE = new JvmInfo(pid, System.getProperty("java.version"), runtimeMXBean.getVmName(), runtimeMXBean.getVmVersion(), + runtimeMXBean.getVmVendor(), runtimeMXBean.getStartTime(), configuredInitialHeapSize, configuredMaxHeapSize, + mem, inputArguments, bootClassPath, classPath, systemProperties, gcCollectors, memoryPools, onError, onOutOfMemoryError, + useCompressedOops, useG1GC); } public static JvmInfo jvmInfo() { @@ -166,40 +168,104 @@ public class JvmInfo implements Streamable, ToXContent { return INSTANCE; } - long pid = -1; + private final long pid; + private final String version; + private final String vmName; + private final String vmVersion; + private final String vmVendor; + private final long startTime; + private final long configuredInitialHeapSize; + private final long configuredMaxHeapSize; + private final Mem mem; + private final String[] inputArguments; + private final String bootClassPath; + private final String classPath; + private final Map systemProperties; + private final String[] gcCollectors; + private final String[] memoryPools; + private final String onError; + private final String onOutOfMemoryError; + private final String useCompressedOops; + private final String useG1GC; - 
String version = ""; - String vmName = ""; - String vmVersion = ""; - String vmVendor = ""; + private JvmInfo(long pid, String version, String vmName, String vmVersion, String vmVendor, long startTime, + long configuredInitialHeapSize, long configuredMaxHeapSize, Mem mem, String[] inputArguments, String bootClassPath, + String classPath, Map systemProperties, String[] gcCollectors, String[] memoryPools, String onError, + String onOutOfMemoryError, String useCompressedOops, String useG1GC) { + this.pid = pid; + this.version = version; + this.vmName = vmName; + this.vmVersion = vmVersion; + this.vmVendor = vmVendor; + this.startTime = startTime; + this.configuredInitialHeapSize = configuredInitialHeapSize; + this.configuredMaxHeapSize = configuredMaxHeapSize; + this.mem = mem; + this.inputArguments = inputArguments; + this.bootClassPath = bootClassPath; + this.classPath = classPath; + this.systemProperties = systemProperties; + this.gcCollectors = gcCollectors; + this.memoryPools = memoryPools; + this.onError = onError; + this.onOutOfMemoryError = onOutOfMemoryError; + this.useCompressedOops = useCompressedOops; + this.useG1GC = useG1GC; + } - long startTime = -1; + public JvmInfo(StreamInput in) throws IOException { + pid = in.readLong(); + version = in.readString(); + vmName = in.readString(); + vmVersion = in.readString(); + vmVendor = in.readString(); + startTime = in.readLong(); + inputArguments = new String[in.readInt()]; + for (int i = 0; i < inputArguments.length; i++) { + inputArguments[i] = in.readString(); + } + bootClassPath = in.readString(); + classPath = in.readString(); + systemProperties = new HashMap<>(); + int size = in.readInt(); + for (int i = 0; i < size; i++) { + systemProperties.put(in.readString(), in.readString()); + } + mem = new Mem(in); + gcCollectors = in.readStringArray(); + memoryPools = in.readStringArray(); + useCompressedOops = in.readString(); + //the following members are only used locally for boostrap checks, never serialized nor printed out + this.configuredMaxHeapSize = -1; + this.configuredInitialHeapSize = -1; + this.onError = null; + this.onOutOfMemoryError = null; + this.useG1GC = "unknown"; + } - private long configuredInitialHeapSize; - private long configuredMaxHeapSize; - - Mem mem; - - String[] inputArguments; - - String bootClassPath; - - String classPath; - - Map systemProperties; - - String[] gcCollectors = Strings.EMPTY_ARRAY; - String[] memoryPools = Strings.EMPTY_ARRAY; - - private String onError; - - private String onOutOfMemoryError; - - private String useCompressedOops = "unknown"; - - private String useG1GC = "unknown"; - - private JvmInfo() { + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(pid); + out.writeString(version); + out.writeString(vmName); + out.writeString(vmVersion); + out.writeString(vmVendor); + out.writeLong(startTime); + out.writeInt(inputArguments.length); + for (String inputArgument : inputArguments) { + out.writeString(inputArgument); + } + out.writeString(bootClassPath); + out.writeString(classPath); + out.writeInt(systemProperties.size()); + for (Map.Entry entry : systemProperties.entrySet()) { + out.writeString(entry.getKey()); + out.writeString(entry.getValue()); + } + mem.writeTo(out); + out.writeStringArray(gcCollectors); + out.writeStringArray(memoryPools); + out.writeString(useCompressedOops); } /** @@ -354,6 +420,14 @@ public class JvmInfo implements Streamable, ToXContent { return this.useG1GC; } + public String[] getGcCollectors() { + return gcCollectors; + } + + 
public String[] getMemoryPools() { + return memoryPools; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(Fields.JVM); @@ -407,72 +481,37 @@ public class JvmInfo implements Streamable, ToXContent { static final String USING_COMPRESSED_OOPS = "using_compressed_ordinary_object_pointers"; } - public static JvmInfo readJvmInfo(StreamInput in) throws IOException { - JvmInfo jvmInfo = new JvmInfo(); - jvmInfo.readFrom(in); - return jvmInfo; - } + public static class Mem implements Writeable { - @Override - public void readFrom(StreamInput in) throws IOException { - pid = in.readLong(); - version = in.readString(); - vmName = in.readString(); - vmVersion = in.readString(); - vmVendor = in.readString(); - startTime = in.readLong(); - inputArguments = new String[in.readInt()]; - for (int i = 0; i < inputArguments.length; i++) { - inputArguments[i] = in.readString(); + private final long heapInit; + private final long heapMax; + private final long nonHeapInit; + private final long nonHeapMax; + private final long directMemoryMax; + + public Mem(long heapInit, long heapMax, long nonHeapInit, long nonHeapMax, long directMemoryMax) { + this.heapInit = heapInit; + this.heapMax = heapMax; + this.nonHeapInit = nonHeapInit; + this.nonHeapMax = nonHeapMax; + this.directMemoryMax = directMemoryMax; } - bootClassPath = in.readString(); - classPath = in.readString(); - systemProperties = new HashMap<>(); - int size = in.readInt(); - for (int i = 0; i < size; i++) { - systemProperties.put(in.readString(), in.readString()); + + public Mem(StreamInput in) throws IOException { + this.heapInit = in.readVLong(); + this.heapMax = in.readVLong(); + this.nonHeapInit = in.readVLong(); + this.nonHeapMax = in.readVLong(); + this.directMemoryMax = in.readVLong(); } - mem = new Mem(); - mem.readFrom(in); - gcCollectors = in.readStringArray(); - memoryPools = in.readStringArray(); - useCompressedOops = in.readString(); - } - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeLong(pid); - out.writeString(version); - out.writeString(vmName); - out.writeString(vmVersion); - out.writeString(vmVendor); - out.writeLong(startTime); - out.writeInt(inputArguments.length); - for (String inputArgument : inputArguments) { - out.writeString(inputArgument); - } - out.writeString(bootClassPath); - out.writeString(classPath); - out.writeInt(systemProperties.size()); - for (Map.Entry entry : systemProperties.entrySet()) { - out.writeString(entry.getKey()); - out.writeString(entry.getValue()); - } - mem.writeTo(out); - out.writeStringArray(gcCollectors); - out.writeStringArray(memoryPools); - out.writeString(useCompressedOops); - } - - public static class Mem implements Streamable { - - long heapInit = 0; - long heapMax = 0; - long nonHeapInit = 0; - long nonHeapMax = 0; - long directMemoryMax = 0; - - Mem() { + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVLong(heapInit); + out.writeVLong(heapMax); + out.writeVLong(nonHeapInit); + out.writeVLong(nonHeapMax); + out.writeVLong(directMemoryMax); } public ByteSizeValue getHeapInit() { @@ -494,23 +533,5 @@ public class JvmInfo implements Streamable, ToXContent { public ByteSizeValue getDirectMemoryMax() { return new ByteSizeValue(directMemoryMax); } - - @Override - public void readFrom(StreamInput in) throws IOException { - heapInit = in.readVLong(); - heapMax = in.readVLong(); - nonHeapInit = in.readVLong(); - nonHeapMax = in.readVLong(); - 
directMemoryMax = in.readVLong(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVLong(heapInit); - out.writeVLong(heapMax); - out.writeVLong(nonHeapInit); - out.writeVLong(nonHeapMax); - out.writeVLong(directMemoryMax); - } } } diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java index 82b264ae1d6..aaedec94154 100644 --- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java @@ -21,8 +21,12 @@ package org.elasticsearch.monitor.jvm; import org.apache.lucene.util.Constants; import org.elasticsearch.bootstrap.JavaVersion; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; + public class JvmInfoTests extends ESTestCase { public void testUseG1GC() { @@ -58,4 +62,38 @@ public class JvmInfoTests extends ESTestCase { return argline.charAt(index - 1) == '+'; } + public void testSerialization() throws IOException { + JvmInfo jvmInfo = JvmInfo.jvmInfo(); + try (BytesStreamOutput out = new BytesStreamOutput()) { + jvmInfo.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + JvmInfo deserializedJvmInfo = new JvmInfo(in); + assertEquals(jvmInfo.getVersion(), deserializedJvmInfo.getVersion()); + assertEquals(jvmInfo.getVmName(), deserializedJvmInfo.getVmName()); + assertEquals(jvmInfo.getVmVendor(), deserializedJvmInfo.getVmVendor()); + assertEquals(jvmInfo.getVmVersion(), deserializedJvmInfo.getVmVersion()); + assertEquals(jvmInfo.getBootClassPath(), deserializedJvmInfo.getBootClassPath()); + assertEquals(jvmInfo.getClassPath(), deserializedJvmInfo.getClassPath()); + assertEquals(jvmInfo.getPid(), deserializedJvmInfo.getPid()); + assertEquals(jvmInfo.getStartTime(), deserializedJvmInfo.getStartTime()); + assertEquals(jvmInfo.versionUpdatePack(), deserializedJvmInfo.versionUpdatePack()); + assertEquals(jvmInfo.versionAsInteger(), deserializedJvmInfo.versionAsInteger()); + assertEquals(jvmInfo.getMem().getDirectMemoryMax(), deserializedJvmInfo.getMem().getDirectMemoryMax()); + assertEquals(jvmInfo.getMem().getHeapInit(), deserializedJvmInfo.getMem().getHeapInit()); + assertEquals(jvmInfo.getMem().getHeapMax(), deserializedJvmInfo.getMem().getHeapMax()); + assertEquals(jvmInfo.getMem().getNonHeapInit(), deserializedJvmInfo.getMem().getNonHeapInit()); + assertEquals(jvmInfo.getMem().getNonHeapMax(), deserializedJvmInfo.getMem().getNonHeapMax()); + assertEquals(jvmInfo.getSystemProperties(), deserializedJvmInfo.getSystemProperties()); + assertArrayEquals(jvmInfo.getInputArguments(), deserializedJvmInfo.getInputArguments()); + assertArrayEquals(jvmInfo.getGcCollectors(), deserializedJvmInfo.getGcCollectors()); + assertArrayEquals(jvmInfo.getMemoryPools(), deserializedJvmInfo.getMemoryPools()); + assertEquals(jvmInfo.useCompressedOops(), deserializedJvmInfo.useCompressedOops()); + assertEquals(-1, deserializedJvmInfo.getConfiguredInitialHeapSize()); + assertEquals(-1, deserializedJvmInfo.getConfiguredMaxHeapSize()); + assertEquals("unknown", deserializedJvmInfo.useG1GC()); + assertNull(deserializedJvmInfo.onError()); + assertNull(deserializedJvmInfo.onOutOfMemoryError()); + } + } + } } From 27e7fc734c74207f1a2b0df9965f1ef21d2c7492 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 17:21:53 +0200 Subject: [PATCH 07/40] HttpInfo 
to implement Writeable rather than Streamable --- .../admin/cluster/node/info/NodeInfo.java | 2 +- .../java/org/elasticsearch/http/HttpInfo.java | 39 +++++++------------ 2 files changed, 14 insertions(+), 27 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 93141b74f9f..68ec0f39e6d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -217,7 +217,7 @@ public class NodeInfo extends BaseNodeResponse { transport = TransportInfo.readTransportInfo(in); } if (in.readBoolean()) { - http = HttpInfo.readHttpInfo(in); + http = new HttpInfo(in); } if (in.readBoolean()) { plugins = new PluginsAndModules(); diff --git a/core/src/main/java/org/elasticsearch/http/HttpInfo.java b/core/src/main/java/org/elasticsearch/http/HttpInfo.java index 0f285974e8a..e8f3985a23a 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpInfo.java +++ b/core/src/main/java/org/elasticsearch/http/HttpInfo.java @@ -21,7 +21,7 @@ package org.elasticsearch.http; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ToXContent; @@ -29,15 +29,20 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -/** - * - */ -public class HttpInfo implements Streamable, ToXContent { +public class HttpInfo implements Writeable, ToXContent { - private BoundTransportAddress address; - private long maxContentLength; + private final BoundTransportAddress address; + private final long maxContentLength; - HttpInfo() { + public HttpInfo(StreamInput in) throws IOException { + address = BoundTransportAddress.readBoundTransportAddress(in); + maxContentLength = in.readLong(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + address.writeTo(out); + out.writeLong(maxContentLength); } public HttpInfo(BoundTransportAddress address, long maxContentLength) { @@ -63,24 +68,6 @@ public class HttpInfo implements Streamable, ToXContent { return builder; } - public static HttpInfo readHttpInfo(StreamInput in) throws IOException { - HttpInfo info = new HttpInfo(); - info.readFrom(in); - return info; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - address = BoundTransportAddress.readBoundTransportAddress(in); - maxContentLength = in.readLong(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - address.writeTo(out); - out.writeLong(maxContentLength); - } - public BoundTransportAddress address() { return address; } From 2370c25fa4ec1b8ddd68915c486c5a9473c33de0 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 18:28:49 +0200 Subject: [PATCH 08/40] ThreadPoolInfo to implement Writeable rather than Streamable --- .../admin/cluster/node/info/NodeInfo.java | 2 +- .../threadpool/ThreadPoolInfo.java | 37 +++++++------------ 2 files changed, 14 insertions(+), 25 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java 
From 2370c25fa4ec1b8ddd68915c486c5a9473c33de0 Mon Sep 17 00:00:00 2001
From: javanna
Date: Thu, 1 Sep 2016 18:28:49 +0200
Subject: [PATCH 08/40] ThreadPoolInfo to implement Writeable rather than
 Streamable

---
 .../admin/cluster/node/info/NodeInfo.java     |  2 +-
 .../threadpool/ThreadPoolInfo.java            | 37 +++++++------------
 2 files changed, 14 insertions(+), 25 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
index 68ec0f39e6d..d697236472f 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
@@ -211,7 +211,7 @@ public class NodeInfo extends BaseNodeResponse {
             jvm = new JvmInfo(in);
         }
         if (in.readBoolean()) {
-            threadPool = ThreadPoolInfo.readThreadPoolInfo(in);
+            threadPool = new ThreadPoolInfo(in);
         }
         if (in.readBoolean()) {
             transport = TransportInfo.readTransportInfo(in);
diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java
index 729c6cb7364..5b9dadc85ec 100644
--- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java
+++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java
@@ -21,49 +21,33 @@ package org.elasticsearch.threadpool;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
-/**
- */
-public class ThreadPoolInfo implements Streamable, Iterable<ThreadPool.Info>, ToXContent {
-
-    private List<ThreadPool.Info> infos;
-
-    ThreadPoolInfo() {
-    }
+public class ThreadPoolInfo implements Writeable, Iterable<ThreadPool.Info>, ToXContent {
+    private final List<ThreadPool.Info> infos;
 
     public ThreadPoolInfo(List<ThreadPool.Info> infos) {
-        this.infos = infos;
+        this.infos = Collections.unmodifiableList(infos);
     }
 
-    @Override
-    public Iterator<ThreadPool.Info> iterator() {
-        return infos.iterator();
-    }
-
-    public static ThreadPoolInfo readThreadPoolInfo(StreamInput in) throws IOException {
-        ThreadPoolInfo info = new ThreadPoolInfo();
-        info.readFrom(in);
-        return info;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
+    public ThreadPoolInfo(StreamInput in) throws IOException {
         int size = in.readVInt();
-        infos = new ArrayList<>(size);
+        List<ThreadPool.Info> infos = new ArrayList<>(size);
         for (int i = 0; i < size; i++) {
             ThreadPool.Info info = new ThreadPool.Info();
             info.readFrom(in);
             infos.add(info);
         }
+        this.infos = Collections.unmodifiableList(infos);
     }
 
     @Override
@@ -74,6 +58,11 @@ public class ThreadPoolInfo implements Streamable, Iterable<ThreadPool.Info>, ToXContent
         }
     }
 
+    @Override
+    public Iterator<ThreadPool.Info> iterator() {
+        return infos.iterator();
+    }
+
     static final class Fields {
         static final String THREAD_POOL = "thread_pool";
     }
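Two details of the ThreadPoolInfo stream constructor above are worth noting while reading on. First, ThreadPool.Info is still Streamable at this point, hence the temporary list filled through readFrom; patch 14 below converts it to Writeable as well. Second, patch 16 below replaces the manual size-prefixed loops with the StreamInput#readList and StreamOutput#writeList helpers, after which the constructor and writeTo reduce to the following (taken from that later diff):

    // in ThreadPoolInfo(StreamInput in):
    this.infos = Collections.unmodifiableList(in.readList(ThreadPool.Info::new));

    // in writeTo(StreamOutput out):
    out.writeList(infos);
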
From 555db744f1af224cf636020a678e20cd1890806b Mon Sep 17 00:00:00 2001
From: javanna
Date: Thu, 1 Sep 2016 18:41:04 +0200
Subject: [PATCH 09/40] ProcessInfo to implement Writeable rather than
 Streamable

---
 .../admin/cluster/node/info/NodeInfo.java     |  2 +-
 .../monitor/process/ProcessInfo.java          | 51 ++++++++-----------
 .../monitor/process/ProcessProbe.java         |  4 +-
 .../monitor/process/ProcessService.java       |  4 +-
 .../monitor/os/OsProbeTests.java              |  8 +--
 .../monitor/process/ProcessProbeTests.java    | 11 ++--
 .../nodesinfo/NodeInfoStreamingTests.java     |  2 +-
 .../index/reindex/RoundTripTests.java         |  8 ---
 .../org/elasticsearch/test/ESTestCase.java    |  8 +++
 9 files changed, 41 insertions(+), 57 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
index d697236472f..ef229bb34bd 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
@@ -205,7 +205,7 @@ public class NodeInfo extends BaseNodeResponse {
             os = new OsInfo(in);
         }
         if (in.readBoolean()) {
-            process = ProcessInfo.readProcessInfo(in);
+            process = new ProcessInfo(in);
         }
         if (in.readBoolean()) {
             jvm = new JvmInfo(in);
         }
diff --git a/core/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java b/core/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java
index cf9c9e63b87..a0e3e7a70f2 100644
--- a/core/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java
+++ b/core/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java
@@ -21,26 +21,35 @@ package org.elasticsearch.monitor.process;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
 import java.io.IOException;
 
-public class ProcessInfo implements Streamable, ToXContent {
+public class ProcessInfo implements Writeable, ToXContent {
 
-    long refreshInterval;
+    private final long refreshInterval;
+    private final long id;
+    private final boolean mlockall;
 
-    private long id;
-
-    private boolean mlockall;
-
-    ProcessInfo() {
-    }
-
-    public ProcessInfo(long id, boolean mlockall) {
+    public ProcessInfo(long id, boolean mlockall, long refreshInterval) {
         this.id = id;
         this.mlockall = mlockall;
+        this.refreshInterval = refreshInterval;
+    }
+
+    public ProcessInfo(StreamInput in) throws IOException {
+        refreshInterval = in.readLong();
+        id = in.readLong();
+        mlockall = in.readBoolean();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeLong(refreshInterval);
+        out.writeLong(id);
+        out.writeBoolean(mlockall);
     }
 
     public long refreshInterval() {
@@ -79,24 +88,4 @@ public class ProcessInfo implements Streamable, ToXContent {
         builder.endObject();
         return builder;
     }
-
-    public static ProcessInfo readProcessInfo(StreamInput in) throws IOException {
-        ProcessInfo info = new ProcessInfo();
-        info.readFrom(in);
-        return info;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        refreshInterval = in.readLong();
-        id = in.readLong();
-        mlockall = in.readBoolean();
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeLong(refreshInterval);
-        out.writeLong(id);
-        out.writeBoolean(mlockall);
-    }
 }
diff --git a/core/src/main/java/org/elasticsearch/monitor/process/ProcessProbe.java b/core/src/main/java/org/elasticsearch/monitor/process/ProcessProbe.java
index b19b54a9478..e4307f724c5 100644
--- a/core/src/main/java/org/elasticsearch/monitor/process/ProcessProbe.java
+++ b/core/src/main/java/org/elasticsearch/monitor/process/ProcessProbe.java
@@ -126,8 +126,8 @@ public class ProcessProbe {
         return -1;
     }
 
-    public ProcessInfo processInfo() {
-        return new ProcessInfo(jvmInfo().pid(), BootstrapInfo.isMemoryLocked());
+    public ProcessInfo processInfo(long refreshInterval) {
+        return new ProcessInfo(jvmInfo().pid(), BootstrapInfo.isMemoryLocked(), refreshInterval);
     }
 
     public ProcessStats processStats() {
diff --git
a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java index 38593534480..99593003b34 100644 --- a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java +++ b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java @@ -42,11 +42,9 @@ public final class ProcessService extends AbstractComponent { public ProcessService(Settings settings) { super(settings); this.probe = ProcessProbe.getInstance(); - final TimeValue refreshInterval = REFRESH_INTERVAL_SETTING.get(settings); processStatsCache = new ProcessStatsCache(refreshInterval, probe.processStats()); - this.info = probe.processInfo(); - this.info.refreshInterval = refreshInterval.millis(); + this.info = probe.processInfo(refreshInterval.millis()); logger.debug("using refresh_interval [{}]", refreshInterval); } diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index ea15487a3b4..2674f726428 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -32,7 +32,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; public class OsProbeTests extends ESTestCase { - private OsProbe probe = OsProbe.getInstance(); + private final OsProbe probe = OsProbe.getInstance(); public void testOsInfo() { int allocatedProcessors = randomIntBetween(1, Runtime.getRuntime().availableProcessors()); @@ -40,11 +40,7 @@ public class OsProbeTests extends ESTestCase { if (randomBoolean()) { refreshInterval = -1; } else { - refreshInterval = randomLong(); - while (refreshInterval == Long.MIN_VALUE) { - refreshInterval = randomLong(); - } - refreshInterval = Math.abs(refreshInterval); + refreshInterval = randomPositiveLong(); } OsInfo info = probe.osInfo(refreshInterval, allocatedProcessors); assertNotNull(info); diff --git a/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java index 8e6016f6f98..5423242ccd8 100644 --- a/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java @@ -33,14 +33,15 @@ import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; public class ProcessProbeTests extends ESTestCase { - ProcessProbe probe = ProcessProbe.getInstance(); + private final ProcessProbe probe = ProcessProbe.getInstance(); public void testProcessInfo() { - ProcessInfo info = probe.processInfo(); + long refreshInterval = randomPositiveLong(); + ProcessInfo info = probe.processInfo(refreshInterval); assertNotNull(info); - assertThat(info.getRefreshInterval(), greaterThanOrEqualTo(0L)); - assertThat(info.getId(), equalTo(jvmInfo().pid())); - assertThat(info.isMlockall(), equalTo(BootstrapInfo.isMemoryLocked())); + assertEquals(refreshInterval, info.getRefreshInterval()); + assertEquals(jvmInfo().pid(), info.getId()); + assertEquals(BootstrapInfo.isMemoryLocked(), info.isMlockall()); } public void testProcessStats() { diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index 7cd4e355218..7a3f8706dad 100644 --- 
a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -128,7 +128,7 @@ public class NodeInfoStreamingTests extends ESTestCase { serviceAttributes.put("test", "attribute"); Settings settings = Settings.builder().put("test", "setting").build(); OsInfo osInfo = DummyOsInfo.INSTANCE; - ProcessInfo process = new ProcessInfo(randomInt(), randomBoolean()); + ProcessInfo process = new ProcessInfo(randomInt(), randomBoolean(), randomPositiveLong()); JvmInfo jvm = JvmInfo.jvmInfo(); List threadPoolInfos = new ArrayList<>(); threadPoolInfos.add(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5)); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index b0fc9b428ba..1a262a32d3d 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -206,14 +206,6 @@ public class RoundTripTests extends ESTestCase { emptyMap()); // Params } - private long randomPositiveLong() { - long l; - do { - l = randomLong(); - } while (l < 0); - return l; - } - private void assertResponseEquals(BulkIndexByScrollResponse expected, BulkIndexByScrollResponse actual) { assertEquals(expected.getTook(), actual.getTook()); assertTaskStatusEquals(expected.getStatus(), actual.getStatus()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index b0575d74817..f8098420d54 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -302,6 +302,14 @@ public abstract class ESTestCase extends LuceneTestCase { return random().nextInt(); } + public static long randomPositiveLong() { + long positiveLong = randomLong(); + while (positiveLong == Long.MIN_VALUE) { + positiveLong = randomLong(); + } + return Math.abs(positiveLong); + } + public static float randomFloat() { return random().nextFloat(); } From 2b2fb8daed5ba6fa4f3141a2ab8f0d8d0eb5edfe Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 18:42:25 +0200 Subject: [PATCH 10/40] TransportInfo to implement Writeable rather than Streamable --- .../admin/cluster/node/info/NodeInfo.java | 2 +- .../transport/TransportInfo.java | 71 ++++++++----------- 2 files changed, 30 insertions(+), 43 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index ef229bb34bd..9e198cc6296 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -214,7 +214,7 @@ public class NodeInfo extends BaseNodeResponse { threadPool = new ThreadPoolInfo(in); } if (in.readBoolean()) { - transport = TransportInfo.readTransportInfo(in); + transport = new TransportInfo(in); } if (in.readBoolean()) { http = new HttpInfo(in); diff --git a/core/src/main/java/org/elasticsearch/transport/TransportInfo.java b/core/src/main/java/org/elasticsearch/transport/TransportInfo.java index 236c0d50a98..fbabf49b65d 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportInfo.java +++ 
b/core/src/main/java/org/elasticsearch/transport/TransportInfo.java @@ -22,7 +22,7 @@ package org.elasticsearch.transport; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -31,56 +31,17 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; -/** - * - */ -public class TransportInfo implements Streamable, ToXContent { +public class TransportInfo implements Writeable, ToXContent { private BoundTransportAddress address; private Map profileAddresses; - TransportInfo() { - } - public TransportInfo(BoundTransportAddress address, @Nullable Map profileAddresses) { this.address = address; this.profileAddresses = profileAddresses; } - static final class Fields { - static final String TRANSPORT = "transport"; - static final String BOUND_ADDRESS = "bound_address"; - static final String PUBLISH_ADDRESS = "publish_address"; - static final String PROFILES = "profiles"; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(Fields.TRANSPORT); - builder.array(Fields.BOUND_ADDRESS, (Object[]) address.boundAddresses()); - builder.field(Fields.PUBLISH_ADDRESS, address.publishAddress().toString()); - builder.startObject(Fields.PROFILES); - if (profileAddresses != null && profileAddresses.size() > 0) { - for (Map.Entry entry : profileAddresses.entrySet()) { - builder.startObject(entry.getKey()); - builder.array(Fields.BOUND_ADDRESS, (Object[]) entry.getValue().boundAddresses()); - builder.field(Fields.PUBLISH_ADDRESS, entry.getValue().publishAddress().toString()); - builder.endObject(); - } - } - builder.endObject(); - builder.endObject(); - return builder; - } - - public static TransportInfo readTransportInfo(StreamInput in) throws IOException { - TransportInfo info = new TransportInfo(); - info.readFrom(in); - return info; - } - - @Override - public void readFrom(StreamInput in) throws IOException { + public TransportInfo(StreamInput in) throws IOException { address = BoundTransportAddress.readBoundTransportAddress(in); int size = in.readVInt(); if (size > 0) { @@ -109,6 +70,32 @@ public class TransportInfo implements Streamable, ToXContent { } } + static final class Fields { + static final String TRANSPORT = "transport"; + static final String BOUND_ADDRESS = "bound_address"; + static final String PUBLISH_ADDRESS = "publish_address"; + static final String PROFILES = "profiles"; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(Fields.TRANSPORT); + builder.array(Fields.BOUND_ADDRESS, (Object[]) address.boundAddresses()); + builder.field(Fields.PUBLISH_ADDRESS, address.publishAddress().toString()); + builder.startObject(Fields.PROFILES); + if (profileAddresses != null && profileAddresses.size() > 0) { + for (Map.Entry entry : profileAddresses.entrySet()) { + builder.startObject(entry.getKey()); + builder.array(Fields.BOUND_ADDRESS, (Object[]) entry.getValue().boundAddresses()); + builder.field(Fields.PUBLISH_ADDRESS, entry.getValue().publishAddress().toString()); + builder.endObject(); + } + } + builder.endObject(); + 
builder.endObject(); + return builder; + } + public BoundTransportAddress address() { return address; } From e98e37295a86c2c406be1fac698a2649ddca699b Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 18:52:21 +0200 Subject: [PATCH 11/40] PluginsAndModules to implement Writeable rather than Streamable --- .../admin/cluster/node/info/NodeInfo.java | 3 +- .../cluster/node/info/PluginsAndModules.java | 68 +++++++++---------- .../elasticsearch/plugins/PluginsService.java | 17 ++--- .../nodesinfo/NodeInfoStreamingTests.java | 6 +- .../plugins/PluginInfoTests.java | 19 +++--- 5 files changed, 58 insertions(+), 55 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 9e198cc6296..cb22c6352a7 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -220,8 +220,7 @@ public class NodeInfo extends BaseNodeResponse { http = new HttpInfo(in); } if (in.readBoolean()) { - plugins = new PluginsAndModules(); - plugins.readFrom(in); + plugins = new PluginsAndModules(in); } if (in.readBoolean()) { ingest = new IngestInfo(in); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java index 3831fd24f3e..4c7fa7fa2ce 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.node.info; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugins.PluginInfo; @@ -34,13 +34,40 @@ import java.util.List; /** * Information about plugins and modules */ -public class PluginsAndModules implements Streamable, ToXContent { - private List plugins; - private List modules; +public class PluginsAndModules implements Writeable, ToXContent { + private final List plugins; + private final List modules; - public PluginsAndModules() { - plugins = new ArrayList<>(); - modules = new ArrayList<>(); + public PluginsAndModules(List plugins, List modules) { + this.plugins = Collections.unmodifiableList(plugins); + this.modules = Collections.unmodifiableList(modules); + } + + public PluginsAndModules(StreamInput in) throws IOException { + int pluginsSize = in.readInt(); + List plugins = new ArrayList<>(pluginsSize); + for (int i = 0; i < pluginsSize; i++) { + plugins.add(PluginInfo.readFromStream(in)); + } + this.plugins = Collections.unmodifiableList(plugins); + int modulesSize = in.readInt(); + List modules = new ArrayList<>(modulesSize); + for (int i = 0; i < modulesSize; i++) { + modules.add(PluginInfo.readFromStream(in)); + } + this.modules = Collections.unmodifiableList(modules); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeInt(plugins.size()); + for (PluginInfo plugin : getPluginInfos()) { + plugin.writeTo(out); + } + out.writeInt(modules.size()); + for (PluginInfo module : 
getModuleInfos()) { + module.writeTo(out); + } } /** @@ -69,33 +96,6 @@ public class PluginsAndModules implements Streamable, ToXContent { modules.add(info); } - @Override - public void readFrom(StreamInput in) throws IOException { - if (plugins.isEmpty() == false || modules.isEmpty() == false) { - throw new IllegalStateException("instance is already populated"); - } - int plugins_size = in.readInt(); - for (int i = 0; i < plugins_size; i++) { - plugins.add(PluginInfo.readFromStream(in)); - } - int modules_size = in.readInt(); - for (int i = 0; i < modules_size; i++) { - modules.add(PluginInfo.readFromStream(in)); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeInt(plugins.size()); - for (PluginInfo plugin : getPluginInfos()) { - plugin.writeTo(out); - } - out.writeInt(modules.size()); - for (PluginInfo module : getModuleInfos()) { - module.writeTo(out); - } - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startArray("plugins"); diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index e13bdbec879..01704e9ed86 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -108,10 +108,9 @@ public class PluginsService extends AbstractComponent { */ public PluginsService(Settings settings, Path modulesDirectory, Path pluginsDirectory, Collection> classpathPlugins) { super(settings); - info = new PluginsAndModules(); List> pluginsLoaded = new ArrayList<>(); - + List pluginsList = new ArrayList<>(); // first we load plugins that are on the classpath. this is for tests and transport clients for (Class pluginClass : classpathPlugins) { Plugin plugin = loadPlugin(pluginClass, settings); @@ -120,9 +119,10 @@ public class PluginsService extends AbstractComponent { logger.trace("plugin loaded from classpath [{}]", pluginInfo); } pluginsLoaded.add(new Tuple<>(pluginInfo, plugin)); - info.addPlugin(pluginInfo); + pluginsList.add(pluginInfo); } + List modulesList = new ArrayList<>(); // load modules if (modulesDirectory != null) { try { @@ -130,7 +130,7 @@ public class PluginsService extends AbstractComponent { List> loaded = loadBundles(bundles); pluginsLoaded.addAll(loaded); for (Tuple module : loaded) { - info.addModule(module.v1()); + modulesList.add(module.v1()); } } catch (IOException ex) { throw new IllegalStateException("Unable to initialize modules", ex); @@ -144,18 +144,19 @@ public class PluginsService extends AbstractComponent { List> loaded = loadBundles(bundles); pluginsLoaded.addAll(loaded); for (Tuple plugin : loaded) { - info.addPlugin(plugin.v1()); + pluginsList.add(plugin.v1()); } } catch (IOException ex) { throw new IllegalStateException("Unable to initialize plugins", ex); } } - plugins = Collections.unmodifiableList(pluginsLoaded); + this.info = new PluginsAndModules(pluginsList, modulesList); + this.plugins = Collections.unmodifiableList(pluginsLoaded); // We need to build a List of plugins for checking mandatory plugins Set pluginsNames = new HashSet<>(); - for (Tuple tuple : plugins) { + for (Tuple tuple : this.plugins) { pluginsNames.add(tuple.v1().getName()); } @@ -179,7 +180,7 @@ public class PluginsService extends AbstractComponent { logPluginInfo(info.getPluginInfos(), "plugin", logger); Map> onModuleReferences = new HashMap<>(); - for (Tuple pluginEntry : plugins) { + for (Tuple 
pluginEntry : this.plugins) { Plugin plugin = pluginEntry.v2(); List list = new ArrayList<>(); for (Method method : plugin.getClass().getMethods()) { diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index 7a3f8706dad..895cdebd68a 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -138,9 +138,9 @@ public class NodeInfoStreamingTests extends ESTestCase { profileAddresses.put("test_address", dummyBoundTransportAddress); TransportInfo transport = new TransportInfo(dummyBoundTransportAddress, profileAddresses); HttpInfo htttpInfo = new HttpInfo(dummyBoundTransportAddress, randomLong()); - PluginsAndModules plugins = new PluginsAndModules(); - plugins.addModule(DummyPluginInfo.INSTANCE); - plugins.addPlugin(DummyPluginInfo.INSTANCE); + + PluginsAndModules plugins = new PluginsAndModules(Collections.singletonList(DummyPluginInfo.INSTANCE), + Collections.singletonList(DummyPluginInfo.INSTANCE)); IngestInfo ingestInfo = new IngestInfo(Collections.emptyList()); ByteSizeValue indexingBuffer; if (random().nextBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java index 73b31b92637..4ad52be8866 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java @@ -23,8 +23,9 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.test.ESTestCase; -import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.stream.Collectors; @@ -201,15 +202,17 @@ public class PluginInfoTests extends ESTestCase { } public void testPluginListSorted() { - PluginsAndModules pluginsInfo = new PluginsAndModules(); - pluginsInfo.addPlugin(new PluginInfo("c", "foo", "dummy", "dummyclass")); - pluginsInfo.addPlugin(new PluginInfo("b", "foo", "dummy", "dummyclass")); - pluginsInfo.addPlugin(new PluginInfo("e", "foo", "dummy", "dummyclass")); - pluginsInfo.addPlugin(new PluginInfo("a", "foo", "dummy", "dummyclass")); - pluginsInfo.addPlugin(new PluginInfo("d", "foo", "dummy", "dummyclass")); + List plugins = new ArrayList<>(); + plugins.add(new PluginInfo("c", "foo", "dummy", "dummyclass")); + plugins.add(new PluginInfo("b", "foo", "dummy", "dummyclass")); + plugins.add(new PluginInfo("e", "foo", "dummy", "dummyclass")); + plugins.add(new PluginInfo("a", "foo", "dummy", "dummyclass")); + plugins.add(new PluginInfo("d", "foo", "dummy", "dummyclass")); + PluginsAndModules pluginsInfo = new PluginsAndModules(plugins, Collections.emptyList()); + final List infos = pluginsInfo.getPluginInfos(); - List names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList()); + List names = infos.stream().map(PluginInfo::getName).collect(Collectors.toList()); assertThat(names, contains("a", "b", "c", "d", "e")); } } From 555db744f1af224cf636020a678e20cd1890806b Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 18:56:38 +0200 Subject: [PATCH 12/40] use read/writeOptionalWriteable in NodeInfo serialization code --- .../admin/cluster/node/info/NodeInfo.java | 88 ++++--------------- 1 file changed, 16 insertions(+), 72 
deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index cb22c6352a7..5389e95ad78 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -201,30 +201,14 @@ public class NodeInfo extends BaseNodeResponse { if (in.readBoolean()) { settings = Settings.readSettingsFromStream(in); } - if (in.readBoolean()) { - os = new OsInfo(in); - } - if (in.readBoolean()) { - process = new ProcessInfo(in); - } - if (in.readBoolean()) { - jvm = new JvmInfo(in); - } - if (in.readBoolean()) { - threadPool = new ThreadPoolInfo(in); - } - if (in.readBoolean()) { - transport = new TransportInfo(in); - } - if (in.readBoolean()) { - http = new HttpInfo(in); - } - if (in.readBoolean()) { - plugins = new PluginsAndModules(in); - } - if (in.readBoolean()) { - ingest = new IngestInfo(in); - } + os = in.readOptionalWriteable(OsInfo::new); + process = in.readOptionalWriteable(ProcessInfo::new); + jvm = in.readOptionalWriteable(JvmInfo::new); + threadPool = in.readOptionalWriteable(ThreadPoolInfo::new); + transport = in.readOptionalWriteable(TransportInfo::new); + http = in.readOptionalWriteable(HttpInfo::new); + plugins = in.readOptionalWriteable(PluginsAndModules::new); + ingest = in.readOptionalWriteable(IngestInfo::new); } @Override @@ -244,53 +228,13 @@ public class NodeInfo extends BaseNodeResponse { out.writeBoolean(true); Settings.writeSettingsToStream(settings, out); } - if (os == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - os.writeTo(out); - } - if (process == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - process.writeTo(out); - } - if (jvm == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - jvm.writeTo(out); - } - if (threadPool == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - threadPool.writeTo(out); - } - if (transport == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - transport.writeTo(out); - } - if (http == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - http.writeTo(out); - } - if (plugins == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - plugins.writeTo(out); - } - if (ingest == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - ingest.writeTo(out); - } + out.writeOptionalWriteable(os); + out.writeOptionalWriteable(process); + out.writeOptionalWriteable(jvm); + out.writeOptionalWriteable(threadPool); + out.writeOptionalWriteable(transport); + out.writeOptionalWriteable(http); + out.writeOptionalWriteable(plugins); + out.writeOptionalWriteable(ingest); } } From 84b8c9de194ef019d1efd239485343673bc3164d Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 18:59:51 +0200 Subject: [PATCH 13/40] PluginInfo to implement Writeable rather than Streamable --- .../cluster/node/info/PluginsAndModules.java | 4 +- .../org/elasticsearch/plugins/PluginInfo.java | 52 ++++++++----------- 2 files changed, 23 insertions(+), 33 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java index 4c7fa7fa2ce..3a9658ea816 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java @@ -47,13 +47,13 @@ public class PluginsAndModules implements Writeable, ToXContent { int pluginsSize = in.readInt(); List plugins = new ArrayList<>(pluginsSize); for (int i = 0; i < pluginsSize; i++) { - plugins.add(PluginInfo.readFromStream(in)); + plugins.add(new PluginInfo(in)); } this.plugins = Collections.unmodifiableList(plugins); int modulesSize = in.readInt(); List modules = new ArrayList<>(modulesSize); for (int i = 0; i < modulesSize; i++) { - modules.add(PluginInfo.readFromStream(in)); + modules.add(new PluginInfo(in)); } this.modules = Collections.unmodifiableList(modules); } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java index 500861d8999..d2b5e0368c4 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java @@ -22,7 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -32,7 +32,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Properties; -public class PluginInfo implements Streamable, ToXContent { +public class PluginInfo implements Writeable, ToXContent { public static final String ES_PLUGIN_PROPERTIES = "plugin-descriptor.properties"; public static final String ES_PLUGIN_POLICY = "plugin-security.policy"; @@ -45,13 +45,10 @@ public class PluginInfo implements Streamable, ToXContent { static final String CLASSNAME = "classname"; } - private String name; - private String description; - private String version; - private String classname; - - public PluginInfo() { - } + private final String name; + private final String description; + private final String version; + private final String classname; /** * Information about plugins @@ -67,6 +64,21 @@ public class PluginInfo implements Streamable, ToXContent { this.classname = classname; } + public PluginInfo(StreamInput in) throws IOException { + this.name = in.readString(); + this.description = in.readString(); + this.version = in.readString(); + this.classname = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeString(description); + out.writeString(version); + out.writeString(classname); + } + /** reads (and validates) plugin metadata descriptor file */ public static PluginInfo readFromProperties(Path dir) throws IOException { Path descriptor = dir.resolve(ES_PLUGIN_PROPERTIES); @@ -138,28 +150,6 @@ public class PluginInfo implements Streamable, ToXContent { return version; } - public static PluginInfo readFromStream(StreamInput in) throws IOException { - PluginInfo info = new PluginInfo(); - info.readFrom(in); - return info; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - this.name = in.readString(); - this.description = in.readString(); - this.version = in.readString(); - this.classname = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws 
IOException { - out.writeString(name); - out.writeString(description); - out.writeString(version); - out.writeString(classname); - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); From 774244a61fc12676acf0dd83468977b4af601433 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 19:05:13 +0200 Subject: [PATCH 14/40] ThreadPool.Info and SizeValue to implement Writeable rather than Streamable --- .../elasticsearch/common/unit/SizeValue.java | 43 ++++------ .../elasticsearch/threadpool/ThreadPool.java | 80 +++++++------------ .../threadpool/ThreadPoolInfo.java | 4 +- .../ThreadPoolSerializationTests.java | 9 +-- 4 files changed, 45 insertions(+), 91 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/unit/SizeValue.java b/core/src/main/java/org/elasticsearch/common/unit/SizeValue.java index e04dfe51430..cba51f29eeb 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/SizeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/SizeValue.java @@ -23,22 +23,14 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import java.io.IOException; -/** - * - */ -public class SizeValue implements Streamable { +public class SizeValue implements Writeable { - private long size; - - private SizeUnit sizeUnit; - - private SizeValue() { - - } + private final long size; + private final SizeUnit sizeUnit; public SizeValue(long singles) { this(singles, SizeUnit.SINGLE); @@ -52,6 +44,16 @@ public class SizeValue implements Streamable { this.sizeUnit = sizeUnit; } + public SizeValue(StreamInput in) throws IOException { + size = in.readVLong(); + sizeUnit = SizeUnit.SINGLE; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVLong(singles()); + } + public long singles() { return sizeUnit.toSingles(size); } @@ -194,23 +196,6 @@ public class SizeValue implements Streamable { return new SizeValue(singles, SizeUnit.SINGLE); } - public static SizeValue readSizeValue(StreamInput in) throws IOException { - SizeValue sizeValue = new SizeValue(); - sizeValue.readFrom(in); - return sizeValue; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - size = in.readVLong(); - sizeUnit = SizeUnit.SINGLE; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVLong(singles()); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 38fd73f73ec..d2ff4defc9e 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.SizeValue; @@ 
-529,18 +529,14 @@ public class ThreadPool extends AbstractComponent implements Closeable { } } - public static class Info implements Streamable, ToXContent { + public static class Info implements Writeable, ToXContent { - private String name; - private ThreadPoolType type; - private int min; - private int max; - private TimeValue keepAlive; - private SizeValue queueSize; - - Info() { - - } + private final String name; + private final ThreadPoolType type; + private final int min; + private final int max; + private final TimeValue keepAlive; + private final SizeValue queueSize; public Info(String name, ThreadPoolType type) { this(name, type, -1); @@ -559,6 +555,25 @@ public class ThreadPool extends AbstractComponent implements Closeable { this.queueSize = queueSize; } + public Info(StreamInput in) throws IOException { + name = in.readString(); + type = ThreadPoolType.fromType(in.readString()); + min = in.readInt(); + max = in.readInt(); + keepAlive = in.readOptionalWriteable(TimeValue::new); + queueSize = in.readOptionalWriteable(SizeValue::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeString(type.getType()); + out.writeInt(min); + out.writeInt(max); + out.writeOptionalWriteable(keepAlive); + out.writeOptionalWriteable(queueSize); + } + public String getName() { return this.name; } @@ -585,46 +600,6 @@ public class ThreadPool extends AbstractComponent implements Closeable { return this.queueSize; } - @Override - public void readFrom(StreamInput in) throws IOException { - name = in.readString(); - type = ThreadPoolType.fromType(in.readString()); - min = in.readInt(); - max = in.readInt(); - if (in.readBoolean()) { - keepAlive = new TimeValue(in); - } - if (in.readBoolean()) { - queueSize = SizeValue.readSizeValue(in); - } - in.readBoolean(); // here to conform with removed waitTime - in.readBoolean(); // here to conform with removed rejected setting - in.readBoolean(); // here to conform with queue type - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(name); - out.writeString(type.getType()); - out.writeInt(min); - out.writeInt(max); - if (keepAlive == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - keepAlive.writeTo(out); - } - if (queueSize == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - queueSize.writeTo(out); - } - out.writeBoolean(false); // here to conform with removed waitTime - out.writeBoolean(false); // here to conform with removed rejected setting - out.writeBoolean(false); // here to conform with queue type - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(name); @@ -654,7 +629,6 @@ public class ThreadPool extends AbstractComponent implements Closeable { static final String KEEP_ALIVE = "keep_alive"; static final String QUEUE_SIZE = "queue_size"; } - } /** diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java index 5b9dadc85ec..d30d872691c 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java @@ -43,9 +43,7 @@ public class ThreadPoolInfo implements Writeable, Iterable, ToX int size = in.readVInt(); List infos = new ArrayList<>(size); for (int i = 0; i < size; i++) { - ThreadPool.Info info = new ThreadPool.Info(); - 
info.readFrom(in); - infos.add(info); + infos.add(new ThreadPool.Info(in)); } this.infos = Collections.unmodifiableList(infos); } diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java index 14cf10b8f31..b0ac43cc5bc 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java @@ -59,8 +59,7 @@ public class ThreadPoolSerializationTests extends ESTestCase { info.writeTo(output); StreamInput input = output.bytes().streamInput(); - ThreadPool.Info newInfo = new ThreadPool.Info(); - newInfo.readFrom(input); + ThreadPool.Info newInfo = new ThreadPool.Info(input); assertThat(newInfo.getQueueSize().singles(), is(10000L)); } @@ -71,8 +70,7 @@ public class ThreadPoolSerializationTests extends ESTestCase { info.writeTo(output); StreamInput input = output.bytes().streamInput(); - ThreadPool.Info newInfo = new ThreadPool.Info(); - newInfo.readFrom(input); + ThreadPool.Info newInfo = new ThreadPool.Info(input); assertThat(newInfo.getQueueSize(), is(nullValue())); } @@ -126,8 +124,7 @@ public class ThreadPoolSerializationTests extends ESTestCase { info.writeTo(output); StreamInput input = output.bytes().streamInput(); - ThreadPool.Info newInfo = new ThreadPool.Info(); - newInfo.readFrom(input); + ThreadPool.Info newInfo = new ThreadPool.Info(input); assertThat(newInfo.getThreadPoolType(), is(threadPoolType)); } From 68eb58f9e37a9cd92a50026a443a496cbb746447 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 19:11:01 +0200 Subject: [PATCH 15/40] [TEST] use randomPositiveLong where possible --- .../org/elasticsearch/common/unit/ByteSizeValueTests.java | 7 +------ .../java/org/elasticsearch/monitor/os/OsInfoTests.java | 6 +----- 2 files changed, 2 insertions(+), 11 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java index 0eada4f2cde..5104e56cb35 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java @@ -171,12 +171,7 @@ public class ByteSizeValueTests extends ESTestCase { } public void testSerialization() throws IOException { - //negative values cannot be serialized at the moment, we do abs but that is not enough with Long.MIN_VALUE - long l = Long.MIN_VALUE; - while (l == Long.MIN_VALUE) { - l = randomLong(); - } - ByteSizeValue byteSizeValue = new ByteSizeValue(Math.abs(l), randomFrom(ByteSizeUnit.values())); + ByteSizeValue byteSizeValue = new ByteSizeValue(randomPositiveLong(), randomFrom(ByteSizeUnit.values())); try (BytesStreamOutput out = new BytesStreamOutput()) { byteSizeValue.writeTo(out); try (StreamInput in = out.bytes().streamInput()) { diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java index ed549b38e13..f96f6e47864 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java @@ -34,11 +34,7 @@ public class OsInfoTests extends ESTestCase { if (randomBoolean()) { refreshInterval = -1; } else { - refreshInterval = randomLong(); - while (refreshInterval == Long.MIN_VALUE) { - refreshInterval = randomLong(); - } - refreshInterval = 
Math.abs(refreshInterval); + refreshInterval = randomPositiveLong(); } String name = randomAsciiOfLengthBetween(3, 10); String arch = randomAsciiOfLengthBetween(3, 10); From 6873454f33b35c58df3402c38e1e447fb3bd7a80 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 20:12:02 +0200 Subject: [PATCH 16/40] use read/writeList and readMap where possible --- .../cluster/node/info/PluginsAndModules.java | 24 ++++--------------- .../elasticsearch/monitor/jvm/JvmInfo.java | 9 ++----- .../threadpool/ThreadPoolInfo.java | 13 ++-------- 3 files changed, 8 insertions(+), 38 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java index 3a9658ea816..206dd262ed8 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java @@ -44,30 +44,14 @@ public class PluginsAndModules implements Writeable, ToXContent { } public PluginsAndModules(StreamInput in) throws IOException { - int pluginsSize = in.readInt(); - List plugins = new ArrayList<>(pluginsSize); - for (int i = 0; i < pluginsSize; i++) { - plugins.add(new PluginInfo(in)); - } - this.plugins = Collections.unmodifiableList(plugins); - int modulesSize = in.readInt(); - List modules = new ArrayList<>(modulesSize); - for (int i = 0; i < modulesSize; i++) { - modules.add(new PluginInfo(in)); - } - this.modules = Collections.unmodifiableList(modules); + this.plugins = Collections.unmodifiableList(in.readList(PluginInfo::new)); + this.modules = Collections.unmodifiableList(in.readList(PluginInfo::new)); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeInt(plugins.size()); - for (PluginInfo plugin : getPluginInfos()) { - plugin.writeTo(out); - } - out.writeInt(modules.size()); - for (PluginInfo module : getModuleInfos()) { - module.writeTo(out); - } + out.writeList(plugins); + out.writeList(modules); } /** diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java index d370a06cdc8..ca0bb4f3e80 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java @@ -36,7 +36,6 @@ import java.lang.management.PlatformManagedObject; import java.lang.management.RuntimeMXBean; import java.lang.reflect.Method; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; @@ -226,11 +225,7 @@ public class JvmInfo implements Writeable, ToXContent { } bootClassPath = in.readString(); classPath = in.readString(); - systemProperties = new HashMap<>(); - int size = in.readInt(); - for (int i = 0; i < size; i++) { - systemProperties.put(in.readString(), in.readString()); - } + systemProperties = in.readMap(StreamInput::readString, StreamInput::readString); mem = new Mem(in); gcCollectors = in.readStringArray(); memoryPools = in.readStringArray(); @@ -257,7 +252,7 @@ public class JvmInfo implements Writeable, ToXContent { } out.writeString(bootClassPath); out.writeString(classPath); - out.writeInt(systemProperties.size()); + out.writeVInt(this.systemProperties.size()); for (Map.Entry entry : systemProperties.entrySet()) { out.writeString(entry.getKey()); out.writeString(entry.getValue()); diff --git 
a/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java index d30d872691c..70c0f2c9598 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -40,20 +39,12 @@ public class ThreadPoolInfo implements Writeable, Iterable, ToX } public ThreadPoolInfo(StreamInput in) throws IOException { - int size = in.readVInt(); - List infos = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - infos.add(new ThreadPool.Info(in)); - } - this.infos = Collections.unmodifiableList(infos); + this.infos = Collections.unmodifiableList(in.readList(ThreadPool.Info::new)); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(infos.size()); - for (ThreadPool.Info info : infos) { - info.writeTo(out); - } + out.writeList(infos); } @Override From c0a0100308a2da0a4476ce6cfedf1cd5b3be1b82 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 20:15:56 +0200 Subject: [PATCH 17/40] [TEST] use single line ternary over more verbose ifs --- .../java/org/elasticsearch/monitor/os/OsInfoTests.java | 7 +------ .../java/org/elasticsearch/monitor/os/OsProbeTests.java | 7 +------ 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java index f96f6e47864..a7327fbdea6 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java @@ -30,12 +30,7 @@ public class OsInfoTests extends ESTestCase { public void testSerialization() throws IOException { int availableProcessors = randomIntBetween(1, 64); int allocatedProcessors = randomIntBetween(1, availableProcessors); - long refreshInterval; - if (randomBoolean()) { - refreshInterval = -1; - } else { - refreshInterval = randomPositiveLong(); - } + long refreshInterval = randomBoolean() ? -1 : randomPositiveLong(); String name = randomAsciiOfLengthBetween(3, 10); String arch = randomAsciiOfLengthBetween(3, 10); String version = randomAsciiOfLengthBetween(3, 10); diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index 2674f726428..6f13d193a38 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -36,12 +36,7 @@ public class OsProbeTests extends ESTestCase { public void testOsInfo() { int allocatedProcessors = randomIntBetween(1, Runtime.getRuntime().availableProcessors()); - long refreshInterval; - if (randomBoolean()) { - refreshInterval = -1; - } else { - refreshInterval = randomPositiveLong(); - } + long refreshInterval = randomBoolean() ? 
-1 : randomPositiveLong(); OsInfo info = probe.osInfo(refreshInterval, allocatedProcessors); assertNotNull(info); assertEquals(refreshInterval, info.getRefreshInterval()); From e5a741ab67173d77ffde21225a77806cdd5fd900 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 20:23:31 +0200 Subject: [PATCH 18/40] fix line length in some touched classes --- .../src/main/resources/checkstyle_suppressions.xml | 6 ------ .../action/admin/cluster/node/info/NodeInfo.java | 4 ++-- .../org/elasticsearch/common/unit/ByteSizeValue.java | 7 +++++-- .../org/elasticsearch/plugins/DummyPluginInfo.java | 3 ++- .../org/elasticsearch/monitor/os/OsProbeTests.java | 5 +++-- .../nodesinfo/NodeInfoStreamingTests.java | 10 +++++----- .../threadpool/ThreadPoolSerializationTests.java | 11 +++++------ 7 files changed, 22 insertions(+), 24 deletions(-) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 51349520810..6b825728bf8 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -21,7 +21,6 @@ - @@ -301,7 +300,6 @@ - @@ -468,7 +466,6 @@ - @@ -903,8 +900,6 @@ - - @@ -992,7 +987,6 @@ - diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 5389e95ad78..dd243cf6d88 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -81,8 +81,8 @@ public class NodeInfo extends BaseNodeResponse { public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Settings settings, @Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool, - @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins, @Nullable IngestInfo ingest, - @Nullable ByteSizeValue totalIndexingBuffer) { + @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins, + @Nullable IngestInfo ingest, @Nullable ByteSizeValue totalIndexingBuffer) { super(node); this.version = version; this.build = build; diff --git a/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java b/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java index 16f07926583..5944a8e06e6 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java @@ -177,7 +177,8 @@ public class ByteSizeValue implements Writeable { return parseBytesSizeValue(sValue, null, settingName); } - public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue, String settingName) throws ElasticsearchParseException { + public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue, String settingName) + throws ElasticsearchParseException { settingName = Objects.requireNonNull(settingName); if (sValue == null) { return defaultValue; @@ -215,7 +216,9 @@ public class ByteSizeValue implements Writeable { bytes = 0; } else { // Missing units: - throw new ElasticsearchParseException("failed to parse setting [{}] with value [{}] as a size in bytes: unit is missing or unrecognized", settingName, sValue); + throw new ElasticsearchParseException( + "failed to parse setting [{}] with value [{}] as a size in 
bytes: unit is missing or unrecognized", + settingName, sValue); } } catch (NumberFormatException e) { throw new ElasticsearchParseException("failed to parse [{}]", e, sValue); diff --git a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java index 5dab19581a3..90b1d32f4ae 100644 --- a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java +++ b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java @@ -24,5 +24,6 @@ public class DummyPluginInfo extends PluginInfo { super(name, description, version, classname); } - public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", "dummy_plugin_version", "DummyPluginName"); + public static final DummyPluginInfo INSTANCE = new DummyPluginInfo( + "dummy_plugin_name", "dummy plugin description", "dummy_plugin_version", "DummyPluginName"); } diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index 6f13d193a38..e037c8cd9d6 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -51,8 +51,9 @@ public class OsProbeTests extends ESTestCase { OsStats stats = probe.osStats(); assertNotNull(stats); assertThat(stats.getTimestamp(), greaterThan(0L)); - assertThat(stats.getCpu().getPercent(), anyOf(equalTo((short) -1), is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100))))); - double[] loadAverage = stats.getCpu().getLoadAverage(); + assertThat(stats.getCpu().getPercent(), anyOf(equalTo((short) -1), + is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100))))); + double[] loadAverage = stats.getCpu().loadAverage; if (loadAverage != null) { assertThat(loadAverage.length, equalTo(3)); } diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index 895cdebd68a..ffc88643bc3 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -76,7 +76,8 @@ public class NodeInfoStreamingTests extends ESTestCase { assertExpectedUnchanged(nodeInfo, readNodeInfo); } - // checks all properties that are expected to be unchanged. Once we start changing them between versions this method has to be changed as well + // checks all properties that are expected to be unchanged. 
+ // Once we start changing them between versions this method has to be changed as well private void assertExpectedUnchanged(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException { assertThat(nodeInfo.getBuild().toString(), equalTo(readNodeInfo.getBuild().toString())); assertThat(nodeInfo.getHostname(), equalTo(readNodeInfo.getHostname())); @@ -120,12 +121,10 @@ public class NodeInfoStreamingTests extends ESTestCase { assertThat(param1Builder.string(), equalTo(param2Builder.string())); } - private NodeInfo createNodeInfo() { + private static NodeInfo createNodeInfo() { Build build = Build.CURRENT; DiscoveryNode node = new DiscoveryNode("test_node", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), VersionUtils.randomVersion(random())); - Map serviceAttributes = new HashMap<>(); - serviceAttributes.put("test", "attribute"); Settings settings = Settings.builder().put("test", "setting").build(); OsInfo osInfo = DummyOsInfo.INSTANCE; ProcessInfo process = new ProcessInfo(randomInt(), randomBoolean(), randomPositiveLong()); @@ -134,7 +133,8 @@ public class NodeInfoStreamingTests extends ESTestCase { threadPoolInfos.add(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5)); ThreadPoolInfo threadPoolInfo = new ThreadPoolInfo(threadPoolInfos); Map profileAddresses = new HashMap<>(); - BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress(new TransportAddress[]{LocalTransportAddress.buildUnique()}, LocalTransportAddress.buildUnique()); + BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress( + new TransportAddress[]{LocalTransportAddress.buildUnique()}, LocalTransportAddress.buildUnique()); profileAddresses.put("test_address", dummyBoundTransportAddress); TransportInfo transport = new TransportInfo(dummyBoundTransportAddress, profileAddresses); HttpInfo htttpInfo = new HttpInfo(dummyBoundTransportAddress, randomLong()); diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java index b0ac43cc5bc..bf0f5f6e606 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java @@ -40,11 +40,8 @@ import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -/** - * - */ public class ThreadPoolSerializationTests extends ESTestCase { - BytesStreamOutput output = new BytesStreamOutput(); + private final BytesStreamOutput output = new BytesStreamOutput(); private ThreadPool.ThreadPoolType threadPoolType; @Before @@ -54,7 +51,8 @@ public class ThreadPoolSerializationTests extends ESTestCase { } public void testThatQueueSizeSerializationWorks() throws Exception { - ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType, 1, 10, TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("10k")); + ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType, 1, 10, + TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("10k")); output.setVersion(Version.CURRENT); info.writeTo(output); @@ -101,7 +99,8 @@ public class ThreadPoolSerializationTests extends ESTestCase { } public void testThatToXContentWritesInteger() throws Exception { - ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType, 1, 10, TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("1k")); + ThreadPool.Info info = new 
ThreadPool.Info("foo", threadPoolType, 1, 10, + TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("1k")); XContentBuilder builder = jsonBuilder(); builder.startObject(); info.toXContent(builder, ToXContent.EMPTY_PARAMS); From 746632fcf9e2604588e9c62d2dd22813f09440e7 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 20:45:14 +0200 Subject: [PATCH 19/40] remove redundant serialization test for JvmInfo and OsInfo and expand existing NodeInfoStreamingTests --- .../org/elasticsearch/plugins/PluginInfo.java | 2 +- .../monitor/jvm/JvmInfoTests.java | 39 ----------- .../elasticsearch/monitor/os/OsInfoTests.java | 51 -------------- .../nodesinfo/NodeInfoStreamingTests.java | 68 ++++++++++++------- 4 files changed, 44 insertions(+), 116 deletions(-) delete mode 100644 core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java index d2b5e0368c4..3e241eadd37 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java @@ -57,7 +57,7 @@ public class PluginInfo implements Writeable, ToXContent { * @param description Its description * @param version Version number */ - PluginInfo(String name, String description, String version, String classname) { + public PluginInfo(String name, String description, String version, String classname) { this.name = name; this.description = description; this.version = version; diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java index aaedec94154..131593cd114 100644 --- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java @@ -21,12 +21,8 @@ package org.elasticsearch.monitor.jvm; import org.apache.lucene.util.Constants; import org.elasticsearch.bootstrap.JavaVersion; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; - public class JvmInfoTests extends ESTestCase { public void testUseG1GC() { @@ -61,39 +57,4 @@ public class JvmInfoTests extends ESTestCase { final int index = argline.lastIndexOf(flag); return argline.charAt(index - 1) == '+'; } - - public void testSerialization() throws IOException { - JvmInfo jvmInfo = JvmInfo.jvmInfo(); - try (BytesStreamOutput out = new BytesStreamOutput()) { - jvmInfo.writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - JvmInfo deserializedJvmInfo = new JvmInfo(in); - assertEquals(jvmInfo.getVersion(), deserializedJvmInfo.getVersion()); - assertEquals(jvmInfo.getVmName(), deserializedJvmInfo.getVmName()); - assertEquals(jvmInfo.getVmVendor(), deserializedJvmInfo.getVmVendor()); - assertEquals(jvmInfo.getVmVersion(), deserializedJvmInfo.getVmVersion()); - assertEquals(jvmInfo.getBootClassPath(), deserializedJvmInfo.getBootClassPath()); - assertEquals(jvmInfo.getClassPath(), deserializedJvmInfo.getClassPath()); - assertEquals(jvmInfo.getPid(), deserializedJvmInfo.getPid()); - assertEquals(jvmInfo.getStartTime(), deserializedJvmInfo.getStartTime()); - assertEquals(jvmInfo.versionUpdatePack(), deserializedJvmInfo.versionUpdatePack()); - assertEquals(jvmInfo.versionAsInteger(), deserializedJvmInfo.versionAsInteger()); - assertEquals(jvmInfo.getMem().getDirectMemoryMax(), 
deserializedJvmInfo.getMem().getDirectMemoryMax()); - assertEquals(jvmInfo.getMem().getHeapInit(), deserializedJvmInfo.getMem().getHeapInit()); - assertEquals(jvmInfo.getMem().getHeapMax(), deserializedJvmInfo.getMem().getHeapMax()); - assertEquals(jvmInfo.getMem().getNonHeapInit(), deserializedJvmInfo.getMem().getNonHeapInit()); - assertEquals(jvmInfo.getMem().getNonHeapMax(), deserializedJvmInfo.getMem().getNonHeapMax()); - assertEquals(jvmInfo.getSystemProperties(), deserializedJvmInfo.getSystemProperties()); - assertArrayEquals(jvmInfo.getInputArguments(), deserializedJvmInfo.getInputArguments()); - assertArrayEquals(jvmInfo.getGcCollectors(), deserializedJvmInfo.getGcCollectors()); - assertArrayEquals(jvmInfo.getMemoryPools(), deserializedJvmInfo.getMemoryPools()); - assertEquals(jvmInfo.useCompressedOops(), deserializedJvmInfo.useCompressedOops()); - assertEquals(-1, deserializedJvmInfo.getConfiguredInitialHeapSize()); - assertEquals(-1, deserializedJvmInfo.getConfiguredMaxHeapSize()); - assertEquals("unknown", deserializedJvmInfo.useG1GC()); - assertNull(deserializedJvmInfo.onError()); - assertNull(deserializedJvmInfo.onOutOfMemoryError()); - } - } - } } diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java deleted file mode 100644 index a7327fbdea6..00000000000 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsInfoTests.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.monitor.os; - -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; - -public class OsInfoTests extends ESTestCase { - - public void testSerialization() throws IOException { - int availableProcessors = randomIntBetween(1, 64); - int allocatedProcessors = randomIntBetween(1, availableProcessors); - long refreshInterval = randomBoolean() ? 
-1 : randomPositiveLong(); - String name = randomAsciiOfLengthBetween(3, 10); - String arch = randomAsciiOfLengthBetween(3, 10); - String version = randomAsciiOfLengthBetween(3, 10); - OsInfo osInfo = new OsInfo(refreshInterval, availableProcessors, allocatedProcessors, name, arch, version); - try (BytesStreamOutput out = new BytesStreamOutput()) { - osInfo.writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - OsInfo deserializedOsInfo = new OsInfo(in); - assertEquals(osInfo.getRefreshInterval(), deserializedOsInfo.getRefreshInterval()); - assertEquals(osInfo.getAvailableProcessors(), deserializedOsInfo.getAvailableProcessors()); - assertEquals(osInfo.getAllocatedProcessors(), deserializedOsInfo.getAllocatedProcessors()); - assertEquals(osInfo.getName(), deserializedOsInfo.getName()); - assertEquals(osInfo.getArch(), deserializedOsInfo.getArch()); - assertEquals(osInfo.getVersion(), deserializedOsInfo.getVersion()); - } - } - } -} diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index ffc88643bc3..bfc2cc2d3ec 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.nodesinfo; import org.elasticsearch.Build; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -35,11 +34,11 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.http.HttpInfo; import org.elasticsearch.ingest.IngestInfo; +import org.elasticsearch.ingest.ProcessorInfo; import org.elasticsearch.monitor.jvm.JvmInfo; -import org.elasticsearch.monitor.os.DummyOsInfo; import org.elasticsearch.monitor.os.OsInfo; import org.elasticsearch.monitor.process.ProcessInfo; -import org.elasticsearch.plugins.DummyPluginInfo; +import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; @@ -48,7 +47,6 @@ import org.elasticsearch.transport.TransportInfo; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -58,23 +56,17 @@ import static java.util.Collections.emptySet; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.core.IsEqual.equalTo; -/** - * - */ public class NodeInfoStreamingTests extends ESTestCase { public void testNodeInfoStreaming() throws IOException { NodeInfo nodeInfo = createNodeInfo(); - Version version = Version.CURRENT; - BytesStreamOutput out = new BytesStreamOutput(); - out.setVersion(version); - nodeInfo.writeTo(out); - out.close(); - StreamInput in = out.bytes().streamInput(); - in.setVersion(version); - NodeInfo readNodeInfo = NodeInfo.readNodeInfo(in); - assertExpectedUnchanged(nodeInfo, readNodeInfo); - + try (BytesStreamOutput out = new BytesStreamOutput()) { + nodeInfo.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + NodeInfo readNodeInfo = NodeInfo.readNodeInfo(in); + assertExpectedUnchanged(nodeInfo, readNodeInfo); + } + } } // checks all properties that are expected to be 
unchanged. // Once we start changing them between versions this method has to be changed as well @@ -126,22 +118,48 @@ public class NodeInfoStreamingTests extends ESTestCase { DiscoveryNode node = new DiscoveryNode("test_node", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), VersionUtils.randomVersion(random())); Settings settings = Settings.builder().put("test", "setting").build(); - OsInfo osInfo = DummyOsInfo.INSTANCE; + int availableProcessors = randomIntBetween(1, 64); + int allocatedProcessors = randomIntBetween(1, availableProcessors); + long refreshInterval = randomBoolean() ? -1 : randomPositiveLong(); + String name = randomAsciiOfLengthBetween(3, 10); + String arch = randomAsciiOfLengthBetween(3, 10); + String version = randomAsciiOfLengthBetween(3, 10); + OsInfo osInfo = new OsInfo(refreshInterval, availableProcessors, allocatedProcessors, name, arch, version); ProcessInfo process = new ProcessInfo(randomInt(), randomBoolean(), randomPositiveLong()); JvmInfo jvm = JvmInfo.jvmInfo(); - List threadPoolInfos = new ArrayList<>(); - threadPoolInfos.add(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5)); + int numThreadPools = randomIntBetween(1, 10); + List threadPoolInfos = new ArrayList<>(numThreadPools); + for (int i = 0; i < numThreadPools; i++) { + threadPoolInfos.add(new ThreadPool.Info(randomAsciiOfLengthBetween(3, 10), + randomFrom(ThreadPool.ThreadPoolType.values()), randomInt())); + } ThreadPoolInfo threadPoolInfo = new ThreadPoolInfo(threadPoolInfos); Map profileAddresses = new HashMap<>(); BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress( new TransportAddress[]{LocalTransportAddress.buildUnique()}, LocalTransportAddress.buildUnique()); profileAddresses.put("test_address", dummyBoundTransportAddress); TransportInfo transport = new TransportInfo(dummyBoundTransportAddress, profileAddresses); - HttpInfo htttpInfo = new HttpInfo(dummyBoundTransportAddress, randomLong()); + HttpInfo httpInfo = new HttpInfo(dummyBoundTransportAddress, randomLong()); - PluginsAndModules plugins = new PluginsAndModules(Collections.singletonList(DummyPluginInfo.INSTANCE), - Collections.singletonList(DummyPluginInfo.INSTANCE)); - IngestInfo ingestInfo = new IngestInfo(Collections.emptyList()); + int numPlugins = randomIntBetween(0, 5); + List plugins = new ArrayList<>(); + for (int i = 0; i < numPlugins; i++) { + plugins.add(new PluginInfo(randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10), + randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10))); + } + int numModules = randomIntBetween(0, 5); + List modules = new ArrayList<>(); + for (int i = 0; i < numModules; i++) { + modules.add(new PluginInfo(randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10), + randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10))); + } + PluginsAndModules pluginsAndModules = new PluginsAndModules(plugins, modules); + int numProcessors = randomIntBetween(0, 5); + List processors = new ArrayList<>(numProcessors); + for (int i = 0; i < numProcessors; i++) { + processors.add(new ProcessorInfo(randomAsciiOfLengthBetween(3, 10))); + } + IngestInfo ingestInfo = new IngestInfo(processors); ByteSizeValue indexingBuffer; if (random().nextBoolean()) { indexingBuffer = null; @@ -150,6 +168,6 @@ public class NodeInfoStreamingTests extends ESTestCase { indexingBuffer = new ByteSizeValue(random().nextLong() & ((1L<<40)-1)); } return new NodeInfo(VersionUtils.randomVersion(random()), build, node, settings, 
osInfo, process, jvm, - threadPoolInfo, transport, htttpInfo, plugins, ingestInfo, indexingBuffer); + threadPoolInfo, transport, httpInfo, pluginsAndModules, ingestInfo, indexingBuffer); } } From 7c03f65c3622fcd94f8821d1cc9208aea7ab5005 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 1 Sep 2016 21:07:53 +0200 Subject: [PATCH 20/40] [TEST] adjusted EsTestCase#randomPositiveLong --- .../main/java/org/elasticsearch/test/ESTestCase.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index f8098420d54..0e411a6efc8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -303,11 +303,11 @@ public abstract class ESTestCase extends LuceneTestCase { } public static long randomPositiveLong() { - long positiveLong = randomLong(); - while (positiveLong == Long.MIN_VALUE) { - positiveLong = randomLong(); - } - return Math.abs(positiveLong); + long randomLong; + do { + randomLong = randomLong(); + } while (randomLong == Long.MIN_VALUE); + return Math.abs(randomLong); } public static float randomFloat() { From 51620f755b743bb23757787f12ef8bdd2c40a4c1 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 2 Sep 2016 09:46:43 +0200 Subject: [PATCH 21/40] [TEST] expand NodeInfoStreamingTests to also test serialization of nullable values --- .../nodesinfo/NodeInfoStreamingTests.java | 109 +++++++++--------- 1 file changed, 56 insertions(+), 53 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index bfc2cc2d3ec..d9134ba5cf3 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -82,24 +82,15 @@ public class NodeInfoStreamingTests extends ESTestCase { compareJsonOutput(nodeInfo.getTransport(), readNodeInfo.getTransport()); compareJsonOutput(nodeInfo.getNode(), readNodeInfo.getNode()); compareJsonOutput(nodeInfo.getOs(), readNodeInfo.getOs()); - comparePluginsAndModules(nodeInfo, readNodeInfo); + compareJsonOutput(nodeInfo.getPlugins(), readNodeInfo.getPlugins()); compareJsonOutput(nodeInfo.getIngest(), readNodeInfo.getIngest()); } - private void comparePluginsAndModules(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException { - ToXContent.Params params = ToXContent.EMPTY_PARAMS; - XContentBuilder pluginsAndModules = jsonBuilder(); - pluginsAndModules.startObject(); - nodeInfo.getPlugins().toXContent(pluginsAndModules, params); - pluginsAndModules.endObject(); - XContentBuilder readPluginsAndModules = jsonBuilder(); - readPluginsAndModules.startObject(); - readNodeInfo.getPlugins().toXContent(readPluginsAndModules, params); - readPluginsAndModules.endObject(); - assertThat(pluginsAndModules.string(), equalTo(readPluginsAndModules.string())); - } - private void compareJsonOutput(ToXContent param1, ToXContent param2) throws IOException { + if (param1 == null) { + assertNull(param2); + return; + } ToXContent.Params params = ToXContent.EMPTY_PARAMS; XContentBuilder param1Builder = jsonBuilder(); param1Builder.startObject(); @@ -117,53 +108,65 @@ public class NodeInfoStreamingTests extends ESTestCase { Build build = Build.CURRENT; DiscoveryNode node = new DiscoveryNode("test_node", 
LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), VersionUtils.randomVersion(random())); - Settings settings = Settings.builder().put("test", "setting").build(); - int availableProcessors = randomIntBetween(1, 64); - int allocatedProcessors = randomIntBetween(1, availableProcessors); - long refreshInterval = randomBoolean() ? -1 : randomPositiveLong(); - String name = randomAsciiOfLengthBetween(3, 10); - String arch = randomAsciiOfLengthBetween(3, 10); - String version = randomAsciiOfLengthBetween(3, 10); - OsInfo osInfo = new OsInfo(refreshInterval, availableProcessors, allocatedProcessors, name, arch, version); - ProcessInfo process = new ProcessInfo(randomInt(), randomBoolean(), randomPositiveLong()); - JvmInfo jvm = JvmInfo.jvmInfo(); - int numThreadPools = randomIntBetween(1, 10); - List threadPoolInfos = new ArrayList<>(numThreadPools); - for (int i = 0; i < numThreadPools; i++) { - threadPoolInfos.add(new ThreadPool.Info(randomAsciiOfLengthBetween(3, 10), - randomFrom(ThreadPool.ThreadPoolType.values()), randomInt())); + Settings settings = randomBoolean() ? null : Settings.builder().put("test", "setting").build(); + OsInfo osInfo = null; + if (randomBoolean()) { + int availableProcessors = randomIntBetween(1, 64); + int allocatedProcessors = randomIntBetween(1, availableProcessors); + long refreshInterval = randomBoolean() ? -1 : randomPositiveLong(); + String name = randomAsciiOfLengthBetween(3, 10); + String arch = randomAsciiOfLengthBetween(3, 10); + String version = randomAsciiOfLengthBetween(3, 10); + osInfo = new OsInfo(refreshInterval, availableProcessors, allocatedProcessors, name, arch, version); + } + ProcessInfo process = randomBoolean() ? null : new ProcessInfo(randomInt(), randomBoolean(), randomPositiveLong()); + JvmInfo jvm = randomBoolean() ? null : JvmInfo.jvmInfo(); + ThreadPoolInfo threadPoolInfo = null; + if (randomBoolean()) { + int numThreadPools = randomIntBetween(1, 10); + List threadPoolInfos = new ArrayList<>(numThreadPools); + for (int i = 0; i < numThreadPools; i++) { + threadPoolInfos.add(new ThreadPool.Info(randomAsciiOfLengthBetween(3, 10), + randomFrom(ThreadPool.ThreadPoolType.values()), randomInt())); + } + threadPoolInfo = new ThreadPoolInfo(threadPoolInfos); } - ThreadPoolInfo threadPoolInfo = new ThreadPoolInfo(threadPoolInfos); Map profileAddresses = new HashMap<>(); BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress( new TransportAddress[]{LocalTransportAddress.buildUnique()}, LocalTransportAddress.buildUnique()); profileAddresses.put("test_address", dummyBoundTransportAddress); - TransportInfo transport = new TransportInfo(dummyBoundTransportAddress, profileAddresses); - HttpInfo httpInfo = new HttpInfo(dummyBoundTransportAddress, randomLong()); + TransportInfo transport = randomBoolean() ? null : new TransportInfo(dummyBoundTransportAddress, profileAddresses); + HttpInfo httpInfo = randomBoolean() ? 
null : new HttpInfo(dummyBoundTransportAddress, randomLong()); - int numPlugins = randomIntBetween(0, 5); - List plugins = new ArrayList<>(); - for (int i = 0; i < numPlugins; i++) { - plugins.add(new PluginInfo(randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10), - randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10))); + PluginsAndModules pluginsAndModules = null; + if (randomBoolean()) { + int numPlugins = randomIntBetween(0, 5); + List plugins = new ArrayList<>(); + for (int i = 0; i < numPlugins; i++) { + plugins.add(new PluginInfo(randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10), + randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10))); + } + int numModules = randomIntBetween(0, 5); + List modules = new ArrayList<>(); + for (int i = 0; i < numModules; i++) { + modules.add(new PluginInfo(randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10), + randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10))); + } + pluginsAndModules = new PluginsAndModules(plugins, modules); } - int numModules = randomIntBetween(0, 5); - List modules = new ArrayList<>(); - for (int i = 0; i < numModules; i++) { - modules.add(new PluginInfo(randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10), - randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10))); + + IngestInfo ingestInfo = null; + if (randomBoolean()) { + int numProcessors = randomIntBetween(0, 5); + List processors = new ArrayList<>(numProcessors); + for (int i = 0; i < numProcessors; i++) { + processors.add(new ProcessorInfo(randomAsciiOfLengthBetween(3, 10))); + } + ingestInfo = new IngestInfo(processors); } - PluginsAndModules pluginsAndModules = new PluginsAndModules(plugins, modules); - int numProcessors = randomIntBetween(0, 5); - List processors = new ArrayList<>(numProcessors); - for (int i = 0; i < numProcessors; i++) { - processors.add(new ProcessorInfo(randomAsciiOfLengthBetween(3, 10))); - } - IngestInfo ingestInfo = new IngestInfo(processors); - ByteSizeValue indexingBuffer; - if (random().nextBoolean()) { - indexingBuffer = null; - } else { + + ByteSizeValue indexingBuffer = null; + if (randomBoolean()) { // pick a random long that sometimes exceeds an int: indexingBuffer = new ByteSizeValue(random().nextLong() & ((1L<<40)-1)); } From 52581d2df6e66bf744d7eb676aba5b4072e14265 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 2 Sep 2016 10:27:59 +0200 Subject: [PATCH 22/40] [TEST] fix bad merge --- .../test/java/org/elasticsearch/monitor/os/OsProbeTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index e037c8cd9d6..2e085a80700 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -53,7 +53,7 @@ public class OsProbeTests extends ESTestCase { assertThat(stats.getTimestamp(), greaterThan(0L)); assertThat(stats.getCpu().getPercent(), anyOf(equalTo((short) -1), is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100))))); - double[] loadAverage = stats.getCpu().loadAverage; + double[] loadAverage = stats.getCpu().getLoadAverage(); if (loadAverage != null) { assertThat(loadAverage.length, equalTo(3)); } From 234ca6b1fd2a2b79733af555e33e953aa303975c Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 2 Sep 2016 10:54:05 +0200 Subject: [PATCH 
23/40] Fix bad link in plugin docs --- docs/plugins/lang-javascript.asciidoc | 2 +- docs/plugins/lang-python.asciidoc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/plugins/lang-javascript.asciidoc b/docs/plugins/lang-javascript.asciidoc index 4bc6ba94ed2..650f42d9cab 100644 --- a/docs/plugins/lang-javascript.asciidoc +++ b/docs/plugins/lang-javascript.asciidoc @@ -1,7 +1,7 @@ [[lang-javascript]] === JavaScript Language Plugin -deprecated[5.0.0,JavaScript will be replaced by the new scripting language <>] +deprecated[5.0.0,JavaScript will be replaced by the new scripting language {ref}/modules-scripting-painless.html[`Painless`]] The JavaScript language plugin enables the use of JavaScript in Elasticsearch scripts, via Mozilla's diff --git a/docs/plugins/lang-python.asciidoc b/docs/plugins/lang-python.asciidoc index 4ad0fe628cb..fcec532ea10 100644 --- a/docs/plugins/lang-python.asciidoc +++ b/docs/plugins/lang-python.asciidoc @@ -1,7 +1,7 @@ [[lang-python]] === Python Language Plugin -deprecated[5.0.0,Python will be replaced by the new scripting language <>] +deprecated[5.0.0,Python will be replaced by the new scripting language {ref}/modules-scripting-painless.html[`Painless`]] The Python language plugin enables the use of Python in Elasticsearch scripts, via the http://www.jython.org/[Jython] Java implementation of Python. From cdc27b75b842b7b3c9a3c5515b5219cb425995a1 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 2 Sep 2016 13:30:36 +0200 Subject: [PATCH 24/40] Add more information to the how-to docs. #20297 - use auto-generated ids for indexing #20211 - use rounded dates in queries #20115 --- docs/reference/how-to/indexing-speed.asciidoc | 9 ++ docs/reference/how-to/search-speed.asciidoc | 118 ++++++++++++++++++ 2 files changed, 127 insertions(+) diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc index bb5a367a04c..50187af5b28 100644 --- a/docs/reference/how-to/indexing-speed.asciidoc +++ b/docs/reference/how-to/indexing-speed.asciidoc @@ -67,6 +67,15 @@ The filesystem cache will be used in order to buffer I/O operations. You should make sure to give at least half the memory of the machine running elasticsearch to the filesystem cache. +[float] +=== Use auto-generated ids + +When indexing a document that has an explicit id, elasticsearch needs to check +whether a document with the same id already exists within the same shard, which +is a costly operation and gets even more costly as the index grows. By using +auto-generated ids, Elasticsearch can skip this check, which makes indexing +faster. + [float] === Use faster hardware diff --git a/docs/reference/how-to/search-speed.asciidoc b/docs/reference/how-to/search-speed.asciidoc index 67848c9edca..2d0525a48e8 100644 --- a/docs/reference/how-to/search-speed.asciidoc +++ b/docs/reference/how-to/search-speed.asciidoc @@ -140,6 +140,124 @@ being mapped as <> rather than `integer` or `long`. In general, scripts should be avoided. If they are absolutely needed, you should prefer the `painless` and `expressions` engines. +[float] +=== Search rounded dates + +Queries on date fields that use `now` are typically not cacheable since the +range that is being matched changes all the time. However switching to a +rounded date is often acceptable in terms of user experience, and has the +benefit of making better use of the query cache. 
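The auto-generated ids tip above is easy to exercise from the Java API: simply omit the id when indexing. A minimal sketch, assuming a connected 5.x `Client` instance; the index, type, and field names here are made up for illustration:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.Client;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

public class AutoIdIndexingSketch {

    // Indexing without an explicit id lets Elasticsearch generate one,
    // so the shard can skip the "does this id already exist?" lookup.
    static String indexEvent(Client client) throws Exception {
        IndexResponse response = client.prepareIndex("logs", "event") // no id argument -> auto-generated id
                .setSource(jsonBuilder().startObject().field("message", "a test event").endObject())
                .get();
        return response.getId(); // the id Elasticsearch generated
    }
}
--------------------------------------------------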
+ +For instance the below query: + +[source,js] +-------------------------------------------------- +PUT index/type/1 +{ + "my_date": "2016-05-11T16:30:55.328Z" +} + +GET index/_search +{ + "query": { + "constant_score": { + "filter": { + "range": { + "my_date": { + "gte": "now-1h", + "lte": "now" + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +could be replaced with the following query: + +[source,js] +-------------------------------------------------- +GET index/_search +{ + "query": { + "constant_score": { + "filter": { + "range": { + "my_date": { + "gte": "now-1h/m", + "lte": "now/m" + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +In that case we rounded to the minute, so if the current time is `16:31:29`, +the range query will match everything whose value of the `my_date` field is +between `15:31:00` and `16:31:59`. And if several users run a query that +contains this range in the same minute, the query cache could help speed things +up a bit. The longer the interval that is used for rounding, the more the query +cache can help, but beware that too aggressive rounding might also hurt user +experience. + + +NOTE: It might be tempting to split ranges into a large cacheable part and +smaller not cacheable parts in order to be able to leverage the query cache, +as shown below: + +[source,js] +-------------------------------------------------- +GET index/_search +{ + "query": { + "constant_score": { + "filter": { + "bool": { + "should": [ + { + "range": { + "my_date": { + "gte": "now-1h", + "lte": "now-1h/m" + } + } + }, + { + "range": { + "my_date": { + "gt": "now-1h/m", + "lt": "now/m" + } + } + }, + { + "range": { + "my_date": { + "gte": "now/m", + "lte": "now" + } + } + } + ] + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +However such practice might make the query run slower in some cases since the +overhead introduced by the `bool` query may defeat the savings from better +leveraging the query cache. + [float] === Force-merge read-only indices From 639b7278d992533caee77b95d8219b428d9f7337 Mon Sep 17 00:00:00 2001 From: Greg Ichneumon Brown Date: Fri, 2 Sep 2016 05:41:07 -0700 Subject: [PATCH 25/40] Docs: clarify calculation of sigma and lambda in function_score (#20267) - Using log() to indicate natural log can add some confusion when trying to further adjust/tweak scores. 
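To make the natural-log convention concrete, the relations behind the regenerated `lambda_calc.png` and `sigma_calc.png` images can be written out, as documented for the `gauss` and `exp` decay functions; the numeric value at the end uses arbitrarily chosen inputs, purely for illustration:

[source,latex]
--------------------------------------------------
% gauss: S(doc) = \exp\left(-\frac{\max(0, |fieldvalue - origin| - offset)^2}{2\sigma^2}\right)
\sigma^2 = -\,\frac{\mathrm{scale}^2}{2\,\ln(\mathrm{decay})}
% exp:   S(doc) = \exp\left(\lambda \cdot \max(0, |fieldvalue - origin| - offset)\right)
\lambda = \frac{\ln(\mathrm{decay})}{\mathrm{scale}}
% Worked value: decay = 0.5, scale = 10  =>  \lambda = \ln(0.5)/10 \approx -0.0693
--------------------------------------------------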
Other parts of the API (field_value_factor on this same page) use 'ln' and 'log', so this change should be more consistent - Fixes #20027 - I generated the images using http://latex2png.com/ at a resolution of 150 which seemed to be about the same size as before --- docs/reference/images/lambda_calc.png | Bin 920 -> 1419 bytes docs/reference/images/sigma_calc.png | Bin 1236 -> 1731 bytes .../query-dsl/function-score-query.asciidoc | 2 ++ 3 files changed, 2 insertions(+) diff --git a/docs/reference/images/lambda_calc.png b/docs/reference/images/lambda_calc.png index 2d7f8bbb8db4cceac01a0221016f3db03deb554c..4fd19a2660f216b8c5d7acbf87d0283c99eb6aca 100644 GIT binary patch literal 1419 zcmZ`(c{J2(82*_|c1@kKbh#MIorq>(8YIhD##LDwrE3h6VT_DnW+(|=$x_)S3fG!l zE?GK6LMnvFa*Z`%DA%q=GOqj6KliWuo%4Rr_dM_WeD8P8cg`EtJf{*~P-~j+eX#gM}klEmP3QRyQ+u2wGyD^E~%^;HqAY5XDNca)fomWnS zt$wk!G$*>WEu~K+XX0S(5u+?X+g-Gs+VYomz?uCxQHBnD z-c!3J*he5p^UB4ZZw5?zB@OPZ_Pl(T@3~*Y?#BmI{H;LMRWF}ITyF-?i=%V7E zwdY5Vf5hXFHC#sqe*lu0?s!&@y%7y_u(mKx!}%4p*GD9MhJX94ipnyevV50sZ|%b;PVAelxQ zF@lb!T9d4zg;knJ;-E@Z8$zo4@m6mf-L*X4N>8R~a zZt>!&`!9X!rH6*Yk2g!f`p@vIE=DKyI7hvLq&jxX=&9G6%)Uv>9v}JeWr5DJ8*#~3 ziL3Ovn~fIWj%15W2d=M&szE|M@=U%!!6yx`H%9BlKC5cf;&hjW+9#Bsj8o^{!Y^NA z@exBwNn9?^nKrM%@J(vMZ%Urd`;ZX79jb3JgN^8N2wSS;t4!I|nz3Eg?HAgiRq`Zk zfK8GAz3R#L9Z!$9y;B@Df8U|dcCsa^v?efJwPLt6^K{xE=S3inrS8 z*~fmTZ6Py8*pNbo#Hg{GcXw+=7~Z%LKjwC@qa{y}9%@`n?|mTHM{_Ovt&-QQvKG(3 ze=!>$GF_lAkuF&f4qb2wRVlV#{TMJ^H$DjCRv_H?_4OOo%M-eHKZ)FrbW+s{pc^MD zeltd4@H!3(IY+}sEay_J;LY{tWwYsQfd$chyGnRf`_WXsVUXc@_S6GMou1d%V~ak` ztKjS&ANslRng{bhBmWGLdkEq?W$rtLNnV4eZZ3_;* zD|a*vF@;0w1q#dv=tAy}OSqLerP0mowbDo@g7Z)|wL0HGq!i8?xl}WMcyL{xEK<1j z789{buzQ1ZZgy=(h&KKgCplbftF^;jn%|IoUJX6jAf4*4blYEtsqe z`{r(eZMPHRzGt1*y&<-}6bQeb+4eU$J2W{~E*vU_<7MQ@@&W-sBT-mgBu*EDA|jDS zXsnUGfev^;A|u9(_Qu+O1_qN#_YV1ggKTtUJ!nwb>);1oeGMS_Q5cu06jc*dKYs+( z%aamv>;&7B;^*aRSO<Z)MfW{mKU4p7sR16{Q;B#@{YCX Q9GC%Yt zmjw9*Gbrs&b9vu6)97y5+0_CeZjqkv7#Ns2JY5_^ECiQM_RbIv6lwdPqBhgVXmVRu zYtX{N#oDPGE_gYdU3B+~lXA!Iqy-8Qt9n*wh#IczS{)V;xKKzpgmX*S?toX#KO$Zp z+Q{MRY84>z=FqZRQzu;e?8V-u!0z1txpOSP-;sV$@B8mK%R!G``FDCRxJ?W8 z1~M|~=WhRWxUi3Jw`uzN$a9|-$^2L^R$IAJhGF*I_)jUaYedA#GbXy+mHPfy^?yKS za8zh{obJ;TsZoX>wniL&m!Rm=>3QSjp-pDfpK^$P*?B@r|Lv|L$Fis1dGzc159gI! 
zLDROcW!N8fgk!s(k8x7b%sCpCt0HfShCj>Auw>hEcdmWwT&Fp`a~bW9UOJ)UATMG3 z<=>Qf4jO(}4hgCVW|&>(t{3~2(Wdn0ocI*=Lvmb>rcCEAWVE(3cXn6!95hy1f8aC2 z#q%E(S6_V9_j~FR6P~jQw+|N1F~0a)uu@Wy*;i2C)#G$h+03jBNoT*zieRkbaLTmp zpCId(=ge?<#%8mhJZD?%IbQ^RlAEDWbm)~*MPI(i#h)Q(^n!aAax7^P60Iy|xw}TT zduH_`&r_8(?6Y*|_bNzBY&n_VIg_G7xa_Rn(kQvaszJs7jV)a@_Bape`;w$A-~qG-mB zrJE&nqCA8TSu<4|gv4sqyXCNH+m;oV{M47<=4%CtW z4HR0Cr2?Xs0umXPp<+?cs%!*-fKdC=*l+KL_gvRG_jBLR^?ROgC+{@gQyz|h0|4Z` zAgmt%5|ZLtO-5S$c4azn#UdT;>f;JPGe_=AC`{}nXnvkaDq1eeLLT71Wh5o%bVZ1W{ zFkdgMYe3BO>xz}68e0XnHkk}eDAKh;a8=j#ro@9m5SgnO z_qCQVKknyUwtDpDoaH1gJ0>gn;V)+z2uLe`4s_XQ%W*aTV-I{78*^doGI^}qmNbP9Uk|$!#xu$8GyP1UDSG?Otu(mV$jbYNF38r=N zg@H40kp+Y2T(RG-HTX-q7uz-+^^->R{gxkU;zv+UPI^2&9i6JQLFUo8xXXgmr-KHy z?1}u|<$)a>8YM&cQ-K-$y%F_Vs6N#A5 z{M0i?G;_!VLuF&eHzZ-l#~S#rh@(4CBqmE1|5k<{?HFUYPq(zU2;v*U!SeNuAnv6S zg5*c0%)Pf~_bhvb?ka1x`4IwK?)zih`VWT6`q^sl2?;w1&C}GW)$;tpl1urL)~nvE zGHT5iF3LZpIc}8W>D!)i;w1|mx(XQ=OQ?0Ij9;9kLPfW_1eCC@g#5Q>unCK;HFCKSY|M5803u@xU}?*kZ}>_QVZ;rP30G%HQ5bFHX4U z@yH}Coh)wROYT79skF}SV7yj6T~p{-)YcYR{KuzMX4kOnF23Kcqw!q}t3y1M zVK30u4R?ulo>)q%{)b{E1aPsq%EMkO}GzyL5XNGU}N#mIR9XSJq`SCZ2ke)*==iE3f#BEU^-F4PTvn)CdhI7O%{{=T)i3J-slctox(yx=MbbiX;L(eX;dtRj;ZuC@#A2`0iMukSN*rgu(JLOa9afc=R z{PvvE-R{1E!tR90_U|M-3d0V*qIc2E_4WfTg6ZpP#!V@wW}hu*oX2StK^`WU%e1Ds z^lR06GOU{UMatv?t~YbTuBZy(wk}!J_5hcgi(_wf-1&0eQ||Tjp@2uKS}%4ta7H>j z*OnJi3s`tS&N7B5eCW8rqFB{9wvH1NWHl47GIoy^EIbN(kP^4dMqUYSXiU; SamQ!H9pL4G$2Pi!T=)lQzW#my literal 1236 zcmV;_1S|WAP)-#_L z&B}nN?YDKe`|!gkO|mjWookm*Y4PpGH{btM`G$CT=p5zwC-F%R5)QWCu5OOkEw%M<0-bRhf6lLq zju)f#cc13)x(O=FgO}5+Y?XHsUdeOM5!yKa2lekgKNQFb=b+;BF(`;p`#Ee__Vp

pEKBFCTJB7TaSCzs zhXesPulu^@0~}c79v_6mI@3tOG=| zG!_>@)AX|2~MVCYn!{Q8iT39i9&u z>z-$O4l_mzEfw6sD#+Ciw~~{Kws{RUVmD`+K%(&ZlTOi?U=C@CiL?dM=1L)j2@kA7 z$GIk#zeAm1dLilgI$`z5Jb3uLq2+x}DYM7S*CF#NxPw)Ys~yfSDamDtBx?kK@UK^? zgMKzU?wBxlNK+^{Hy#N>xt*Dlwv^F3z~AGk#ZCuR4Z_dr?Z!GH=??@kY3-bGTrnhX z^k>*Fa`;;8lsh}$Zs-rV%u(NS;2sFl9OE$oVZ%0xy??iI+w=J2dUpl>L!bip1*cTx y+D|z4chKlv=DnKrvC$SNFxx=VNqfL0000> for graphs demonstrating the curve generated by the `gauss` function. @@ -374,6 +375,7 @@ image:images/Exponential.png[] where again the parameter image:images/lambda.png[] is computed to assure that the score takes the value `decay` at distance `scale` from `origin`+-`offset` +// \lambda = ln(decay)/scale image:images/lambda_calc.png[] See <> for graphs demonstrating the curve generated by the `exp` function. From 3a13f54755f78f425a8cc672b5d879593063be7c Mon Sep 17 00:00:00 2001 From: Masaru Hasegawa Date: Fri, 2 Sep 2016 18:07:26 +0900 Subject: [PATCH 26/40] query_string_query should take term length into consideration when fuzziness is auto Fixes #15972 --- .../classic/MapperQueryParser.java | 3 +- .../query/QueryStringQueryBuilderTests.java | 33 +++++++++++++++++-- 2 files changed, 31 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 9bbe08208d7..bcf0a2b201a 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -102,7 +102,6 @@ public class MapperQueryParser extends QueryParser { setLowercaseExpandedTerms(settings.lowercaseExpandedTerms()); setPhraseSlop(settings.phraseSlop()); setDefaultOperator(settings.defaultOperator()); - setFuzzyMinSim(settings.fuzziness().asFloat()); setFuzzyPrefixLength(settings.fuzzyPrefixLength()); setLocale(settings.locale()); } @@ -114,7 +113,7 @@ public class MapperQueryParser extends QueryParser { @Override Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage) throws ParseException { if (fuzzySlop.image.length() == 1) { - return getFuzzyQuery(qfield, termImage, Float.toString(fuzzyMinSim)); + return getFuzzyQuery(qfield, termImage, Float.toString(settings.fuzziness().asDistance(termImage))); } return getFuzzyQuery(qfield, termImage, fuzzySlop.image.substring(1)); } diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index be77ba00734..ce49f18ccfc 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -26,15 +26,16 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; -import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.SynonymQuery; -import org.apache.lucene.search.PrefixQuery; -import 
org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; import org.elasticsearch.common.lucene.all.AllTermQuery; import org.elasticsearch.common.unit.Fuzziness; @@ -390,6 +391,32 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); + + int length = randomIntBetween(1, 10); + StringBuilder queryString = new StringBuilder(); + for (int i = 0; i < length; i++) { + queryString.append("a"); + } + queryString.append("~"); + + int expectedEdits; + if (length <= 2) { + expectedEdits = 0; + } else if (3 <= length && length <= 5) { + expectedEdits = 1; + } else { + expectedEdits = 2; + } + + Query query = queryStringQuery(queryString.toString()).defaultField(STRING_FIELD_NAME).fuzziness(Fuzziness.AUTO) + .toQuery(createShardContext()); + assertThat(query, instanceOf(FuzzyQuery.class)); + FuzzyQuery fuzzyQuery = (FuzzyQuery) query; + assertEquals(expectedEdits, fuzzyQuery.getMaxEdits()); + } + public void testFuzzyNumeric() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); QueryStringQueryBuilder query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME); From 4885709e101fb99d597dc8202f6e5fe82f7faf37 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 2 Sep 2016 16:08:34 +0200 Subject: [PATCH 27/40] Update rollover-index.asciidoc Fixed weirdly formatted callouts in rollover docs --- docs/reference/indices/rollover-index.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index 5e3f744ebaf..6ee28e7b2f4 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -55,9 +55,9 @@ The above request might return the following response: } -------------------------------------------------- // TESTRESPONSE - <1> Whether the index was rolled over. - <2> Whether the rollover was dry run. - <3> The result of each condition. +<1> Whether the index was rolled over. +<2> Whether the rollover was dry run. +<3> The result of each condition. [float] === Naming the new index From 549ca3178bb6151b080a97eac6093bf8af459a99 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 2 Sep 2016 10:09:51 -0400 Subject: [PATCH 28/40] Rename method in OldIndexUtils loadIndexList -> loadDataFilesList. The new method name is more accurate. 
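Coming back to the `query_string` fuzziness fix in PATCH 26 above: the length thresholds the new test pins down can be sketched directly against `Fuzziness.AUTO` — a toy example, not part of the patch itself:

[source,java]
--------------------------------------------------
import org.elasticsearch.common.unit.Fuzziness;

public class AutoFuzzinessSketch {
    public static void main(String[] args) {
        // AUTO scales the allowed edit distance with term length,
        // matching the expectedEdits computed in the test above:
        System.out.println(Fuzziness.AUTO.asDistance("ab"));       // 0 edits for length <= 2
        System.out.println(Fuzziness.AUTO.asDistance("abcd"));     // 1 edit for length 3-5
        System.out.println(Fuzziness.AUTO.asDistance("abcdefgh")); // 2 edits beyond that
    }
}
--------------------------------------------------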
--- .../bwcompat/OldIndexBackwardsCompatibilityIT.java | 4 ++-- .../src/main/java/org/elasticsearch/test/OldIndexUtils.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 45c89062a5a..3d35c42adcf 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -103,8 +103,8 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { @Before public void initIndexesList() throws Exception { - indexes = OldIndexUtils.loadIndexesList("index", getBwcIndicesPath()); - unsupportedIndexes = OldIndexUtils.loadIndexesList("unsupported", getBwcIndicesPath()); + indexes = OldIndexUtils.loadDataFilesList("index", getBwcIndicesPath()); + unsupportedIndexes = OldIndexUtils.loadDataFilesList("unsupported", getBwcIndicesPath()); } @AfterClass diff --git a/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java b/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java index e93f355d64c..fe46251e3ee 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java @@ -61,7 +61,7 @@ import static org.junit.Assert.assertEquals; public class OldIndexUtils { - public static List loadIndexesList(String prefix, Path bwcIndicesPath) throws IOException { + public static List loadDataFilesList(String prefix, Path bwcIndicesPath) throws IOException { List indexes = new ArrayList<>(); try (DirectoryStream stream = Files.newDirectoryStream(bwcIndicesPath, prefix + "-*.zip")) { for (Path path : stream) { From af215b528fc13f316db60153e2a3e4b8cd6bfac9 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Fri, 2 Sep 2016 12:32:55 -0400 Subject: [PATCH 29/40] move completion performance tips from migration docs to completion docs --- .../migration/migrate_5_0/suggest.asciidoc | 34 ++------------ .../suggesters/completion-suggest.asciidoc | 44 ++++++++++++++----- 2 files changed, 37 insertions(+), 41 deletions(-) diff --git a/docs/reference/migration/migrate_5_0/suggest.asciidoc b/docs/reference/migration/migrate_5_0/suggest.asciidoc index 822dcd45700..6979e8718e3 100644 --- a/docs/reference/migration/migrate_5_0/suggest.asciidoc +++ b/docs/reference/migration/migrate_5_0/suggest.asciidoc @@ -3,7 +3,8 @@ The completion suggester has undergone a complete rewrite. This means that the syntax and data structure for fields of type `completion` have changed, as -have the syntax and response of completion suggester requests. +have the syntax and response of completion suggester requests. See +<> for details. For indices created before Elasticsearch 5.0.0, `completion` fields and the completion suggester will continue to work as they did in Elasticsearch 2.x. @@ -28,7 +29,7 @@ to suggestion entries for both context and completion suggesters. Suggestions are aware of the document they belong to. Now, associated documents (`_source`) are returned as part of `completion` suggestions. -WARNING: `_source` meta-field must be enabled, which is the default behavior, +IMPORTANT: `_source` meta-field must be enabled, which is the default behavior, to enable returning `_source` with suggestions. 
Previously, `context` and `completion` suggesters supported an index-time @@ -37,35 +38,6 @@ Now metadata can be stored as part of the the same document as the suggestion for retrieval at query-time. The support for index-time `payloads` has been removed to avoid bloating the in-memory index with suggestion metadata. -In case of completion queries spanning more than one shard, the suggest is executed -in two phases, where the last phase fetches the relevant documents from shards, -implying executing completion requests against a single shard is more performant -due to the document fetch overhead when the suggest spans multiple shards. -To get best performance for completions, it is recommended to index completions -into a single shard index. In case of high heap usage due to shard size, it is still -recommended to break index into multiple shards instead of optimizing for completion -performance. - -Completion results return the full document `_source` by default. The size of -the `_source` can impact performance due to disk fetch and network transport overhead. -For best performance, filter out unnecessary fields from the `_source` using -<> to minimize `_source` size. -The following demonstrates an example completion query with source filtering: - -[source,js] --------------------------------------------------- -POST music/_suggest -{ - "_source": "completion.*", - "song-suggest" : { - "prefix" : "nir", - "completion" : { - "field" : "suggest" - } - } -} --------------------------------------------------- - ==== Simpler completion indexing As suggestions are document-oriented, suggestion metadata (e.g. `output`) diff --git a/docs/reference/search/suggesters/completion-suggest.asciidoc b/docs/reference/search/suggesters/completion-suggest.asciidoc index 587cdf86bd7..60065ce96bb 100644 --- a/docs/reference/search/suggesters/completion-suggest.asciidoc +++ b/docs/reference/search/suggesters/completion-suggest.asciidoc @@ -194,26 +194,50 @@ returns this response: -------------------------------------------------- // TESTRESPONSE -The configured weight for a suggestion is returned as `_score`. -The `text` field uses the `input` of your indexed suggestion. -Suggestions are document oriented, the document source is -returned in `_source`. <> -parameters are supported for filtering the document source. + +IMPORTANT: `_source` meta-field must be enabled, which is the default +behavior, to enable returning `_source` with suggestions. + +The configured weight for a suggestion is returned as `_score`. The +`text` field uses the `input` of your indexed suggestion. Suggestions +return the full document `_source` by default. The size of the `_source` +can impact performance due to disk fetch and network transport overhead. +For best performance, filter out unnecessary fields from the `_source` +using <> to minimize +`_source` size. The following demonstrates an example completion query +with source filtering: + +[source,js] +-------------------------------------------------- +POST music/_suggest +{ + "_source": "completion.*", + "song-suggest" : { + "prefix" : "nir", + "completion" : { + "field" : "suggest" + } + } +} +-------------------------------------------------- The basic completion suggester query supports the following parameters: `field`:: The name of the field on which to run the query (required). `size`:: The number of suggestions to return (defaults to `5`). -`payload`:: The name of the field or field name array to be returned - as payload (defaults to no fields). 
NOTE: The completion suggester considers all documents in the index. See <> for an explanation of how to query a subset of documents instead. -NOTE: Specifying `payload` fields will incur additional search performance -hit. The `payload` fields are retrieved eagerly (single pass) for top -suggestions at the shard level using field data or from doc values. +NOTE: In case of completion queries spanning more than one shard, the suggest +is executed in two phases, where the last phase fetches the relevant documents +from shards, implying executing completion requests against a single shard is +more performant due to the document fetch overhead when the suggest spans +multiple shards. To get best performance for completions, it is recommended to +index completions into a single shard index. In case of high heap usage due to +shard size, it is still recommended to break index into multiple shards instead +of optimizing for completion performance. [[fuzzy]] ==== Fuzzy queries From 222a4fa765581980162c261411d1abc239e80eea Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Fri, 2 Sep 2016 11:56:44 -0700 Subject: [PATCH 30/40] Reduce the number of threads and scripts being used in multi-threaded tests to prevent OOM from deprecation logging. --- .../JavaScriptScriptMultiThreadedTests.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java index 634a4ca6dfa..c3614952ecf 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java @@ -41,7 +41,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { final Object compiled = se.compile(null, "x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); - Thread[] threads = new Thread[50]; + Thread[] threads = new Thread[between(3, 12)]; final CountDownLatch latch = new CountDownLatch(threads.length); final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1); for (int i = 0; i < threads.length; i++) { @@ -57,7 +57,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { vars.put("x", x); vars.put("y", y); ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled), vars); - for (int i = 0; i < 100000; i++) { + for (int i = 0; i < between(100, 1000); i++) { long result = ((Number) script.run()).longValue(); assertThat(result, equalTo(addition)); } @@ -83,7 +83,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { final Object compiled = se.compile(null, "x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); - Thread[] threads = new Thread[50]; + Thread[] threads = new Thread[between(3, 12)]; final CountDownLatch latch = new CountDownLatch(threads.length); final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1); for (int i = 0; i < threads.length; i++) { @@ -96,7 +96,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { Map vars = new HashMap(); vars.put("x", x); ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", 
compiled), vars); - for (int i = 0; i < 100000; i++) { + for (int i = 0; i < between(100, 1000); i++) { long y = Randomness.get().nextInt(); long addition = x + y; script.setNextVar("y", y); @@ -125,7 +125,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { final Object compiled = se.compile(null, "x + y", Collections.emptyMap()); final AtomicBoolean failed = new AtomicBoolean(); - Thread[] threads = new Thread[50]; + Thread[] threads = new Thread[between(3, 12)]; final CountDownLatch latch = new CountDownLatch(threads.length); final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1); for (int i = 0; i < threads.length; i++) { @@ -135,7 +135,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { try { barrier.await(); Map runtimeVars = new HashMap(); - for (int i = 0; i < 100000; i++) { + for (int i = 0; i < between(100, 1000); i++) { long x = Randomness.get().nextInt(); long y = Randomness.get().nextInt(); long addition = x + y; From d2ab42eabe0b2c2dda62ab895245ec21e8d4da6b Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Fri, 2 Sep 2016 14:57:22 -0400 Subject: [PATCH 31/40] [TESTS] added higher level logging to the testShadowReplicaNaturalRelocation test --- .../org/elasticsearch/index/IndexWithShadowReplicasIT.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index 4d962fb6c88..989c60fc916 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -594,6 +594,12 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { * Tests that shadow replicas can be "naturally" rebalanced and relocated * around the cluster. By "naturally" I mean without using the reroute API */ + // This test failed on CI when trying to assert that all the shard data has been deleted + // from the index path. It has not been reproduced locally. Despite the IndicesService + // deleting the index and hence, deleting all the shard data for the index, the test + // failure still showed some Lucene files in the data directory for that index. Not sure + // why that is, so turning on more logging here. + @TestLogging("indices:TRACE,env:TRACE") public void testShadowReplicaNaturalRelocation() throws Exception { Path dataPath = createTempDir(); Settings nodeSettings = nodeSettings(dataPath); From c992a007c8c272d9e91f07b281070f2af4dc4347 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 2 Sep 2016 21:07:55 +0200 Subject: [PATCH 32/40] Pass on maxUnsafeAutoIdTimestamp on recovery / relocation (#20300) To ensure we don't add documents more than once even if it's mostly paranoia, except for one case where a shard is relocated away from and back to the same node while an initial request is in flight but has not yet finished AND is retried. Yet, this is a possible case and for that reason we ensure we pass on the maxUnsafeAutoIdTimestamp when we prepare for translog recovery.
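A deliberately simplified sketch of the invariant this change protects — not the actual InternalEngine code, just the shape of the check that makes the handed-over timestamp matter:

[source,java]
--------------------------------------------------
import java.util.concurrent.atomic.AtomicLong;

public class AppendOnlySketch {

    // Newest timestamp of an auto-id write that might still be retried;
    // this is the value that must survive relocation and be handed over
    // when the target shard prepares for translog recovery.
    private final AtomicLong maxUnsafeAutoIdTimestamp = new AtomicLong(-1);

    // A write carrying an auto-generated id may be treated as a pure append
    // (skipping the duplicate/version lookup) only if it is provably newer
    // than anything that could still be retried.
    boolean mayOptimizeAppendOnly(long autoGeneratedIdTimestamp) {
        return autoGeneratedIdTimestamp > maxUnsafeAutoIdTimestamp.get();
    }
}
--------------------------------------------------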
Relates to #20211 --- .../index/engine/EngineConfig.java | 18 ++++++--- .../index/engine/InternalEngine.java | 7 ++-- .../index/engine/SegmentsStats.java | 2 +- .../elasticsearch/index/shard/IndexShard.java | 16 ++++---- .../index/shard/StoreRecovery.java | 5 ++- .../recovery/PeerRecoveryTargetService.java | 2 +- ...ryPrepareForTranslogOperationsRequest.java | 11 ++++- .../recovery/RecoverySourceHandler.java | 3 +- .../indices/recovery/RecoveryTarget.java | 4 +- .../recovery/RecoveryTargetHandler.java | 4 +- .../recovery/RemoteRecoveryTargetHandler.java | 4 +- .../index/engine/InternalEngineTests.java | 40 ++++++++++++++----- .../index/engine/ShadowEngineTests.java | 3 +- .../ESIndexLevelReplicationTestCase.java | 10 +++-- .../IndexLevelReplicationTests.java | 25 ++++++++++++ .../index/shard/IndexShardTests.java | 4 +- .../index/shard/RefreshListenersTests.java | 5 +-- 17 files changed, 116 insertions(+), 47 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index a6b5583a0e2..9f9d2186a83 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.similarities.Similarity; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -65,6 +66,7 @@ public final class EngineConfig { private final Engine.EventListener eventListener; private final QueryCache queryCache; private final QueryCachingPolicy queryCachingPolicy; + private final long maxUnsafeAutoIdTimestamp; @Nullable private final RefreshListeners refreshListeners; @@ -107,10 +109,11 @@ public final class EngineConfig { */ public EngineConfig(OpenMode openMode, ShardId shardId, ThreadPool threadPool, IndexSettings indexSettings, Engine.Warmer warmer, Store store, SnapshotDeletionPolicy deletionPolicy, - MergePolicy mergePolicy,Analyzer analyzer, + MergePolicy mergePolicy, Analyzer analyzer, Similarity similarity, CodecService codecService, Engine.EventListener eventListener, TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, - TranslogConfig translogConfig, TimeValue flushMergesAfter, RefreshListeners refreshListeners) { + TranslogConfig translogConfig, TimeValue flushMergesAfter, RefreshListeners refreshListeners, + long maxUnsafeAutoIdTimestamp) { if (openMode == null) { throw new IllegalArgumentException("openMode must not be null"); } @@ -137,6 +140,9 @@ public final class EngineConfig { this.flushMergesAfter = flushMergesAfter; this.openMode = openMode; this.refreshListeners = refreshListeners; + assert maxUnsafeAutoIdTimestamp >= IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP : + "maxUnsafeAutoIdTimestamp must be >= -1 but was " + maxUnsafeAutoIdTimestamp; + this.maxUnsafeAutoIdTimestamp = maxUnsafeAutoIdTimestamp; } /** @@ -323,10 +329,10 @@ public final class EngineConfig { } /** - * Returns true iff auto generated IDs should be optimized inside the engine for append only. - * The default is true. + * Returns the max timestamp that is used to de-optimize documents with auto-generated IDs in the engine. 
+ * This is used to ensure we don't add duplicate documents when we assume an append only case based on auto-generated IDs */ - public boolean getOptimizeAutoGeneratedIds() { - return indexSettings.getValue(INDEX_OPTIMIZE_AUTO_GENERATED_IDS); + public long getMaxUnsafeAutoIdTimestamp() { + return indexSettings.getValue(INDEX_OPTIMIZE_AUTO_GENERATED_IDS) ? maxUnsafeAutoIdTimestamp : Long.MAX_VALUE; } } diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 45311fef518..774465fb71a 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -127,10 +127,11 @@ public class InternalEngine extends Engine { public InternalEngine(EngineConfig engineConfig) throws EngineException { super(engineConfig); openMode = engineConfig.getOpenMode(); - if (engineConfig.getOptimizeAutoGeneratedIds() == false - || engineConfig.getIndexSettings().getIndexVersionCreated().before(Version.V_5_0_0_alpha6)) { + if (engineConfig.getIndexSettings().getIndexVersionCreated().before(Version.V_5_0_0_alpha6)) { // no optimization for pre 5.0.0.alpha6 since translog might not have all information needed maxUnsafeAutoIdTimestamp.set(Long.MAX_VALUE); + } else { + maxUnsafeAutoIdTimestamp.set(engineConfig.getMaxUnsafeAutoIdTimestamp()); } this.versionMap = new LiveVersionMap(); store.incRef(); @@ -1299,7 +1300,7 @@ public class InternalEngine extends Engine { mergeScheduler.refreshConfig(); // config().isEnableGcDeletes() or config.getGcDeletesInMillis() may have changed: maybePruneDeletedTombstones(); - if (engineConfig.getOptimizeAutoGeneratedIds() == false) { + if (engineConfig.getMaxUnsafeAutoIdTimestamp() == Long.MAX_VALUE) { // this is an anti-viral settings you can only opt out for the entire index // only if a shard starts up again due to relocation or if the index is closed // the setting will be re-interpreted if it's set to true diff --git a/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java b/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java index 7a4bf8155b3..637beebfec8 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java +++ b/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java @@ -279,7 +279,7 @@ public class SegmentsStats implements Streamable, ToXContent { } /** - * Returns the max timestamp that is used to de-optimize documetns with auto-generated IDs in the engine. + * Returns the max timestamp that is used to de-optimize documents with auto-generated IDs in the engine. 
* This is used to ensure we don't add duplicate documents when we assume an append only case based on auto-generated IDs */ public long getMaxUnsafeAutoIdTimestamp() { diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 45b8104cb75..a38585cf468 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -42,6 +42,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.routing.RecoverySource; @@ -959,11 +960,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl translogStats.totalOperations(0); translogStats.totalOperationsOnStart(0); } - internalPerformTranslogRecovery(false, indexExists); + internalPerformTranslogRecovery(false, indexExists, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); assert recoveryState.getStage() == RecoveryState.Stage.TRANSLOG : "TRANSLOG stage expected but was: " + recoveryState.getStage(); } - private void internalPerformTranslogRecovery(boolean skipTranslogRecovery, boolean indexExists) throws IOException { + private void internalPerformTranslogRecovery(boolean skipTranslogRecovery, boolean indexExists, long maxUnsafeAutoIdTimestamp) throws IOException { if (state != IndexShardState.RECOVERING) { throw new IndexShardNotRecoveringException(shardId, state); } @@ -992,7 +993,7 @@ } else { openMode = EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG; } - final EngineConfig config = newEngineConfig(openMode); + final EngineConfig config = newEngineConfig(openMode, maxUnsafeAutoIdTimestamp); // we disable deletes since we allow for operations to be executed against the shard while recovering // but we need to make sure we don't lose deletes until we are done recovering config.setEnableGcDeletes(false); @@ -1012,9 +1013,9 @@ * the replay of the transaction log which is required in cases where we restore a previous index or recover from * a remote peer.
*/ - public void skipTranslogRecovery() throws IOException { + public void skipTranslogRecovery(long maxUnsafeAutoIdTimestamp) throws IOException { assert getEngineOrNull() == null : "engine was already created"; - internalPerformTranslogRecovery(true, true); + internalPerformTranslogRecovery(true, true, maxUnsafeAutoIdTimestamp); assert recoveryState.getTranslog().recoveredOperations() == 0; } @@ -1603,12 +1604,13 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl return mapperService.documentMapperWithAutoCreate(type); } - private EngineConfig newEngineConfig(EngineConfig.OpenMode openMode) { + private EngineConfig newEngineConfig(EngineConfig.OpenMode openMode, long maxUnsafeAutoIdTimestamp) { final IndexShardRecoveryPerformer translogRecoveryPerformer = new IndexShardRecoveryPerformer(shardId, mapperService, logger); return new EngineConfig(openMode, shardId, threadPool, indexSettings, warmer, store, deletionPolicy, indexSettings.getMergePolicy(), mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, translogRecoveryPerformer, indexCache.query(), cachingPolicy, translogConfig, - IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), refreshListeners); + IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), refreshListeners, + maxUnsafeAutoIdTimestamp); } /** diff --git a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index 6ff897a8c89..44b4ed933f7 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -30,6 +30,7 @@ import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.routing.RecoverySource; @@ -346,7 +347,7 @@ final class StoreRecovery { recoveryState.getIndex().updateVersion(version); if (recoveryState.getRecoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) { assert indexShouldExists; - indexShard.skipTranslogRecovery(); + indexShard.skipTranslogRecovery(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); } else { // since we recover from local, just fill the files and size try { @@ -398,7 +399,7 @@ final class StoreRecovery { } final IndexId indexId = repository.getRepositoryData().resolveIndexId(indexName); repository.restoreShard(indexShard, restoreSource.snapshot().getSnapshotId(), restoreSource.version(), indexId, snapshotShardId, indexShard.recoveryState()); - indexShard.skipTranslogRecovery(); + indexShard.skipTranslogRecovery(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); indexShard.finalizeRecovery(); indexShard.postRecovery("restore done"); } catch (Exception e) { diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index 778e6c88fa8..f26d0787f41 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -304,7 +304,7 @@ public class 
PeerRecoveryTargetService extends AbstractComponent implements Inde public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, TransportChannel channel) throws Exception { try (RecoveriesCollection.RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { - recoveryRef.status().prepareForTranslogOperations(request.totalTranslogOps()); + recoveryRef.status().prepareForTranslogOperations(request.totalTranslogOps(), request.getMaxUnsafeAutoIdTimestamp()); } channel.sendResponse(TransportResponse.Empty.INSTANCE); } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java index 3d5d7052c9d..171102d07ea 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java @@ -19,6 +19,7 @@ package org.elasticsearch.indices.recovery; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -31,6 +32,7 @@ import java.io.IOException; */ public class RecoveryPrepareForTranslogOperationsRequest extends TransportRequest { + private long maxUnsafeAutoIdTimestamp = IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP; private long recoveryId; private ShardId shardId; private int totalTranslogOps = RecoveryState.Translog.UNKNOWN; @@ -38,10 +40,11 @@ public class RecoveryPrepareForTranslogOperationsRequest extends TransportReques public RecoveryPrepareForTranslogOperationsRequest() { } - RecoveryPrepareForTranslogOperationsRequest(long recoveryId, ShardId shardId, int totalTranslogOps) { + RecoveryPrepareForTranslogOperationsRequest(long recoveryId, ShardId shardId, int totalTranslogOps, long maxUnsafeAutoIdTimestamp) { this.recoveryId = recoveryId; this.shardId = shardId; this.totalTranslogOps = totalTranslogOps; + this.maxUnsafeAutoIdTimestamp = maxUnsafeAutoIdTimestamp; } public long recoveryId() { @@ -56,12 +59,17 @@ public class RecoveryPrepareForTranslogOperationsRequest extends TransportReques return totalTranslogOps; } + public long getMaxUnsafeAutoIdTimestamp() { + return maxUnsafeAutoIdTimestamp; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); recoveryId = in.readLong(); shardId = ShardId.readShardId(in); totalTranslogOps = in.readVInt(); + maxUnsafeAutoIdTimestamp = in.readLong(); } @Override @@ -70,5 +78,6 @@ public class RecoveryPrepareForTranslogOperationsRequest extends TransportReques out.writeLong(recoveryId); shardId.writeTo(out); out.writeVInt(totalTranslogOps); + out.writeLong(maxUnsafeAutoIdTimestamp); } } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index cb3356358cd..790376ba78e 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -346,7 +346,8 @@ public class RecoverySourceHandler { // Send a request preparing the new shard's translog to receive // operations. This ensures the shard engine is started and disables // garbage collection (not the JVM's GC!) 
of tombstone deletes - cancellableThreads.executeIO(() -> recoveryTarget.prepareForTranslogOperations(totalTranslogOps)); + cancellableThreads.executeIO(() -> recoveryTarget.prepareForTranslogOperations(totalTranslogOps, + shard.segmentStats(false).getMaxUnsafeAutoIdTimestamp())); stopWatch.stop(); response.startTime = stopWatch.totalTime().millis() - startEngineStart; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index 9d833fe163e..d608dc50e23 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -327,9 +327,9 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget /*** Implementation of {@link RecoveryTargetHandler } */ @Override - public void prepareForTranslogOperations(int totalTranslogOps) throws IOException { + public void prepareForTranslogOperations(int totalTranslogOps, long maxUnsafeAutoIdTimestamp) throws IOException { state().getTranslog().totalOperations(totalTranslogOps); - indexShard().skipTranslogRecovery(); + indexShard().skipTranslogRecovery(maxUnsafeAutoIdTimestamp); } @Override diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java index 3d7e4f29c35..18966028792 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java @@ -33,8 +33,10 @@ public interface RecoveryTargetHandler { * Prepares the target to receive translog operations, after all files have been copied * * @param totalTranslogOps total translog operations expected to be sent + * @param maxUnsafeAutoIdTimestamp the max timestamp that is used to de-optimize documents with auto-generated IDs in the engine.
+ * This is used to ensure we don't add duplicate documents when we assume an append only case based on auto-generated IDs */ - void prepareForTranslogOperations(int totalTranslogOps) throws IOException; + void prepareForTranslogOperations(int totalTranslogOps, long maxUnsafeAutoIdTimestamp) throws IOException; /** * The finalize request clears unreferenced translog files, refreshes the engine now that diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java index 428c1615880..327eb3b8eca 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java @@ -74,9 +74,9 @@ public class RemoteRecoveryTargetHandler implements RecoveryTargetHandler { } @Override - public void prepareForTranslogOperations(int totalTranslogOps) throws IOException { + public void prepareForTranslogOperations(int totalTranslogOps, long maxUnsafeAutoIdTimestamp) throws IOException { transportService.submitRequest(targetNode, PeerRecoveryTargetService.Actions.PREPARE_TRANSLOG, - new RecoveryPrepareForTranslogOperationsRequest(recoveryId, shardId, totalTranslogOps), + new RecoveryPrepareForTranslogOperationsRequest(recoveryId, shardId, totalTranslogOps, maxUnsafeAutoIdTimestamp), TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(), EmptyTransportResponseHandler.INSTANCE_SAME).txGet(); } diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 0e9d7aeb396..7b62966515d 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -53,6 +53,7 @@ import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.TestUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable; @@ -206,7 +207,8 @@ public class InternalEngineTests extends ESTestCase { return new EngineConfig(openMode, config.getShardId(), config.getThreadPool(), config.getIndexSettings(), config.getWarmer(), config.getStore(), config.getDeletionPolicy(), config.getMergePolicy(), config.getAnalyzer(), config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), config.getTranslogRecoveryPerformer(), config.getQueryCache(), - config.getQueryCachingPolicy(), config.getTranslogConfig(), config.getFlushMergesAfter(), config.getRefreshListeners()); + config.getQueryCachingPolicy(), config.getTranslogConfig(), config.getFlushMergesAfter(), config.getRefreshListeners(), + config.getMaxUnsafeAutoIdTimestamp()); } @Override @@ -276,7 +278,7 @@ public class InternalEngineTests extends ESTestCase { } protected InternalEngine createEngine(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy) throws IOException { - EngineConfig config = config(indexSettings, store, translogPath, mergePolicy); + EngineConfig config = config(indexSettings, store, translogPath, mergePolicy, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); InternalEngine internalEngine = new 
InternalEngine(config); if (config.getOpenMode() == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { internalEngine.recoverFromTranslog(); @@ -284,7 +286,7 @@ public class InternalEngineTests extends ESTestCase { return internalEngine; } - public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy) { + public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy, long maxUnsafeAutoIdTimestamp) { IndexWriterConfig iwc = newIndexWriterConfig(); TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE); final EngineConfig.OpenMode openMode; @@ -306,7 +308,7 @@ public class InternalEngineTests extends ESTestCase { EngineConfig config = new EngineConfig(openMode, shardId, threadPool, indexSettings, null, store, createSnapshotDeletionPolicy(), mergePolicy, iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), listener, new TranslogHandler(shardId.getIndexName(), logger), IndexSearcher.getDefaultQueryCache(), - IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), null); + IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), null, maxUnsafeAutoIdTimestamp); return config; } @@ -903,7 +905,7 @@ public class InternalEngineTests extends ESTestCase { public void testSyncedFlush() throws IOException { try (Store store = createStore(); Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), - new LogByteSizeMergePolicy()))) { + new LogByteSizeMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP))) { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); @@ -930,7 +932,7 @@ public class InternalEngineTests extends ESTestCase { for (int i = 0; i < iters; i++) { try (Store store = createStore(); InternalEngine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), - new LogDocMergePolicy()))) { + new LogDocMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP))) { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); Engine.Index doc1 = new Engine.Index(newUid("1"), doc); @@ -1158,7 +1160,7 @@ public class InternalEngineTests extends ESTestCase { public void testForceMerge() throws IOException { try (Store store = createStore(); Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), - new LogByteSizeMergePolicy()))) { // use log MP here we test some behavior in ESMP + new LogByteSizeMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP))) { // use log MP here we test some behavior in ESMP int numDocs = randomIntBetween(10, 100); for (int i = 0; i < numDocs; i++) { ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, null); @@ -1601,7 +1603,7 @@ public class InternalEngineTests extends ESTestCase { public void testEnableGcDeletes() throws Exception { try (Store store = createStore(); - Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), newMergePolicy()))) { + Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), 
newMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP))) { engine.config().setEnableGcDeletes(false); // Add document @@ -1737,7 +1739,7 @@ public class InternalEngineTests extends ESTestCase { // expected } // now it should be OK. - EngineConfig config = copy(config(defaultSettings, store, primaryTranslogDir, newMergePolicy()), EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG); + EngineConfig config = copy(config(defaultSettings, store, primaryTranslogDir, newMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP), EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG); engine = new InternalEngine(config); } @@ -2057,7 +2059,7 @@ public class InternalEngineTests extends ESTestCase { config.getIndexSettings(), null, store, createSnapshotDeletionPolicy(), newMergePolicy(), config.getAnalyzer(), config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), config.getTranslogRecoveryPerformer(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, - TimeValue.timeValueMinutes(5), config.getRefreshListeners()); + TimeValue.timeValueMinutes(5), config.getRefreshListeners(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); try { InternalEngine internalEngine = new InternalEngine(brokenConfig); @@ -2112,7 +2114,7 @@ public class InternalEngineTests extends ESTestCase { public void testCurrentTranslogIDisCommitted() throws IOException { try (Store store = createStore()) { - EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy()); + EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); // create { @@ -2374,6 +2376,22 @@ public class InternalEngineTests extends ESTestCase { assertTrue(engine.indexWriterHasDeletions()); } + public void testEngineMaxTimestampIsInitialized() throws IOException { + try (Store store = createStore(); + Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE, + IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP))) { + assertEquals(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); + + } + + long maxTimestamp = Math.abs(randomLong()); + try (Store store = createStore(); + Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE, + maxTimestamp))) { + assertEquals(maxTimestamp, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); + } + } + public void testAppendConcurrently() throws InterruptedException, IOException { Thread[] thread = new Thread[randomIntBetween(3, 5)]; int numDocs = randomIntBetween(1000, 10000); diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index 7d2c6d6de41..6dea774f258 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -38,6 +38,7 @@ import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; @@ -247,7 +248,7 @@ public class ShadowEngineTests extends ESTestCase { EngineConfig config = 
new EngineConfig(openMode, shardId, threadPool, indexSettings, null, store, createSnapshotDeletionPolicy(), mergePolicy, iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), eventListener, null, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, - TimeValue.timeValueMinutes(5), refreshListeners); + TimeValue.timeValueMinutes(5), refreshListeners, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); return config; } diff --git a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index a3bfa957def..ec794091a42 100644 --- a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -110,7 +110,7 @@ import static org.hamcrest.Matchers.equalTo; public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { protected ThreadPool threadPool; - private final Index index = new Index("test", "uuid"); + protected final Index index = new Index("test", "uuid"); private final ShardId shardId = new ShardId(index, 0); private final Map indexMapping = Collections.singletonMap("type", "{ \"type\": {} }"); protected static final PeerRecoveryTargetService.RecoveryListener recoveryListener = new PeerRecoveryTargetService.RecoveryListener() { @@ -284,8 +284,7 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { primary.recoverFromStore(); primary.updateRoutingEntry(ShardRoutingHelper.moveToStarted(primary.routingEntry())); for (IndexShard replicaShard : replicas) { - recoverReplica(replicaShard, - (replica, sourceNode) -> new RecoveryTarget(replica, sourceNode, recoveryListener, version -> {})); + recoverReplica(replicaShard); } } @@ -294,6 +293,11 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { replicas.add(replica); return replica; } + + public void recoverReplica(IndexShard replica) throws IOException { + recoverReplica(replica, (r, sourceNode) -> new RecoveryTarget(r, sourceNode, recoveryListener, version -> {})); + } + public void recoverReplica(IndexShard replica, BiFunction targetSupplier) throws IOException { recoverReplica(replica, targetSupplier, true); diff --git a/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 3bbd8c3bbcc..407b374a92e 100644 --- a/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -18,9 +18,13 @@ */ package org.elasticsearch.index.replication; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.engine.InternalEngineTests; +import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTests; import org.elasticsearch.index.store.Store; @@ -96,4 +100,25 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase } } + public void testInheritMaxValidAutoIDTimestampOnRecovery() throws Exception { 
+ try (ReplicationGroup shards = createGroup(0)) { + shards.startAll(); + final IndexRequest indexRequest = new IndexRequest(index.getName(), "type").source("{}"); + indexRequest.onRetry(); // force an update of the timestamp + final IndexResponse response = shards.index(indexRequest); + assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); + if (randomBoolean()) { // lets check if that also happens if no translog record is replicated + shards.flush(); + } + IndexShard replica = shards.addReplica(); + shards.recoverReplica(replica); + + SegmentsStats segmentsStats = replica.segmentStats(false); + SegmentsStats primarySegmentStats = shards.getPrimary().segmentStats(false); + assertNotEquals(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, primarySegmentStats.getMaxUnsafeAutoIdTimestamp()); + assertEquals(primarySegmentStats.getMaxUnsafeAutoIdTimestamp(), segmentsStats.getMaxUnsafeAutoIdTimestamp()); + assertNotEquals(Long.MAX_VALUE, segmentsStats.getMaxUnsafeAutoIdTimestamp()); + } + } + } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 3c7b09a980d..b4725a8506d 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -1611,7 +1611,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { operations.add(new Translog.Index("testtype", "1", BytesReference.toBytes(jsonBuilder().startObject().field("foo", "bar").endObject().bytes()))); newShard.prepareForIndexRecovery(); newShard.recoveryState().getTranslog().totalOperations(operations.size()); - newShard.skipTranslogRecovery(); + newShard.skipTranslogRecovery(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); newShard.performBatchRecovery(operations); assertFalse(newShard.getTranslog().syncNeeded()); } @@ -1668,7 +1668,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { List operations = new ArrayList<>(); operations.add(new Translog.Index("testtype", "1", BytesReference.toBytes(jsonBuilder().startObject().field("foo", "bar").endObject().bytes()))); newShard.prepareForIndexRecovery(); - newShard.skipTranslogRecovery(); + newShard.skipTranslogRecovery(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); // Shard is still inactive since we haven't started recovering yet assertFalse(newShard.isActive()); newShard.performBatchRecovery(operations); diff --git a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 20fd02b5163..f3f15b2639c 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -29,6 +29,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.util.IOUtils; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -36,7 +37,6 @@ import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.index.Index; import 
org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.codec.CodecService; @@ -66,7 +66,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; -import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; @@ -125,7 +124,7 @@ public class RefreshListenersTests extends ESTestCase { store, new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()), newMergePolicy(), iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), eventListener, new TranslogHandler(shardId.getIndexName(), logger), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, - TimeValue.timeValueMinutes(5), listeners); + TimeValue.timeValueMinutes(5), listeners, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); engine = new InternalEngine(config); } From bfd072bc102467ea62125983cbde8195e8064548 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 2 Sep 2016 18:22:30 -0400 Subject: [PATCH 33/40] Switch more docs to CONSOLE Related to #18160 --- docs/reference/indices/shrink-index.asciidoc | 10 ++++-- docs/reference/indices/stats.asciidoc | 16 ++++++---- docs/reference/indices/templates.asciidoc | 33 +++++++++++--------- 3 files changed, 37 insertions(+), 22 deletions(-) diff --git a/docs/reference/indices/shrink-index.asciidoc b/docs/reference/indices/shrink-index.asciidoc index 713f2149547..027cf8b924d 100644 --- a/docs/reference/indices/shrink-index.asciidoc +++ b/docs/reference/indices/shrink-index.asciidoc @@ -41,6 +41,8 @@ PUT /my_source_index/_settings } } -------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT my_source_index\n/] <1> Forces the relocation of a copy of each shard to the node with name `shrink_node_name`. See <> for more options. @@ -62,6 +64,8 @@ the following request: -------------------------------------------------- POST my_source_index/_shrink/my_target_index -------------------------------------------------- +// CONSOLE +// TEST[continued] The above request returns immediately once the target index has been added to the cluster state -- it doesn't wait for the shrink operation to start. @@ -105,6 +109,8 @@ POST my_source_index/_shrink/my_target_index } } -------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT my_source_index\n{"settings": {"index.blocks.write": true}}\n/] <1> The number of shards in the target index. This must be a factor of the number of shards in the source index. @@ -139,6 +145,6 @@ replicas and may decide to relocate the primary shard to another node. [float] === Wait For Active Shards -Because the shrink operation creates a new index to shrink the shards to, -the <> setting +Because the shrink operation creates a new index to shrink the shards to, +the <> setting on index creation applies to the shrink index action as well. 
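An aside, not part of this patch: the requirement that the target shard count be a factor of the source shard count (noted in the shrink docs above) is easy to sanity-check with plain Java. The helper below is hypothetical:

[source,java]
--------------------------------------------------
// Hypothetical helper: enumerate the legal target shard counts for a
// shrink, i.e. the factors of the source index's shard count.
import java.util.ArrayList;
import java.util.List;

final class ShrinkFactors {
    static List<Integer> validTargetShardCounts(int sourceShards) {
        List<Integer> factors = new ArrayList<>();
        for (int candidate = 1; candidate <= sourceShards; candidate++) {
            if (sourceShards % candidate == 0) {
                factors.add(candidate);
            }
        }
        return factors;
    }

    public static void main(String[] args) {
        // an 8-shard index can only be shrunk to 1, 2, 4, or 8 shards
        System.out.println(validTargetShardCounts(8)); // prints [1, 2, 4, 8]
    }
}
--------------------------------------------------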
diff --git a/docs/reference/indices/stats.asciidoc b/docs/reference/indices/stats.asciidoc index e990a7ff6bd..a95b1c81ae1 100644 --- a/docs/reference/indices/stats.asciidoc +++ b/docs/reference/indices/stats.asciidoc @@ -10,15 +10,18 @@ all indices: [source,js] -------------------------------------------------- -curl localhost:9200/_stats +GET /_stats -------------------------------------------------- +// CONSOLE Specific index stats can be retrieved using: [source,js] -------------------------------------------------- -curl localhost:9200/index1,index2/_stats +GET /index1,index2/_stats -------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT index1\nPUT index2\n/] By default, all stats are returned, returning only specific stats can be specified as well in the URI. Those stats can be any of: @@ -74,12 +77,14 @@ Here are some samples: [source,js] -------------------------------------------------- # Get back stats for merge and refresh only for all indices -curl 'localhost:9200/_stats/merge,refresh' +GET /_stats/merge,refresh # Get back stats for type1 and type2 documents for the my_index index -curl 'localhost:9200/my_index/_stats/indexing?types=type1,type2 +GET /my_index/_stats/indexing?types=type1,type2 # Get back just search stats for group1 and group2 -curl 'localhost:9200/_stats/search?groups=group1,group2 +GET /_stats/search?groups=group1,group2 -------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT my_index\n/] The stats returned are aggregated on the index level, with `primaries` and `total` aggregations, where `primaries` are the values for only the @@ -91,4 +96,3 @@ Note, as shards move around the cluster, their stats will be cleared as they are created on other nodes. On the other hand, even though a shard "left" a node, that node will still retain the stats that shard contributed to. - diff --git a/docs/reference/indices/templates.asciidoc b/docs/reference/indices/templates.asciidoc index 754a93e3096..6e2f7ce91f1 100644 --- a/docs/reference/indices/templates.asciidoc +++ b/docs/reference/indices/templates.asciidoc @@ -38,6 +38,7 @@ PUT _template/template_1 } -------------------------------------------------- // CONSOLE +// TESTSETUP Defines a template named template_1, with a template pattern of `te*`. The settings and mappings will be applied to any index name that matches @@ -47,7 +48,7 @@ It is also possible to include aliases in an index template as follows: [source,js] -------------------------------------------------- -curl -XPUT localhost:9200/_template/template_1 -d ' +PUT _template/template_1 { "template" : "te*", "settings" : { @@ -64,8 +65,9 @@ curl -XPUT localhost:9200/_template/template_1 -d ' "{index}-alias" : {} <1> } } -' -------------------------------------------------- +// CONSOLE +// TEST[s/^/DELETE _template\/template_1\n/] <1> the `{index}` placeholder within the alias name will be replaced with the actual index name that the template gets applied to during index creation. 
@@ -79,8 +81,9 @@ Index templates are identified by a name (in the above case [source,js] -------------------------------------------------- -curl -XDELETE localhost:9200/_template/template_1 +DELETE /_template/template_1 -------------------------------------------------- +// CONSOLE [float] [[getting]] @@ -91,24 +94,26 @@ Index templates are identified by a name (in the above case [source,js] -------------------------------------------------- -curl -XGET localhost:9200/_template/template_1 +GET /_template/template_1 -------------------------------------------------- +// CONSOLE You can also match several templates by using wildcards like: [source,js] -------------------------------------------------- -curl -XGET localhost:9200/_template/temp* -curl -XGET localhost:9200/_template/template_1,template_2 +GET /_template/temp* +GET /_template/template_1,template_2 -------------------------------------------------- +// CONSOLE To get list of all index templates you can run: [source,js] -------------------------------------------------- -curl -XGET localhost:9200/_template/ +GET /_template -------------------------------------------------- - +// CONSOLE [float] [[indices-templates-exists]] @@ -118,13 +123,13 @@ Used to check if the template exists or not. For example: [source,js] ----------------------------------------------- -curl -XHEAD -i localhost:9200/_template/template_1 +HEAD _template/template_1 ----------------------------------------------- +// CONSOLE The HTTP status code indicates if the template with the given name exists or not. A status code `200` means it exists, a `404` it does not. - [float] [[multiple-templates]] === Multiple Template Matching @@ -137,7 +142,7 @@ orders overriding them. For example: [source,js] -------------------------------------------------- -curl -XPUT localhost:9200/_template/template_1 -d ' +PUT /_template/template_1 { "template" : "*", "order" : 0, @@ -150,9 +155,8 @@ curl -XPUT localhost:9200/_template/template_1 -d ' } } } -' -curl -XPUT localhost:9200/_template/template_2 -d ' +PUT /_template/template_2 { "template" : "te*", "order" : 1, @@ -165,8 +169,9 @@ curl -XPUT localhost:9200/_template/template_2 -d ' } } } -' -------------------------------------------------- +// CONSOLE +// TEST[s/^/DELETE _template\/template_1\n/] The above will disable storing the `_source` on all `type1` types, but for indices of that start with `te*`, source will still be enabled. From 40f889b825dbae9638a580e00f798923de03648c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 2 Sep 2016 18:36:57 -0400 Subject: [PATCH 34/40] Warn if unsupported logging configuration present This commit adds a warning that an unsupported logging configuration is present and points users to the new logging configuration file. 
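To make the new check concrete before the diff, here is a minimal standalone sketch; the stream-based walk is an assumption of this sketch, while the warning text mirrors the diff and the actual implementation in LogConfigurator below uses Files.walkFileTree and the Elasticsearch logger:

[source,java]
--------------------------------------------------
// Sketch of the legacy-config detection: flag any logging.{yml,yaml,json,properties}
// file in the config directory and point at log4j2.properties instead.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;

final class LegacyLoggingConfigCheck {
    static void warnIfPresent(Path configDir) throws IOException {
        final List<String> suffixes = Arrays.asList(".yml", ".yaml", ".json", ".properties");
        try (Stream<Path> files = Files.walk(configDir)) {
            files.filter(Files::isRegularFile)
                 .filter(p -> p.getFileName().toString().startsWith("logging"))
                 .filter(p -> suffixes.stream().anyMatch(s -> p.getFileName().toString().endsWith(s)))
                 .forEach(p -> System.err.printf(
                         "ignoring unsupported logging configuration file [%s], logging is configured via [%s]%n",
                         p, p.getParent().resolve("log4j2.properties")));
        }
    }
}
--------------------------------------------------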
Relates #20309 --- .../common/logging/LogConfigurator.java | 32 ++++++++++++++++-- .../common/logging/EvilLoggerTests.java | 33 ++++++++++++++----- .../common/logging/config/logging.yml | 2 ++ 3 files changed, 56 insertions(+), 11 deletions(-) create mode 100644 qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/logging.yml diff --git a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index 3b7ec0deb34..522ff589758 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -30,6 +30,7 @@ import org.apache.logging.log4j.core.config.builder.impl.BuiltConfiguration; import org.apache.logging.log4j.core.config.composite.CompositeConfiguration; import org.apache.logging.log4j.core.config.properties.PropertiesConfiguration; import org.apache.logging.log4j.core.config.properties.PropertiesConfigurationFactory; +import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; @@ -43,6 +44,7 @@ import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; +import java.util.Arrays; import java.util.EnumSet; import java.util.List; import java.util.Map; @@ -63,9 +65,9 @@ public class LogConfigurator { final LoggerContext context = (LoggerContext) LogManager.getContext(false); if (resolveConfig) { - final Set options = EnumSet.of(FileVisitOption.FOLLOW_LINKS); final List configurations = new ArrayList<>(); final PropertiesConfigurationFactory factory = new PropertiesConfigurationFactory(); + final Set options = EnumSet.of(FileVisitOption.FOLLOW_LINKS); Files.walkFileTree(environment.configFile(), options, Integer.MAX_VALUE, new SimpleFileVisitor() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { @@ -87,10 +89,36 @@ public class LogConfigurator { final Level level = ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings); Loggers.setLevel(Loggers.getLogger(key.substring("logger.".length())), level); } + + warnIfOldConfigurationFilePresent(environment); + } + + private static void warnIfOldConfigurationFilePresent(final Environment environment) throws IOException { + // TODO: the warning for unsupported logging configurations can be removed in 6.0.0 + assert Version.CURRENT.major < 6; + final List suffixes = Arrays.asList(".yml", ".yaml", ".json", ".properties"); + final Set options = EnumSet.of(FileVisitOption.FOLLOW_LINKS); + Files.walkFileTree(environment.configFile(), options, Integer.MAX_VALUE, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + final String fileName = file.getFileName().toString(); + if (fileName.startsWith("logging")) { + for (final String suffix : suffixes) { + if (fileName.endsWith(suffix)) { + Loggers.getLogger(LogConfigurator.class).warn( + "ignoring unsupported logging configuration file [{}], logging is configured via [{}]", + file.toString(), + file.getParent().resolve("log4j2.properties")); + } + } + } + return FileVisitResult.CONTINUE; + } + }); } @SuppressForbidden(reason = "sets system property for logging configuration") - private static void setLogConfigurationSystemProperty(Environment 
environment, Settings settings) { + private static void setLogConfigurationSystemProperty(final Environment environment, final Settings settings) { System.setProperty("es.logs", environment.logsFile().resolve(ClusterName.CLUSTER_NAME_SETTING.get(settings).value()).toString()); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index a14ee2282c4..168453df763 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -21,15 +21,16 @@ package org.elasticsearch.common.logging; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.hamcrest.RegexMatcher; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -66,21 +67,22 @@ public class EvilLoggerTests extends ESTestCase { testLogger.trace("This is a trace message"); final String path = System.getProperty("es.logs") + ".log"; final List<String> events = Files.readAllLines(PathUtils.get(path)); - assertThat(events.size(), equalTo(5)); + assertThat(events.size(), equalTo(6)); // the five messages we log plus a warning for unsupported configuration files final String location = "org.elasticsearch.common.logging.EvilLoggerTests.testLocationInfoTest"; - assertLogLine(events.get(0), Level.ERROR, location, "This is an error message"); - assertLogLine(events.get(1), Level.WARN, location, "This is a warning message"); - assertLogLine(events.get(2), Level.INFO, location, "This is an info message"); - assertLogLine(events.get(3), Level.DEBUG, location, "This is a debug message"); - assertLogLine(events.get(4), Level.TRACE, location, "This is a trace message"); + // the first message is a warning for unsupported configuration files + assertLogLine(events.get(1), Level.ERROR, location, "This is an error message"); + assertLogLine(events.get(2), Level.WARN, location, "This is a warning message"); + assertLogLine(events.get(3), Level.INFO, location, "This is an info message"); + assertLogLine(events.get(4), Level.DEBUG, location, "This is a debug message"); + assertLogLine(events.get(5), Level.TRACE, location, "This is a trace message"); } private void assertLogLine(final String logLine, final Level level, final String location, final String message) { final Matcher matcher = Pattern.compile("\\[(.*)\\]\\[(.*)\\(.*\\)\\] (.*)").matcher(logLine); assertTrue(logLine, matcher.matches()); assertThat(matcher.group(1), equalTo(level.toString())); - assertThat(matcher.group(2), equalTo(location)); - assertThat(matcher.group(3), equalTo(message)); + assertThat(matcher.group(2), RegexMatcher.matches(location)); + assertThat(matcher.group(3), RegexMatcher.matches(message)); } public void testDeprecationLogger() throws IOException { @@ -95,4 +97,17 @@ public class EvilLoggerTests extends ESTestCase { "This is a deprecation message"); } + public void testUnsupportedLoggingConfigurationFiles() throws IOException { + // TODO: the warning for unsupported logging configurations can be removed in 6.0.0 + assert
Version.CURRENT.major < 6; + final String path = System.getProperty("es.logs") + ".log"; + final List<String> events = Files.readAllLines(PathUtils.get(path)); + assertThat(events.size(), equalTo(1)); + assertLogLine( + events.get(0), + Level.WARN, + "org\\.elasticsearch\\.common\\.logging\\.LogConfigurator.*", + "ignoring unsupported logging configuration file \\[.*\\], logging is configured via \\[.*\\]"); + } + } diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/logging.yml b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/logging.yml new file mode 100644 index 00000000000..5aa98f454f3 --- /dev/null +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/logging.yml @@ -0,0 +1,2 @@ +logger.level: INFO +rootLogger: ${logger.level}, terminal From b9966fed3675deb09ce344aeb24b25b98b048e79 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 2 Sep 2016 20:26:32 -0400 Subject: [PATCH 35/40] Hack around Log4j bug rendering exceptions Log4j has a bug where it does not handle a security exception that can be thrown when it is rendering a stack trace. This commit intentionally introduces jar hell with the ThrowableProxy class to work around this bug until a fix is released. Relates #20306 --- .../resources/checkstyle_suppressions.xml | 3 + .../log4j/core/impl/ThrowableProxy.java | 665 ++++++++++++++++++ .../org/elasticsearch/bootstrap/JarHell.java | 10 +- 3 files changed, 677 insertions(+), 1 deletion(-) create mode 100644 core/src/main/java/org/apache/logging/log4j/core/impl/ThrowableProxy.java diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 6b825728bf8..d50f362a73e 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -10,6 +10,9 @@ + + + diff --git a/core/src/main/java/org/apache/logging/log4j/core/impl/ThrowableProxy.java b/core/src/main/java/org/apache/logging/log4j/core/impl/ThrowableProxy.java new file mode 100644 index 00000000000..37ab0a15391 --- /dev/null +++ b/core/src/main/java/org/apache/logging/log4j/core/impl/ThrowableProxy.java @@ -0,0 +1,665 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache license, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the license for the specific language governing permissions and + * limitations under the license.
+ */ + +package org.apache.logging.log4j.core.impl; + +import java.io.Serializable; +import java.net.URL; +import java.security.CodeSource; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Stack; + +import org.apache.logging.log4j.core.util.Loader; +import org.apache.logging.log4j.status.StatusLogger; +import org.apache.logging.log4j.util.ReflectionUtil; +import org.apache.logging.log4j.util.Strings; + +/** + * Wraps a Throwable to add packaging information about each stack trace element. + * + *
+ * A proxy is used to represent a throwable that may not exist in a different class loader or JVM. When an application + * deserializes a ThrowableProxy, the throwable may not be set, but the throwable's information is preserved in other + * fields of the proxy like the message and stack trace. + *
+ * + *
+ * TODO: Move this class to org.apache.logging.log4j.core because it is used from LogEvent. + *
+ *
+ * TODO: Deserialize: Try to rebuild Throwable if the target exception is in this class loader? + *
+ */ +public class ThrowableProxy implements Serializable { + + private static final String CAUSED_BY_LABEL = "Caused by: "; + private static final String SUPPRESSED_LABEL = "Suppressed: "; + private static final String WRAPPED_BY_LABEL = "Wrapped by: "; + + /** + * Cached StackTracePackageElement and ClassLoader. + *
+ * Consider this class private. + *
+ */ + static class CacheEntry { + private final ExtendedClassInfo element; + private final ClassLoader loader; + + public CacheEntry(final ExtendedClassInfo element, final ClassLoader loader) { + this.element = element; + this.loader = loader; + } + } + + private static final ThrowableProxy[] EMPTY_THROWABLE_PROXY_ARRAY = new ThrowableProxy[0]; + + private static final char EOL = '\n'; + + private static final long serialVersionUID = -2752771578252251910L; + + private final ThrowableProxy causeProxy; + + private int commonElementCount; + + private final ExtendedStackTraceElement[] extendedStackTrace; + + private final String localizedMessage; + + private final String message; + + private final String name; + + private final ThrowableProxy[] suppressedProxies; + + private final transient Throwable throwable; + + /** + * For JSON and XML IO via Jackson. + */ + @SuppressWarnings("unused") + private ThrowableProxy() { + this.throwable = null; + this.name = null; + this.extendedStackTrace = null; + this.causeProxy = null; + this.message = null; + this.localizedMessage = null; + this.suppressedProxies = EMPTY_THROWABLE_PROXY_ARRAY; + } + + /** + * Constructs the wrapper for the Throwable that includes packaging data. + * + * @param throwable + * The Throwable to wrap, must not be null. + */ + public ThrowableProxy(final Throwable throwable) { + this(throwable, null); + } + + /** + * Constructs the wrapper for the Throwable that includes packaging data. + * + * @param throwable + * The Throwable to wrap, must not be null. + * @param visited + * The set of visited suppressed exceptions. + */ + private ThrowableProxy(final Throwable throwable, final Set visited) { + this.throwable = throwable; + this.name = throwable.getClass().getName(); + this.message = throwable.getMessage(); + this.localizedMessage = throwable.getLocalizedMessage(); + final Map map = new HashMap<>(); + final Stack> stack = ReflectionUtil.getCurrentStackTrace(); + this.extendedStackTrace = this.toExtendedStackTrace(stack, map, null, throwable.getStackTrace()); + final Throwable throwableCause = throwable.getCause(); + final Set causeVisited = new HashSet<>(1); + this.causeProxy = throwableCause == null ? null : new ThrowableProxy(throwable, stack, map, throwableCause, visited, causeVisited); + this.suppressedProxies = this.toSuppressedProxies(throwable, visited); + } + + /** + * Constructs the wrapper for a Throwable that is referenced as the cause by another Throwable. + * + * @param parent + * The Throwable referencing this Throwable. + * @param stack + * The Class stack. + * @param map + * The cache containing the packaging data. + * @param cause + * The Throwable to wrap. + * @param suppressedVisited TODO + * @param causeVisited TODO + */ + private ThrowableProxy(final Throwable parent, final Stack> stack, final Map map, + final Throwable cause, final Set suppressedVisited, final Set causeVisited) { + causeVisited.add(cause); + this.throwable = cause; + this.name = cause.getClass().getName(); + this.message = this.throwable.getMessage(); + this.localizedMessage = this.throwable.getLocalizedMessage(); + this.extendedStackTrace = this.toExtendedStackTrace(stack, map, parent.getStackTrace(), cause.getStackTrace()); + final Throwable causeCause = cause.getCause(); + this.causeProxy = causeCause == null || causeVisited.contains(causeCause) ? 
null : new ThrowableProxy(parent, + stack, map, causeCause, suppressedVisited, causeVisited); + this.suppressedProxies = this.toSuppressedProxies(cause, suppressedVisited); + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (this.getClass() != obj.getClass()) { + return false; + } + final ThrowableProxy other = (ThrowableProxy) obj; + if (this.causeProxy == null) { + if (other.causeProxy != null) { + return false; + } + } else if (!this.causeProxy.equals(other.causeProxy)) { + return false; + } + if (this.commonElementCount != other.commonElementCount) { + return false; + } + if (this.name == null) { + if (other.name != null) { + return false; + } + } else if (!this.name.equals(other.name)) { + return false; + } + if (!Arrays.equals(this.extendedStackTrace, other.extendedStackTrace)) { + return false; + } + if (!Arrays.equals(this.suppressedProxies, other.suppressedProxies)) { + return false; + } + return true; + } + + private void formatCause(final StringBuilder sb, final String prefix, final ThrowableProxy cause, final List ignorePackages) { + formatThrowableProxy(sb, prefix, CAUSED_BY_LABEL, cause, ignorePackages); + } + + private void formatThrowableProxy(final StringBuilder sb, final String prefix, final String causeLabel, + final ThrowableProxy throwableProxy, final List ignorePackages) { + if (throwableProxy == null) { + return; + } + sb.append(prefix).append(causeLabel).append(throwableProxy).append(EOL); + this.formatElements(sb, prefix, throwableProxy.commonElementCount, + throwableProxy.getStackTrace(), throwableProxy.extendedStackTrace, ignorePackages); + this.formatSuppressed(sb, prefix + "\t", throwableProxy.suppressedProxies, ignorePackages); + this.formatCause(sb, prefix, throwableProxy.causeProxy, ignorePackages); + } + + private void formatSuppressed(final StringBuilder sb, final String prefix, final ThrowableProxy[] suppressedProxies, + final List ignorePackages) { + if (suppressedProxies == null) { + return; + } + for (final ThrowableProxy suppressedProxy : suppressedProxies) { + final ThrowableProxy cause = suppressedProxy; + formatThrowableProxy(sb, prefix, SUPPRESSED_LABEL, cause, ignorePackages); + } + } + + private void formatElements(final StringBuilder sb, final String prefix, final int commonCount, + final StackTraceElement[] causedTrace, final ExtendedStackTraceElement[] extStackTrace, + final List ignorePackages) { + if (ignorePackages == null || ignorePackages.isEmpty()) { + for (final ExtendedStackTraceElement element : extStackTrace) { + this.formatEntry(element, sb, prefix); + } + } else { + int count = 0; + for (int i = 0; i < extStackTrace.length; ++i) { + if (!this.ignoreElement(causedTrace[i], ignorePackages)) { + if (count > 0) { + appendSuppressedCount(sb, prefix, count); + count = 0; + } + this.formatEntry(extStackTrace[i], sb, prefix); + } else { + ++count; + } + } + if (count > 0) { + appendSuppressedCount(sb, prefix, count); + } + } + if (commonCount != 0) { + sb.append(prefix).append("\t... ").append(commonCount).append(" more").append(EOL); + } + } + + private void appendSuppressedCount(final StringBuilder sb, final String prefix, final int count) { + sb.append(prefix); + if (count == 1) { + sb.append("\t....").append(EOL); + } else { + sb.append("\t... 
suppressed ").append(count).append(" lines").append(EOL); + } + } + + private void formatEntry(final ExtendedStackTraceElement extStackTraceElement, final StringBuilder sb, final String prefix) { + sb.append(prefix); + sb.append("\tat "); + sb.append(extStackTraceElement); + sb.append(EOL); + } + + /** + * Formats the specified Throwable. + * + * @param sb + * StringBuilder to contain the formatted Throwable. + * @param cause + * The Throwable to format. + */ + public void formatWrapper(final StringBuilder sb, final ThrowableProxy cause) { + this.formatWrapper(sb, cause, null); + } + + /** + * Formats the specified Throwable. + * + * @param sb + * StringBuilder to contain the formatted Throwable. + * @param cause + * The Throwable to format. + * @param packages + * The List of packages to be suppressed from the trace. + */ + @SuppressWarnings("ThrowableResultOfMethodCallIgnored") + public void formatWrapper(final StringBuilder sb, final ThrowableProxy cause, final List packages) { + final Throwable caused = cause.getCauseProxy() != null ? cause.getCauseProxy().getThrowable() : null; + if (caused != null) { + this.formatWrapper(sb, cause.causeProxy); + sb.append(WRAPPED_BY_LABEL); + } + sb.append(cause).append(EOL); + this.formatElements(sb, "", cause.commonElementCount, + cause.getThrowable().getStackTrace(), cause.extendedStackTrace, packages); + } + + public ThrowableProxy getCauseProxy() { + return this.causeProxy; + } + + /** + * Format the Throwable that is the cause of this Throwable. + * + * @return The formatted Throwable that caused this Throwable. + */ + public String getCauseStackTraceAsString() { + return this.getCauseStackTraceAsString(null); + } + + /** + * Format the Throwable that is the cause of this Throwable. + * + * @param packages + * The List of packages to be suppressed from the trace. + * @return The formatted Throwable that caused this Throwable. + */ + public String getCauseStackTraceAsString(final List packages) { + final StringBuilder sb = new StringBuilder(); + if (this.causeProxy != null) { + this.formatWrapper(sb, this.causeProxy); + sb.append(WRAPPED_BY_LABEL); + } + sb.append(this.toString()); + sb.append(EOL); + this.formatElements(sb, "", 0, this.throwable.getStackTrace(), this.extendedStackTrace, packages); + return sb.toString(); + } + + /** + * Return the number of elements that are being omitted because they are common with the parent Throwable's stack + * trace. + * + * @return The number of elements omitted from the stack trace. + */ + public int getCommonElementCount() { + return this.commonElementCount; + } + + /** + * Gets the stack trace including packaging information. + * + * @return The stack trace including packaging information. + */ + public ExtendedStackTraceElement[] getExtendedStackTrace() { + return this.extendedStackTrace; + } + + /** + * Format the stack trace including packaging information. + * + * @return The formatted stack trace including packaging information. + */ + public String getExtendedStackTraceAsString() { + return this.getExtendedStackTraceAsString(null); + } + + /** + * Format the stack trace including packaging information. + * + * @param ignorePackages + * List of packages to be ignored in the trace. + * @return The formatted stack trace including packaging information. 
+ */ + public String getExtendedStackTraceAsString(final List ignorePackages) { + final StringBuilder sb = new StringBuilder(this.name); + final String msg = this.message; + if (msg != null) { + sb.append(": ").append(msg); + } + sb.append(EOL); + final StackTraceElement[] causedTrace = this.throwable != null ? this.throwable.getStackTrace() : null; + this.formatElements(sb, "", 0, causedTrace, this.extendedStackTrace, ignorePackages); + this.formatSuppressed(sb, "\t", this.suppressedProxies, ignorePackages); + this.formatCause(sb, "", this.causeProxy, ignorePackages); + return sb.toString(); + } + + public String getLocalizedMessage() { + return this.localizedMessage; + } + + public String getMessage() { + return this.message; + } + + /** + * Return the FQCN of the Throwable. + * + * @return The FQCN of the Throwable. + */ + public String getName() { + return this.name; + } + + public StackTraceElement[] getStackTrace() { + return this.throwable == null ? null : this.throwable.getStackTrace(); + } + + /** + * Gets proxies for suppressed exceptions. + * + * @return proxies for suppressed exceptions. + */ + public ThrowableProxy[] getSuppressedProxies() { + return this.suppressedProxies; + } + + /** + * Format the suppressed Throwables. + * + * @return The formatted suppressed Throwables. + */ + public String getSuppressedStackTrace() { + final ThrowableProxy[] suppressed = this.getSuppressedProxies(); + if (suppressed == null || suppressed.length == 0) { + return Strings.EMPTY; + } + final StringBuilder sb = new StringBuilder("Suppressed Stack Trace Elements:").append(EOL); + for (final ThrowableProxy proxy : suppressed) { + sb.append(proxy.getExtendedStackTraceAsString()); + } + return sb.toString(); + } + + /** + * The throwable or null if this object is deserialized from XML or JSON. + * + * @return The throwable or null if this object is deserialized from XML or JSON. + */ + public Throwable getThrowable() { + return this.throwable; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (this.causeProxy == null ? 0 : this.causeProxy.hashCode()); + result = prime * result + this.commonElementCount; + result = prime * result + (this.extendedStackTrace == null ? 0 : Arrays.hashCode(this.extendedStackTrace)); + result = prime * result + (this.suppressedProxies == null ? 0 : Arrays.hashCode(this.suppressedProxies)); + result = prime * result + (this.name == null ? 0 : this.name.hashCode()); + return result; + } + + private boolean ignoreElement(final StackTraceElement element, final List ignorePackages) { + final String className = element.getClassName(); + for (final String pkg : ignorePackages) { + if (className.startsWith(pkg)) { + return true; + } + } + return false; + } + + /** + * Loads classes not located via Reflection.getCallerClass. + * + * @param lastLoader + * The ClassLoader that loaded the Class that called this Class. + * @param className + * The name of the Class. + * @return The Class object for the Class or null if it could not be located. + */ + private Class loadClass(final ClassLoader lastLoader, final String className) { + // XXX: this is overly complicated + Class clazz; + if (lastLoader != null) { + try { + clazz = Loader.initializeClass(className, lastLoader); + if (clazz != null) { + return clazz; + } + } catch (final Throwable ignore) { + // Ignore exception. 
+ } + } + try { + clazz = Loader.loadClass(className); + } catch (final ClassNotFoundException ignored) { + return initializeClass(className); + } catch (final NoClassDefFoundError ignored) { + return initializeClass(className); + } catch (final SecurityException ignored) { + return initializeClass(className); + } + return clazz; + } + + private Class initializeClass(final String className) { + try { + return Loader.initializeClass(className, this.getClass().getClassLoader()); + } catch (final ClassNotFoundException ignore) { + return null; + } catch (final NoClassDefFoundError ignore) { + return null; + } catch (final SecurityException ignore) { + return null; + } + } + + /** + * Construct the CacheEntry from the Class's information. + * + * @param stackTraceElement + * The stack trace element + * @param callerClass + * The Class. + * @param exact + * True if the class was obtained via Reflection.getCallerClass. + * + * @return The CacheEntry. + */ + private CacheEntry toCacheEntry(final StackTraceElement stackTraceElement, final Class callerClass, + final boolean exact) { + String location = "?"; + String version = "?"; + ClassLoader lastLoader = null; + if (callerClass != null) { + try { + final CodeSource source = callerClass.getProtectionDomain().getCodeSource(); + if (source != null) { + final URL locationURL = source.getLocation(); + if (locationURL != null) { + final String str = locationURL.toString().replace('\\', '/'); + int index = str.lastIndexOf("/"); + if (index >= 0 && index == str.length() - 1) { + index = str.lastIndexOf("/", index - 1); + location = str.substring(index + 1); + } else { + location = str.substring(index + 1); + } + } + } + } catch (final Exception ex) { + // Ignore the exception. + } + final Package pkg = callerClass.getPackage(); + if (pkg != null) { + final String ver = pkg.getImplementationVersion(); + if (ver != null) { + version = ver; + } + } + lastLoader = callerClass.getClassLoader(); + } + return new CacheEntry(new ExtendedClassInfo(exact, location, version), lastLoader); + } + + /** + * Resolve all the stack entries in this stack trace that are not common with the parent. + * + * @param stack + * The callers Class stack. + * @param map + * The cache of CacheEntry objects. + * @param rootTrace + * The first stack trace resolve or null. + * @param stackTrace + * The stack trace being resolved. + * @return The StackTracePackageElement array. + */ + ExtendedStackTraceElement[] toExtendedStackTrace(final Stack> stack, final Map map, + final StackTraceElement[] rootTrace, final StackTraceElement[] stackTrace) { + int stackLength; + if (rootTrace != null) { + int rootIndex = rootTrace.length - 1; + int stackIndex = stackTrace.length - 1; + while (rootIndex >= 0 && stackIndex >= 0 && rootTrace[rootIndex].equals(stackTrace[stackIndex])) { + --rootIndex; + --stackIndex; + } + this.commonElementCount = stackTrace.length - 1 - stackIndex; + stackLength = stackIndex + 1; + } else { + this.commonElementCount = 0; + stackLength = stackTrace.length; + } + final ExtendedStackTraceElement[] extStackTrace = new ExtendedStackTraceElement[stackLength]; + Class clazz = stack.isEmpty() ? null : stack.peek(); + ClassLoader lastLoader = null; + for (int i = stackLength - 1; i >= 0; --i) { + final StackTraceElement stackTraceElement = stackTrace[i]; + final String className = stackTraceElement.getClassName(); + // The stack returned from getCurrentStack may be missing entries for java.lang.reflect.Method.invoke() + // and its implementation. 
The Throwable might also contain stack entries that are no longer + // present as those methods have returned. + ExtendedClassInfo extClassInfo; + if (clazz != null && className.equals(clazz.getName())) { + final CacheEntry entry = this.toCacheEntry(stackTraceElement, clazz, true); + extClassInfo = entry.element; + lastLoader = entry.loader; + stack.pop(); + clazz = stack.isEmpty() ? null : stack.peek(); + } else { + final CacheEntry cacheEntry = map.get(className); + if (cacheEntry != null) { + final CacheEntry entry = cacheEntry; + extClassInfo = entry.element; + if (entry.loader != null) { + lastLoader = entry.loader; + } + } else { + final CacheEntry entry = this.toCacheEntry(stackTraceElement, + this.loadClass(lastLoader, className), false); + extClassInfo = entry.element; + map.put(stackTraceElement.toString(), entry); + if (entry.loader != null) { + lastLoader = entry.loader; + } + } + } + extStackTrace[i] = new ExtendedStackTraceElement(stackTraceElement, extClassInfo); + } + return extStackTrace; + } + + @Override + public String toString() { + final String msg = this.message; + return msg != null ? this.name + ": " + msg : this.name; + } + + private ThrowableProxy[] toSuppressedProxies(final Throwable thrown, Set suppressedVisited) { + try { + final Throwable[] suppressed = thrown.getSuppressed(); + if (suppressed == null) { + return EMPTY_THROWABLE_PROXY_ARRAY; + } + final List proxies = new ArrayList<>(suppressed.length); + if (suppressedVisited == null) { + suppressedVisited = new HashSet<>(proxies.size()); + } + for (int i = 0; i < suppressed.length; i++) { + final Throwable candidate = suppressed[i]; + if (!suppressedVisited.contains(candidate)) { + suppressedVisited.add(candidate); + proxies.add(new ThrowableProxy(candidate, suppressedVisited)); + } + } + return proxies.toArray(new ThrowableProxy[proxies.size()]); + } catch (final Exception e) { + StatusLogger.getLogger().error(e); + } + return null; + } +} diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java index f8cf62164e9..c4cd5d908af 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java @@ -86,7 +86,7 @@ public class JarHell { } checkJarHell(parseClassPath()); } - + /** * Parses the classpath into an array of URLs * @return array of URLs @@ -277,6 +277,14 @@ public class JarHell { if (clazz.equals("org.joda.time.base.BaseDateTime")) { return; // apparently this is intentional... clean this up } + if (clazz.startsWith("org.apache.logging.log4j.core.impl.ThrowableProxy")) { + /* + * deliberate to hack around a bug in Log4j + * cf. https://github.com/elastic/elasticsearch/issues/20304 + * cf. https://issues.apache.org/jira/browse/LOG4J2-1560 + */ + return; + } throw new IllegalStateException("jar hell!" + System.lineSeparator() + "class: " + clazz + System.lineSeparator() + "jar1: " + previous + System.lineSeparator() + From 981e4f5bc5a83542471e96ed5902c667239541d4 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 3 Sep 2016 06:41:07 -0400 Subject: [PATCH 36/40] Configure AWS SDK logging configuration Because of security permissions that we do not grant to the AWS SDK (for use in discovery-ec2 and repository-s3 plugins), certain calls in the AWS SDK will lead to security exceptions that are logged at the warning level. These warnings are noise and we should suppress them. 
This commit adds plugin log configurations for discovery-ec2 and
repository-s3 that ship with default Log4j 2 settings which suppress
these log warnings.

Relates #20313
---
 plugins/discovery-ec2/build.gradle                         | 6 ++++++
 .../discovery-ec2/config/discovery-ec2/log4j2.properties   | 8 ++++++++
 plugins/repository-s3/build.gradle                         | 6 ++++++
 .../repository-s3/config/repository-s3/log4j2.properties   | 8 ++++++++
 4 files changed, 28 insertions(+)
 create mode 100644 plugins/discovery-ec2/config/discovery-ec2/log4j2.properties
 create mode 100644 plugins/repository-s3/config/repository-s3/log4j2.properties

diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle
index 506215708e2..9ad2f8d02e2 100644
--- a/plugins/discovery-ec2/build.gradle
+++ b/plugins/discovery-ec2/build.gradle
@@ -42,6 +42,12 @@ dependencyLicenses {
   mapping from: /jackson-.*/, to: 'jackson'
 }
 
+bundlePlugin {
+  from('config/discovery-ec2') {
+    into 'config'
+  }
+}
+
 test {
   // this is needed for insecure plugins, remove if possible!
   systemProperty 'tests.artifact', project.name
diff --git a/plugins/discovery-ec2/config/discovery-ec2/log4j2.properties b/plugins/discovery-ec2/config/discovery-ec2/log4j2.properties
new file mode 100644
index 00000000000..aa52f0232e0
--- /dev/null
+++ b/plugins/discovery-ec2/config/discovery-ec2/log4j2.properties
@@ -0,0 +1,8 @@
+logger.com_amazonaws.name = com.amazonaws
+logger.com_amazonaws.level = warn
+
+logger.com_amazonaws_jmx_SdkMBeanRegistrySupport.name = com.amazonaws.jmx.SdkMBeanRegistrySupport
+logger.com_amazonaws_jmx_SdkMBeanRegistrySupport.level = error
+
+logger.com_amazonaws_metrics_AwsSdkMetrics.name = com.amazonaws.metrics.AwsSdkMetrics
+logger.com_amazonaws_metrics_AwsSdkMetrics.level = error
diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle
index a6610178ce8..b1369908670 100644
--- a/plugins/repository-s3/build.gradle
+++ b/plugins/repository-s3/build.gradle
@@ -48,6 +48,12 @@ dependencyLicenses {
   mapping from: /jaxb-.*/, to: 'jaxb'
 }
 
+bundlePlugin {
+  from('config/repository-s3') {
+    into 'config'
+  }
+}
+
 test {
   // this is needed for insecure plugins, remove if possible!
   systemProperty 'tests.artifact', project.name
diff --git a/plugins/repository-s3/config/repository-s3/log4j2.properties b/plugins/repository-s3/config/repository-s3/log4j2.properties
new file mode 100644
index 00000000000..3fee57ce3e2
--- /dev/null
+++ b/plugins/repository-s3/config/repository-s3/log4j2.properties
@@ -0,0 +1,8 @@
+logger.com_amazonaws.name = com.amazonaws
+logger.com_amazonaws.level = warn
+
+logger.com_amazonaws_jmx_SdkMBeanRegistrySupport.name = com.amazonaws.jmx.SdkMBeanRegistrySupport
+logger.com_amazonaws_jmx_SdkMBeanRegistrySupport.level = error
+
+logger.com_amazonaws_metrics_AwsSdkMetrics.name = com.amazonaws.metrics.AwsSdkMetrics
+logger.com_amazonaws_metrics_AwsSdkMetrics.level = error

From e297fd419bba60b7dfa76eba2036966275ff6bae Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Sat, 3 Sep 2016 08:23:18 -0400
Subject: [PATCH 37/40] Workaround possible JVM bug on Windows

Some assertions in MaxMapCountCheckTests assert that certain messages
are logged. These assertions pass everywhere except Windows, where the
JVM seems confused. The issue is not the javac compiler, as the bytecode
produced on OS X and Windows is identical for the relevant classes, so
this leaves a possible JVM bug. It is not worth investigating the
ultimate cause of this bug, so instead this commit introduces a
workaround.
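The diff below swaps Mockito logger verification for a real Log4j 2 appender that records events. As a rough sketch of that pattern (this is not Elasticsearch's MockLogAppender; the class below is a stand-in written against plain Log4j 2 core):

    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;

    import org.apache.logging.log4j.core.LogEvent;
    import org.apache.logging.log4j.core.appender.AbstractAppender;

    // Captures every event routed to it so a test can assert on the events afterwards.
    final class CapturingAppender extends AbstractAppender {

        final List<LogEvent> events = new CopyOnWriteArrayList<>();

        CapturingAppender(final String name) {
            super(name, null, null); // no filter, no layout
            start();
        }

        @Override
        public void append(final LogEvent event) {
            // copy the event; mutable log events can be reused by the framework
            events.add(event.toImmutable());
        }

    }

Attached to the logger under test, such an appender sidesteps the mocked-Logger verification that misbehaves on Windows.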
--- .../bootstrap/MaxMapCountCheckTests.java | 120 +++++++++++++----- 1 file changed, 90 insertions(+), 30 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java index cbfc14a7d04..5a2b8a5a143 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java @@ -19,25 +19,25 @@ package org.elasticsearch.bootstrap; +import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.util.Constants; -import org.elasticsearch.common.SuppressLoggerChecks; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.common.logging.TestLoggers; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matcher; -import org.mockito.ArgumentMatcher; +import org.elasticsearch.test.MockLogAppender; import java.io.BufferedReader; import java.io.IOException; import java.nio.file.Path; +import java.util.Arrays; +import java.util.function.Predicate; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.argThat; -import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.verify; @@ -52,8 +52,7 @@ public class MaxMapCountCheckTests extends ESTestCase { } } - @SuppressLoggerChecks(reason = "mock usage") - public void testGetMaxMapCount() throws IOException { + public void testGetMaxMapCount() throws IOException, IllegalAccessException { final long procSysVmMaxMapCount = randomIntBetween(1, Integer.MAX_VALUE); final BufferedReader reader = mock(BufferedReader.class); when(reader.readLine()).thenReturn(Long.toString(procSysVmMaxMapCount)); @@ -69,31 +68,92 @@ public class MaxMapCountCheckTests extends ESTestCase { assertThat(check.getMaxMapCount(), equalTo(procSysVmMaxMapCount)); verify(reader).close(); - reset(reader); - final IOException ioException = new IOException("fatal"); - when(reader.readLine()).thenThrow(ioException); - final Logger logger = mock(Logger.class); - assertThat(check.getMaxMapCount(logger), equalTo(-1L)); - verify(logger).warn(argThat(argumentMatcher("I/O exception while trying to read [/proc/sys/vm/max_map_count]")), eq(ioException)); - verify(reader).close(); + { + reset(reader); + final IOException ioException = new IOException("fatal"); + when(reader.readLine()).thenThrow(ioException); + final Logger logger = ESLoggerFactory.getLogger("testGetMaxMapCountIOException"); + final MockLogAppender appender = new MockLogAppender(); + appender.addExpectation( + new ParameterizedMessageLoggingExpectation( + "expected logged I/O exception", + "testGetMaxMapCountIOException", + Level.WARN, + "I/O exception while trying to read [{}]", + new Object[] { procSysVmMaxMapCountPath }, + e -> ioException == e)); + TestLoggers.addAppender(logger, appender); + assertThat(check.getMaxMapCount(logger), equalTo(-1L)); + appender.assertAllExpectationsMatched(); + verify(reader).close(); + TestLoggers.removeAppender(logger, appender); + } + + { + reset(reader); + when(reader.readLine()).thenReturn("eof"); + final 
Logger logger = ESLoggerFactory.getLogger("testGetMaxMapCountNumberFormatException"); + final MockLogAppender appender = new MockLogAppender(); + appender.addExpectation( + new ParameterizedMessageLoggingExpectation( + "expected logged number format exception", + "testGetMaxMapCountNumberFormatException", + Level.WARN, + "unable to parse vm.max_map_count [{}]", + new Object[] { "eof" }, + e -> e instanceof NumberFormatException && e.getMessage().equals("For input string: \"eof\""))); + TestLoggers.addAppender(logger, appender); + assertThat(check.getMaxMapCount(logger), equalTo(-1L)); + appender.assertAllExpectationsMatched(); + verify(reader).close(); + TestLoggers.removeAppender(logger, appender); + } - reset(reader); - reset(logger); - when(reader.readLine()).thenReturn("eof"); - assertThat(check.getMaxMapCount(logger), equalTo(-1L)); - verify(logger).warn(argThat(argumentMatcher("unable to parse vm.max_map_count [eof]")), any(NumberFormatException.class)); - verify(reader).close(); } - private ArgumentMatcher> argumentMatcher(final String message) { - return new ArgumentMatcher>() { - @Override - public boolean matches(Object o) { - final Supplier supplier = (Supplier)o; - final ParameterizedMessage parameterizedMessage = (ParameterizedMessage) supplier.get(); - return parameterizedMessage.getFormattedMessage().equals(message); + private static class ParameterizedMessageLoggingExpectation implements MockLogAppender.LoggingExpectation { + + private boolean saw = false; + + private final String name; + private final String loggerName; + private final Level level; + private final String messagePattern; + private final Object[] arguments; + private final Predicate throwablePredicate; + + private ParameterizedMessageLoggingExpectation( + final String name, + final String loggerName, + final Level level, + final String messagePattern, + final Object[] arguments, + final Predicate throwablePredicate) { + this.name = name; + this.loggerName = loggerName; + this.level = level; + this.messagePattern = messagePattern; + this.arguments = arguments; + this.throwablePredicate = throwablePredicate; + } + + @Override + public void match(LogEvent event) { + if (event.getLevel().equals(level) && + event.getLoggerName().equals(loggerName) && + event.getMessage() instanceof ParameterizedMessage) { + final ParameterizedMessage message = (ParameterizedMessage)event.getMessage(); + saw = message.getFormat().equals(messagePattern) && + Arrays.deepEquals(arguments, message.getParameters()) && + throwablePredicate.test(event.getThrown()); } - }; + } + + @Override + public void assertMatched() { + assertTrue(name, saw); + } + } public void testMaxMapCountCheckRead() throws IOException { From 41637a12949998184410555dd0630944c0d2ed11 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 3 Sep 2016 09:48:09 -0400 Subject: [PATCH 38/40] Only warn on old log configs if resolving configs A warning was introduced if old log config files are present (e.g., logging.yml). However, this check is executed unconditionally. This can lead to no such file exceptions when logging configs are not being resolved, for example when installing a plugin. This commit moves this check to only execute when logging configs are being resolved. 
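The moved check amounts to a small filesystem probe; a minimal sketch of its shape follows (a hypothetical helper, not the actual LogConfigurator code in the diff below; the legacy file names beyond logging.yml are assumptions):

    import java.nio.file.Files;
    import java.nio.file.Path;

    final class LegacyLoggingConfigCheck {

        // Call this only while resolving logging configs, so that commands that
        // never resolve them (e.g. plugin installation) cannot trip over a
        // missing config directory.
        static void warnIfOldConfigurationFilePresent(final Path configDir) {
            for (final String legacy : new String[] {"logging.yml", "logging.yaml", "logging.properties"}) {
                final Path file = configDir.resolve(legacy);
                if (Files.exists(file)) {
                    System.err.println("ignoring unsupported legacy logging configuration file [" + file + "]");
                }
            }
        }

    }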
--- .../java/org/elasticsearch/common/logging/LogConfigurator.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index 522ff589758..68e2886cf37 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -78,6 +78,7 @@ public class LogConfigurator { } }); context.start(new CompositeConfiguration(configurations)); + warnIfOldConfigurationFilePresent(environment); } if (ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.exists(settings)) { @@ -89,8 +90,6 @@ public class LogConfigurator { final Level level = ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings); Loggers.setLevel(Loggers.getLogger(key.substring("logger.".length())), level); } - - warnIfOldConfigurationFilePresent(environment); } private static void warnIfOldConfigurationFilePresent(final Environment environment) throws IOException { From 433cae47ed7834397bf854536ecdde77505a0cad Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 4 Sep 2016 11:09:08 -0400 Subject: [PATCH 39/40] Mark CSIT#testLoggerLevelUpdate as awaits fix This commit marks ClusterSettingsIT#testLoggerLevelUpdate as awaiting a fix due to a test bug. --- .../org/elasticsearch/cluster/settings/ClusterSettingsIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 7e69003a1ea..26bb97fcc04 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -330,6 +330,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/20318") public void testLoggerLevelUpdate() { assertAcked(prepareCreate("test")); final IllegalArgumentException e = From 6f6d17dc9c6dfc83e80f5da70de07010ac1a59ba Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 29 Aug 2016 16:03:01 +0200 Subject: [PATCH 40/40] ingest: Add `dot_expander` processor that can turn fields with dots in the field name into object fields. 
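At its core the new processor is map surgery: remove the dotted key and re-insert it as nested objects. A standalone sketch of the happy path (a hypothetical helper for illustration, not the DotExpanderProcessor added below, which also handles merging into existing fields and error reporting):

    import java.util.HashMap;
    import java.util.Map;

    final class DotExpandSketch {

        // Expand {"foo.bar": v} into {"foo": {"bar": v}} in place. Fails with a
        // ClassCastException if an intermediate key already holds a scalar; the
        // real processor rejects that case with a descriptive error instead.
        @SuppressWarnings("unchecked")
        static void expand(final Map<String, Object> source, final String dottedField) {
            final Object value = source.remove(dottedField);
            final String[] parts = dottedField.split("\\.");
            Map<String, Object> current = source;
            for (int i = 0; i < parts.length - 1; i++) {
                current = (Map<String, Object>) current.computeIfAbsent(parts[i], k -> new HashMap<String, Object>());
            }
            current.put(parts[parts.length - 1], value);
        }

    }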
---
 docs/reference/ingest/ingest-node.asciidoc         | 112 ++++++++++++++
 .../ingest/common/DotExpanderProcessor.java        | 120 +++++++++++++++
 .../ingest/common/IngestCommonPlugin.java          |   1 +
 .../DotExpanderProcessorFactoryTests.java          | 100 ++++++++++++
 .../common/DotExpanderProcessorTests.java          | 144 ++++++++++++++++++
 .../rest-api-spec/test/ingest/10_basic.yaml        |  29 ++--
 .../test/ingest/130_escape_dot.yaml                |  40 +++++
 7 files changed, 532 insertions(+), 14 deletions(-)
 create mode 100644 modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java
 create mode 100644 modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorFactoryTests.java
 create mode 100644 modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java
 create mode 100644 modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/130_escape_dot.yaml

diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc
index f0c0e9f6c13..a06e3b9e1df 100644
--- a/docs/reference/ingest/ingest-node.asciidoc
+++ b/docs/reference/ingest/ingest-node.asciidoc
@@ -1495,3 +1495,115 @@ Converts a string to its uppercase equivalent.
 }
 }
 --------------------------------------------------
+
+[[dot-expand-processor]]
+=== Dot Expander Processor
+
+Expands a field with dots into an object field. This processor allows fields
+with dots in the name to be accessible by other processors in the pipeline.
+Otherwise these fields can't be accessed by any processor.
+
+[[dot-expander-options]]
+.Dot Expander Options
+[options="header"]
+|======
+| Name    | Required | Default | Description
+| `field` | yes      | -       | The field to expand into an object field
+| `path`  | no       | -       | The field that contains the field to expand. Only required if the field to expand is part of another object field, because the `field` option only understands leaf fields.
+|======
+
+[source,js]
+--------------------------------------------------
+{
+  "dot_expander": {
+    "field": "foo.bar"
+  }
+}
+--------------------------------------------------
+
+For example, the `dot_expander` processor would turn this document:
+
+[source,js]
+--------------------------------------------------
+{
+  "foo.bar" : "value"
+}
+--------------------------------------------------
+
+into:
+
+[source,js]
+--------------------------------------------------
+{
+  "foo" : {
+    "bar" : "value"
+  }
+}
+--------------------------------------------------
+
+If there is already a `bar` field nested under `foo` then
+this processor merges the `foo.bar` field into it. If the field is
+a scalar value then it will turn that field into an array field.
+
+For example, the following document:
+
+[source,js]
+--------------------------------------------------
+{
+  "foo.bar" : "value2",
+  "foo" : {
+    "bar" : "value1"
+  }
+}
+--------------------------------------------------
+
+is transformed by the `dot_expander` processor into:
+
+[source,js]
+--------------------------------------------------
+{
+  "foo" : {
+    "bar" : ["value1", "value2"]
+  }
+}
+--------------------------------------------------
+
+If any field outside of the leaf field conflicts with a pre-existing field of the same name,
+then that field needs to be renamed first.
+
+Consider the following document:
+
+[source,js]
+--------------------------------------------------
+{
+  "foo": "value1",
+  "foo.bar": "value2"
+}
+--------------------------------------------------
+
+Then the `foo` field needs to be renamed before the `dot_expander`
+processor is applied. So in order for the `foo.bar` field to properly
+be expanded into the `bar` field under the `foo` field, the following
+pipeline should be used:
+
+[source,js]
+--------------------------------------------------
+{
+  "processors" : [
+    {
+      "rename" : {
+        "field" : "foo",
+        "target_field" : "foo.bar"
+      }
+    },
+    {
+      "dot_expander": {
+        "field": "foo.bar"
+      }
+    }
+  ]
+}
+--------------------------------------------------
+
+The reason for this is that Ingest doesn't know how to automatically cast
+a scalar field to an object field.
\ No newline at end of file
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java
new file mode 100644
index 00000000000..bfc32311733
--- /dev/null
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.common;
+
+import org.elasticsearch.ingest.AbstractProcessor;
+import org.elasticsearch.ingest.ConfigurationUtils;
+import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.ingest.Processor;
+
+import java.util.Map;
+
+public final class DotExpanderProcessor extends AbstractProcessor {
+
+    static final String TYPE = "dot_expander";
+
+    private final String path;
+    private final String field;
+
+    DotExpanderProcessor(String tag, String path, String field) {
+        super(tag);
+        this.path = path;
+        this.field = field;
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public void execute(IngestDocument ingestDocument) throws Exception {
+        String path;
+        Map<String, Object> map;
+        if (this.path != null) {
+            path = this.path + "." + field;
+            map = ingestDocument.getFieldValue(this.path, Map.class);
+        } else {
+            path = field;
+            map = ingestDocument.getSourceAndMetadata();
+        }
+
+        if (ingestDocument.hasField(path)) {
+            Object value = map.remove(field);
+            ingestDocument.appendFieldValue(path, value);
+        } else {
+            // check whether we actually can expand the field in question into an object field.
+            // part of the path may already exist and if part of it would be a value field (string, integer etc.)
+            // then we can't override it with an object field and we should fail with a good reason.
+            // IngestDocument#setFieldValue(...) would fail too, but the error isn't very understandable
+            for (int index = path.indexOf('.'); index != -1; index = path.indexOf('.', index + 1)) {
+                String partialPath = path.substring(0, index);
+                if (ingestDocument.hasField(partialPath)) {
+                    Object val = ingestDocument.getFieldValue(partialPath, Object.class);
+                    if ((val instanceof Map) == false) {
+                        throw new IllegalArgumentException("cannot expand [" + path + "], because [" + partialPath +
+                            "] is not an object field, but a value field");
+                    }
+                } else {
+                    break;
+                }
+            }
+            Object value = map.remove(field);
+            ingestDocument.setFieldValue(path, value);
+        }
+    }
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    String getPath() {
+        return path;
+    }
+
+    String getField() {
+        return field;
+    }
+
+    public static final class Factory implements Processor.Factory {
+
+        @Override
+        public Processor create(Map<String, Processor.Factory> processorFactories, String tag,
+                                Map<String, Object> config) throws Exception {
+            String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field");
+            if (field.contains(".") == false) {
+                throw ConfigurationUtils.newConfigurationException(ConfigurationUtils.TAG_KEY, tag, "field",
+                    "field does not contain a dot");
+            }
+            if (field.indexOf('.') == 0 || field.lastIndexOf('.') == field.length() - 1) {
+                throw ConfigurationUtils.newConfigurationException(ConfigurationUtils.TAG_KEY, tag, "field",
+                    "Field can't start or end with a dot");
+            }
+            int firstIndex = -1;
+            for (int index = field.indexOf('.'); index != -1; index = field.indexOf('.', index + 1)) {
+                if (index - firstIndex == 1) {
+                    throw ConfigurationUtils.newConfigurationException(ConfigurationUtils.TAG_KEY, tag, "field",
+                        "No space between dots");
+                }
+                firstIndex = index;
+            }
+
+            String path = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "path");
+            return new DotExpanderProcessor(tag, path, field);
+        }
+    }
+}
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
index c89f6164de7..e6948771d8d 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
@@ -61,6 +61,7 @@ public class IngestCommonPlugin extends Plugin implements IngestPlugin {
         processors.put(SortProcessor.TYPE, new SortProcessor.Factory());
         processors.put(GrokProcessor.TYPE, new GrokProcessor.Factory(builtinPatterns));
         processors.put(ScriptProcessor.TYPE, new ScriptProcessor.Factory(parameters.scriptService));
+        processors.put(DotExpanderProcessor.TYPE, new DotExpanderProcessor.Factory());
         return Collections.unmodifiableMap(processors);
     }
 
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorFactoryTests.java
new file mode 100644
index 00000000000..be0695924ef
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorFactoryTests.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.common;
+
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
+public class DotExpanderProcessorFactoryTests extends ESTestCase {
+
+    public void testCreate() throws Exception {
+        DotExpanderProcessor.Factory factory = new DotExpanderProcessor.Factory();
+
+        Map<String, Object> config = new HashMap<>();
+        config.put("field", "_field.field");
+        config.put("path", "_path");
+        DotExpanderProcessor processor = (DotExpanderProcessor) factory.create(null, "_tag", config);
+        assertThat(processor.getField(), equalTo("_field.field"));
+        assertThat(processor.getPath(), equalTo("_path"));
+
+        config = new HashMap<>();
+        config.put("field", "_field.field");
+        processor = (DotExpanderProcessor) factory.create(null, "_tag", config);
+        assertThat(processor.getField(), equalTo("_field.field"));
+        assertThat(processor.getPath(), nullValue());
+    }
+
+    public void testValidFields() throws Exception {
+        DotExpanderProcessor.Factory factory = new DotExpanderProcessor.Factory();
+
+        String[] fields = new String[] {"a.b", "a.b.c", "a.b.c.d", "ab.cd"};
+        for (String field : fields) {
+            Map<String, Object> config = new HashMap<>();
+            config.put("field", field);
+            config.put("path", "_path");
+            DotExpanderProcessor processor = (DotExpanderProcessor) factory.create(null, "_tag", config);
+            assertThat(processor.getField(), equalTo(field));
+            assertThat(processor.getPath(), equalTo("_path"));
+        }
+    }
+
+    public void testCreate_fieldMissing() throws Exception {
+        DotExpanderProcessor.Factory factory = new DotExpanderProcessor.Factory();
+
+        Map<String, Object> config = new HashMap<>();
+        config.put("path", "_path");
+        Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", config));
+        assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+    }
+
+    public void testCreate_invalidFields() throws Exception {
+        DotExpanderProcessor.Factory factory = new DotExpanderProcessor.Factory();
+        String[] fields = new String[] {"a", "abc"};
+        for (String field : fields) {
+            Map<String, Object> config = new HashMap<>();
+            config.put("field", field);
+            Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", config));
+            assertThat(e.getMessage(), equalTo("[field] field does not contain a dot"));
+        }
+
+        fields = new String[] {".a", "a.", "."};
+        for (String field : fields) {
+            Map<String, Object> config = new HashMap<>();
+            config.put("field", field);
+            Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", config));
+            assertThat(e.getMessage(), equalTo("[field] Field can't start or end with a dot"));
+        }
+
+        fields = new String[] {"a..b", "a...b", "a.b..c", "abc.def..hij"};
+        for (String field : fields) {
+            Map<String, Object> config = new HashMap<>();
config.put("field", field); + Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", config)); + assertThat(e.getMessage(), equalTo("[field] No space between dots")); + } + } + +} diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java new file mode 100644 index 00000000000..1802090e0e5 --- /dev/null +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.common; + +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class DotExpanderProcessorTests extends ESTestCase { + + public void testEscapeFields() throws Exception { + Map source = new HashMap<>(); + source.put("foo.bar", "baz1"); + IngestDocument document = new IngestDocument(source, Collections.emptyMap()); + DotExpanderProcessor processor = new DotExpanderProcessor("_tag", null, "foo.bar"); + processor.execute(document); + assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1)); + assertThat(document.getFieldValue("foo.bar", String.class), equalTo("baz1")); + + source = new HashMap<>(); + source.put("foo.bar.baz", "value"); + document = new IngestDocument(source, Collections.emptyMap()); + processor = new DotExpanderProcessor("_tag", null, "foo.bar.baz"); + processor.execute(document); + assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1)); + assertThat(document.getFieldValue("foo.bar", Map.class).size(), equalTo(1)); + assertThat(document.getFieldValue("foo.bar.baz", String.class), equalTo("value")); + + source = new HashMap<>(); + source.put("foo.bar", "baz1"); + source.put("foo", new HashMap<>(Collections.singletonMap("bar", "baz2"))); + document = new IngestDocument(source, Collections.emptyMap()); + processor = new DotExpanderProcessor("_tag", null, "foo.bar"); + processor.execute(document); + assertThat(document.getSourceAndMetadata().size(), equalTo(1)); + assertThat(document.getFieldValue("foo.bar", List.class).size(), equalTo(2)); + assertThat(document.getFieldValue("foo.bar.0", String.class), equalTo("baz2")); + assertThat(document.getFieldValue("foo.bar.1", String.class), equalTo("baz1")); + + source = new HashMap<>(); + source.put("foo.bar", "2"); + source.put("foo", new HashMap<>(Collections.singletonMap("bar", 1))); + document = new IngestDocument(source, 
+        processor = new DotExpanderProcessor("_tag", null, "foo.bar");
+        processor.execute(document);
+        assertThat(document.getSourceAndMetadata().size(), equalTo(1));
+        assertThat(document.getFieldValue("foo.bar", List.class).size(), equalTo(2));
+        assertThat(document.getFieldValue("foo.bar.0", Integer.class), equalTo(1));
+        assertThat(document.getFieldValue("foo.bar.1", String.class), equalTo("2"));
+    }
+
+    public void testEscapeFields_valueField() throws Exception {
+        Map<String, Object> source = new HashMap<>();
+        source.put("foo.bar", "baz1");
+        source.put("foo", "baz2");
+        IngestDocument document1 = new IngestDocument(source, Collections.emptyMap());
+        Processor processor1 = new DotExpanderProcessor("_tag", null, "foo.bar");
+        // foo already exists and is a leaf field and therefore can't be replaced by a map field:
+        Exception e = expectThrows(IllegalArgumentException.class, () -> processor1.execute(document1));
+        assertThat(e.getMessage(), equalTo("cannot expand [foo.bar], because [foo] is not an object field, but a value field"));
+
+        // because foo is not an object field but a value field, the `foo.bar` field can't be expanded
+        // into [foo].[bar], so foo should be renamed into [foo].[bar] first:
+        IngestDocument document = new IngestDocument(source, Collections.emptyMap());
+        Processor processor = new RenameProcessor("_tag", "foo", "foo.bar");
+        processor.execute(document);
+        processor = new DotExpanderProcessor("_tag", null, "foo.bar");
+        processor.execute(document);
+        assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1));
+        assertThat(document.getFieldValue("foo.bar.0", String.class), equalTo("baz2"));
+        assertThat(document.getFieldValue("foo.bar.1", String.class), equalTo("baz1"));
+
+        source = new HashMap<>();
+        source.put("foo.bar", "baz1");
+        document = new IngestDocument(source, Collections.emptyMap());
+        processor = new DotExpanderProcessor("_tag", null, "foo.bar");
+        processor.execute(document);
+        assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1));
+        assertThat(document.getFieldValue("foo.bar", String.class), equalTo("baz1"));
+
+        source = new HashMap<>();
+        source.put("foo.bar.baz", "baz1");
+        source.put("foo", new HashMap<>(Collections.singletonMap("bar", new HashMap<>())));
+        document = new IngestDocument(source, Collections.emptyMap());
+        processor = new DotExpanderProcessor("_tag", null, "foo.bar.baz");
+        processor.execute(document);
+        assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1));
+        assertThat(document.getFieldValue("foo.bar", Map.class).size(), equalTo(1));
+        assertThat(document.getFieldValue("foo.bar.baz", String.class), equalTo("baz1"));
+
+        source = new HashMap<>();
+        source.put("foo.bar.baz", "baz1");
+        source.put("foo", new HashMap<>(Collections.singletonMap("bar", "baz2")));
+        IngestDocument document2 = new IngestDocument(source, Collections.emptyMap());
+        Processor processor2 = new DotExpanderProcessor("_tag", null, "foo.bar.baz");
+        e = expectThrows(IllegalArgumentException.class, () -> processor2.execute(document2));
+        assertThat(e.getMessage(), equalTo("cannot expand [foo.bar.baz], because [foo.bar] is not an object field, but a value field"));
+    }
+
+    public void testEscapeFields_path() throws Exception {
+        Map<String, Object> source = new HashMap<>();
+        source.put("foo", new HashMap<>(Collections.singletonMap("bar.baz", "value")));
+        IngestDocument document = new IngestDocument(source, Collections.emptyMap());
+        DotExpanderProcessor processor = new DotExpanderProcessor("_tag", "foo", "bar.baz");
+        
processor.execute(document); + assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1)); + assertThat(document.getFieldValue("foo.bar", Map.class).size(), equalTo(1)); + assertThat(document.getFieldValue("foo.bar.baz", String.class), equalTo("value")); + + source = new HashMap<>(); + source.put("field", new HashMap<>(Collections.singletonMap("foo.bar.baz", "value"))); + document = new IngestDocument(source, Collections.emptyMap()); + processor = new DotExpanderProcessor("_tag", "field", "foo.bar.baz"); + processor.execute(document); + assertThat(document.getFieldValue("field.foo", Map.class).size(), equalTo(1)); + assertThat(document.getFieldValue("field.foo.bar", Map.class).size(), equalTo(1)); + assertThat(document.getFieldValue("field.foo.bar.baz", String.class), equalTo("value")); + } + +} diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml index 14f58369dfa..e37b2d83183 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml @@ -13,17 +13,18 @@ - match: { nodes.$master.ingest.processors.1.type: convert } - match: { nodes.$master.ingest.processors.2.type: date } - match: { nodes.$master.ingest.processors.3.type: date_index_name } - - match: { nodes.$master.ingest.processors.4.type: fail } - - match: { nodes.$master.ingest.processors.5.type: foreach } - - match: { nodes.$master.ingest.processors.6.type: grok } - - match: { nodes.$master.ingest.processors.7.type: gsub } - - match: { nodes.$master.ingest.processors.8.type: join } - - match: { nodes.$master.ingest.processors.9.type: lowercase } - - match: { nodes.$master.ingest.processors.10.type: remove } - - match: { nodes.$master.ingest.processors.11.type: rename } - - match: { nodes.$master.ingest.processors.12.type: script } - - match: { nodes.$master.ingest.processors.13.type: set } - - match: { nodes.$master.ingest.processors.14.type: sort } - - match: { nodes.$master.ingest.processors.15.type: split } - - match: { nodes.$master.ingest.processors.16.type: trim } - - match: { nodes.$master.ingest.processors.17.type: uppercase } + - match: { nodes.$master.ingest.processors.4.type: dot_expander } + - match: { nodes.$master.ingest.processors.5.type: fail } + - match: { nodes.$master.ingest.processors.6.type: foreach } + - match: { nodes.$master.ingest.processors.7.type: grok } + - match: { nodes.$master.ingest.processors.8.type: gsub } + - match: { nodes.$master.ingest.processors.9.type: join } + - match: { nodes.$master.ingest.processors.10.type: lowercase } + - match: { nodes.$master.ingest.processors.11.type: remove } + - match: { nodes.$master.ingest.processors.12.type: rename } + - match: { nodes.$master.ingest.processors.13.type: script } + - match: { nodes.$master.ingest.processors.14.type: set } + - match: { nodes.$master.ingest.processors.15.type: sort } + - match: { nodes.$master.ingest.processors.16.type: split } + - match: { nodes.$master.ingest.processors.17.type: trim } + - match: { nodes.$master.ingest.processors.18.type: uppercase } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/130_escape_dot.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/130_escape_dot.yaml new file mode 100644 index 00000000000..1d537ffa6b7 --- /dev/null +++ 
b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/130_escape_dot.yaml @@ -0,0 +1,40 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "1" + ignore: 404 + +--- +"Test escape_dot processor": + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "dot_expander" : { + "field" : "foo.bar" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "1" + body: { + foo.bar: "baz" + } + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.foo.bar: "baz" }
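To try the new processor by hand, the simulate API is convenient; a sketch follows (endpoint and payload shape as in the 5.x ingest documentation, response elided):

    curl -XPOST 'localhost:9200/_ingest/pipeline/_simulate' -d '
    {
      "pipeline" : {
        "processors" : [ { "dot_expander" : { "field" : "foo.bar" } } ]
      },
      "docs" : [ { "_source" : { "foo.bar" : "baz" } } ]
    }'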