From c2e3bebab997367520d18761288ffea5a9a715f1 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Mon, 30 Jul 2018 22:15:59 +0300 Subject: [PATCH 01/22] Ensure KeyStoreWrapper decryption exceptions are handled (#32464) * Ensure decryption related exceptions are handled This commit ensures that all possible Exceptions in KeyStoreWrapper#decrypt() are handled. More specifically, in the case that a wrong password is used for secure settings, calling readX on the DataInputStream that wraps the CipherInputStream can throw an IOException. It also adds a test for loading a KeyStoreWrapper with a wrong password. Resolves #32411 --- .../elasticsearch/common/settings/KeyStoreWrapper.java | 2 +- .../action/admin/ReloadSecureSettingsIT.java | 8 -------- .../common/settings/KeyStoreWrapperTests.java | 9 +++++++++ 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index eee45743ee3..e017e9e7ca9 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -359,7 +359,7 @@ public class KeyStoreWrapper implements SecureSettings { if (input.read() != -1) { throw new SecurityException("Keystore has been corrupted or tampered with"); } - } catch (EOFException e) { + } catch (IOException e) { throw new SecurityException("Keystore has been corrupted or tampered with", e); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java index c8503603f66..79527582405 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java @@ -32,7 +32,6 @@ import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.test.ESIntegTestCase; -import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.StandardCopyOption; @@ -205,14 +204,7 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { assertThat(nodesMap.size(), equalTo(cluster().size())); for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { assertThat(nodeResponse.reloadException(), notNullValue()); - // Running in a JVM with a BouncyCastle FIPS Security Provider, decrypting the Keystore with the wrong - // password returns a SecurityException if the DataInputStream can't be fully consumed - if (inFipsJvm()) { assertThat(nodeResponse.reloadException(), instanceOf(SecurityException.class)); - } else { - assertThat(nodeResponse.reloadException(), instanceOf(IOException.class)); - } - } } catch (final AssertionError e) { reloadSettingsError.set(e); diff --git a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java index fe7b02d63ec..bb2b1df7f8c 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java @@ -99,6 +99,15 @@ public class KeyStoreWrapperTests extends ESTestCase { assertTrue(keystore.getSettingNames().contains(KeyStoreWrapper.SEED_SETTING.getKey())); } + public 
void testDecryptKeyStoreWithWrongPassword() throws Exception { + KeyStoreWrapper keystore = KeyStoreWrapper.create(); + keystore.save(env.configFile(), new char[0]); + final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configFile()); + final SecurityException exception = expectThrows(SecurityException.class, + () -> loadedkeystore.decrypt(new char[]{'i', 'n', 'v', 'a', 'l', 'i', 'd'})); + assertThat(exception.getMessage(), containsString("Keystore has been corrupted or tampered with")); + } + public void testCannotReadStringFromClosedKeystore() throws Exception { KeyStoreWrapper keystore = KeyStoreWrapper.create(); assertThat(keystore.getSettingNames(), Matchers.hasItem(KeyStoreWrapper.SEED_SETTING.getKey())); From cf7489899a957764404769a4b792cdbefbf6a729 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 30 Jul 2018 21:25:30 +0200 Subject: [PATCH 02/22] INGEST: Clean up Java8 Stream Usage (#32059) * GrokProcessor: Rationalize the loop over the map to save allocations and indirection * IngestDocument: Rationalize way we append to `List` --- .../java/org/elasticsearch/ingest/common/GrokProcessor.java | 3 +-- .../src/main/java/org/elasticsearch/ingest/IngestDocument.java | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java index 7bb3ebfba6e..88cba512b86 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java @@ -68,8 +68,7 @@ public final class GrokProcessor extends AbstractProcessor { throw new IllegalArgumentException("Provided Grok expressions do not match field value: [" + fieldValue + "]"); } - matches.entrySet().stream() - .forEach((e) -> ingestDocument.setFieldValue(e.getKey(), e.getValue())); + matches.forEach(ingestDocument::setFieldValue); if (traceMatch) { if (matchPatterns.size() > 1) { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 2bd842e72b1..aad55e12cef 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -527,8 +527,7 @@ public final class IngestDocument { private static void appendValues(List list, Object value) { if (value instanceof List) { - List valueList = (List) value; - valueList.stream().forEach(list::add); + list.addAll((List) value); } else { list.add(value); } From 072c0be8af0bdbe9e035e4da6bd8e3831fe1796a Mon Sep 17 00:00:00 2001 From: w-bonelli Date: Mon, 30 Jul 2018 15:48:14 -0400 Subject: [PATCH 03/22] Update Fuzzy Query docs to clarify default behavior re max_expansions (#30819) Stating that the Fuzzy Query generates "all possible" matching terms is misleading, given that the query's default behavior is to generate a maximum of 50 matching terms. 
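As background for this doc change (an illustrative request, not part of the patch; the `user` field and the value `ki` are placeholders), the snippet below shows where `fuzziness` and `max_expansions` sit in a fuzzy query, with `50` being the documented default for `max_expansions`:

[source,js]
--------------------------------------------------
GET /_search
{
  "query": {
    "fuzzy": {
      "user": {
        "value": "ki",
        "fuzziness": "AUTO",
        "max_expansions": 50
      }
    }
  }
}
--------------------------------------------------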
(cherry picked from commit 345a0071a2a41fd7f80ae9ef8a39a2cb4991aedd) --- docs/reference/query-dsl/fuzzy-query.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/query-dsl/fuzzy-query.asciidoc b/docs/reference/query-dsl/fuzzy-query.asciidoc index fa11cb257d4..4be54691624 100644 --- a/docs/reference/query-dsl/fuzzy-query.asciidoc +++ b/docs/reference/query-dsl/fuzzy-query.asciidoc @@ -5,10 +5,10 @@ The fuzzy query uses similarity based on Levenshtein edit distance. ==== String fields -The `fuzzy` query generates all possible matching terms that are within the +The `fuzzy` query generates matching terms that are within the maximum edit distance specified in `fuzziness` and then checks the term dictionary to find out which of those generated terms actually exist in the -index. +index. The final query uses up to `max_expansions` matching terms. Here is a simple example: From 1e0fcebfe1227401343e879cb07fd212c20fe99e Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Mon, 30 Jul 2018 14:32:55 -0700 Subject: [PATCH 04/22] update rollover to leverage write-alias semantics (#32216) Rollover should not swap aliases when `is_write_index` is set to `true`. Instead, both the new and old indices should have the rollover alias, with the newly created index as the new write index. Updates Rollover to leverage the ability to preserve aliases and swap which index is the write index. Historically, Rollover would swap which index had the designated alias for writing documents against. This required users to keep a separate read-alias that enabled reading against both rolled-over and newly created indices, while the write-alias was being re-assigned at every rollover. With the ability for aliases to designate a write index, Rollover can be a bit more flexible with its use of aliases. Updates include: - Rollover validates that the target alias has a write index (the index that is being rolled over). This means that the restriction that aliases only point to one index is no longer necessary. - Rollover explicitly (and atomically) swaps which index is the write index by assigning the existing index `is_write_index: false` and the newly created index `is_write_index: true` on the rollover alias. This is only done when `is_write_index: true` is already set on the write index. The default behavior of removing the alias from the rolled-over index stays when `is_write_index` is not explicitly set. Relevant things that are staying the same: - Rollover is rejected if there exist any templates that match the newly-created index and configure the rollover-alias - I think this existed to prevent the situation where an alias pointed to two indices for a short while.
Although this can technically be relaxed, the specific cases that are safe are really particular and difficult to reason about, so leaving the broad restriction in place sounds good --- docs/reference/indices/aliases.asciidoc | 3 + .../reference/indices/rollover-index.asciidoc | 112 +++++++++++++++++- .../rollover/TransportRolloverAction.java | 38 ++++-- .../admin/indices/rollover/RolloverIT.java | 90 ++++++++++++-- .../TransportRolloverActionTests.java | 68 ++++++++--- 5 files changed, 274 insertions(+), 37 deletions(-) diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 8e0e84c6657..81a96510e46 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -257,6 +257,9 @@ and there are multiple indices referenced by an alias, then writes will not be a It is possible to specify an index associated with an alias as a write index using both the aliases API and index creation API. +Setting an index to be the write index with an alias also affects how the alias is manipulated during +Rollover (see <>). + [source,js] -------------------------------------------------- POST /_aliases diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index 1e3a361f1b1..1730c95e0dd 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -4,10 +4,19 @@ The rollover index API rolls an alias over to a new index when the existing index is considered to be too large or too old. -The API accepts a single alias name and a list of `conditions`. The alias -must point to a single index only. If the index satisfies the specified -conditions then a new index is created and the alias is switched to point to -the new index. +The API accepts a single alias name and a list of `conditions`. The alias must point to a write index for +a Rollover request to be valid. There are two ways this can be achieved, and depending on the configuration, the +alias metadata will be updated differently. The two scenarios are as follows: + + - The alias only points to a single index with `is_write_index` not configured (defaults to `null`). + +In this scenario, the rollover alias will be added to the newly created index, and removed +from the original (rolled-over) index. + + - The alias points to one or more indices with `is_write_index` set to `true` on the index to be rolled over (the write index). + +In this scenario, the write index will have its rollover alias' `is_write_index` set to `false`, while the newly created index +will now have the rollover alias pointing to it as the write index with `is_write_index` as `true`. [source,js] -------------------------------------------------- @@ -231,3 +240,98 @@ POST /logs_write/_rollover?dry_run Because the rollover operation creates a new index to rollover to, the <> setting on index creation applies to the rollover action as well. + +[[indices-rollover-is-write-index]] +[float] +=== Write Index Alias Behavior + +The rollover alias when rolling over a write index that has `is_write_index` explicitly set to `true` is not +swapped during rollover actions. Since having an alias point to multiple indices is ambiguous in distinguishing +which is the correct write index to roll over, it is not valid to rollover an alias that points to multiple indices. +For this reason, the default behavior is to swap which index is being pointed to by the write-oriented alias. This +was `logs_write` in some of the above examples. 
Since setting `is_write_index` enables an alias to point to multiple indices +while also being explicit as to which is the write index that rollover should target, removing the alias from the rolled over +index is not necessary. This simplifies things by allowing for one alias to behave both as the write and read aliases for +indices that are being managed with Rollover. + +Look at the behavior of the aliases in the following example where `is_write_index` is set on the rolled over index. + +[source,js] +-------------------------------------------------- +PUT my_logs_index-000001 +{ + "aliases": { + "logs": { "is_write_index": true } <1> + } +} + +PUT logs/_doc/1 +{ + "message": "a dummy log" +} + +POST logs/_refresh + +POST /logs/_rollover +{ + "conditions": { + "max_docs": "1" + } +} + +PUT logs/_doc/2 <2> +{ + "message": "a newer log" +} +-------------------------------------------------- +// CONSOLE +<1> configures `my_logs_index` as the write index for the `logs` alias +<2> newly indexed documents against the `logs` alias will write to the new index + +[source,js] +-------------------------------------------------- +{ + "_index" : "my_logs_index-000002", + "_type" : "_doc", + "_id" : "2", + "_version" : 1, + "result" : "created", + "_shards" : { + "total" : 2, + "successful" : 1, + "failed" : 0 + }, + "_seq_no" : 0, + "_primary_term" : 1 +} +-------------------------------------------------- +// TESTRESPONSE + +////////////////////////// +[source,js] +-------------------------------------------------- +GET _alias +-------------------------------------------------- +// CONSOLE +// TEST[continued] +////////////////////////// + +After the rollover, the alias metadata for the two indices will have the `is_write_index` setting +reflect each index's role, with the newly created index as the write index. 
+ +[source,js] +-------------------------------------------------- +{ + "my_logs_index-000002": { + "aliases": { + "logs": { "is_write_index": true } + } + }, + "my_logs_index-000001": { + "aliases": { + "logs": { "is_write_index" : false } + } + } +} +-------------------------------------------------- +// TESTRESPONSE diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index 3fa046263af..ab05d069003 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -109,8 +109,9 @@ public class TransportRolloverAction extends TransportMasterNodeAction listener) { final MetaData metaData = state.metaData(); validate(metaData, rolloverRequest); - final AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(rolloverRequest.getAlias()); - final IndexMetaData indexMetaData = aliasOrIndex.getIndices().get(0); + final AliasOrIndex.Alias alias = (AliasOrIndex.Alias) metaData.getAliasAndIndexLookup().get(rolloverRequest.getAlias()); + final IndexMetaData indexMetaData = alias.getWriteIndex(); + final boolean explicitWriteIndex = Boolean.TRUE.equals(indexMetaData.getAliases().get(alias.getAliasName()).writeIndex()); final String sourceProvidedName = indexMetaData.getSettings().get(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, indexMetaData.getIndex().getName()); final String sourceIndexName = indexMetaData.getIndex().getName(); @@ -138,10 +139,15 @@ public class TransportRolloverAction extends TransportMasterNodeAction { - // switch the alias to point to the newly created index - indexAliasesService.indicesAliases( - prepareRolloverAliasesUpdateRequest(sourceIndexName, rolloverIndexName, - rolloverRequest), + final IndicesAliasesClusterStateUpdateRequest aliasesUpdateRequest; + if (explicitWriteIndex) { + aliasesUpdateRequest = prepareRolloverAliasesWriteIndexUpdateRequest(sourceIndexName, + rolloverIndexName, rolloverRequest); + } else { + aliasesUpdateRequest = prepareRolloverAliasesUpdateRequest(sourceIndexName, + rolloverIndexName, rolloverRequest); + } + indexAliasesService.indicesAliases(aliasesUpdateRequest, ActionListener.wrap(aliasClusterStateUpdateResponse -> { if (aliasClusterStateUpdateResponse.isAcknowledged()) { clusterService.submitStateUpdateTask("update_rollover_info", new ClusterStateUpdateTask() { @@ -196,8 +202,19 @@ public class TransportRolloverAction extends TransportMasterNodeAction actions = unmodifiableList(Arrays.asList( - new AliasAction.Add(newIndex, request.getAlias(), null, null, null, null), - new AliasAction.Remove(oldIndex, request.getAlias()))); + new AliasAction.Add(newIndex, request.getAlias(), null, null, null, null), + new AliasAction.Remove(oldIndex, request.getAlias()))); + final IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest(actions) + .ackTimeout(request.ackTimeout()) + .masterNodeTimeout(request.masterNodeTimeout()); + return updateRequest; + } + + static IndicesAliasesClusterStateUpdateRequest prepareRolloverAliasesWriteIndexUpdateRequest(String oldIndex, String newIndex, + RolloverRequest request) { + List actions = unmodifiableList(Arrays.asList( + new AliasAction.Add(newIndex, request.getAlias(), null, null, null, true), + new AliasAction.Add(oldIndex, request.getAlias(), null, null, 
null, false))); final IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest(actions) .ackTimeout(request.ackTimeout()) .masterNodeTimeout(request.masterNodeTimeout()); @@ -244,8 +261,9 @@ public class TransportRolloverAction extends TransportMasterNodeAction client().admin().indices().prepareRolloverIndex("alias").dryRun(randomBoolean()).get()); + assertThat(exception.getMessage(), equalTo("source alias [alias] does not point to a write index")); + } + public void testRolloverWithIndexSettings() throws Exception { - assertAcked(prepareCreate("test_index-2").addAlias(new Alias("test_alias")).get()); + Alias testAlias = new Alias("test_alias"); + boolean explicitWriteIndex = randomBoolean(); + if (explicitWriteIndex) { + testAlias.writeIndex(true); + } + assertAcked(prepareCreate("test_index-2").addAlias(testAlias).get()); index("test_index-2", "type1", "1", "field", "value"); flush("test_index-2"); final Settings settings = Settings.builder() @@ -114,12 +165,17 @@ public class RolloverIT extends ESIntegTestCase { assertThat(response.getConditionStatus().size(), equalTo(0)); final ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index-2"); - assertFalse(oldIndex.getAliases().containsKey("test_alias")); final IndexMetaData newIndex = state.metaData().index("test_index-000003"); assertThat(newIndex.getNumberOfShards(), equalTo(1)); assertThat(newIndex.getNumberOfReplicas(), equalTo(0)); assertTrue(newIndex.getAliases().containsKey("test_alias")); assertTrue(newIndex.getAliases().containsKey("extra_alias")); + if (explicitWriteIndex) { + assertFalse(oldIndex.getAliases().get("test_alias").writeIndex()); + assertTrue(newIndex.getAliases().get("test_alias").writeIndex()); + } else { + assertFalse(oldIndex.getAliases().containsKey("test_alias")); + } } public void testRolloverDryRun() throws Exception { @@ -140,7 +196,12 @@ public class RolloverIT extends ESIntegTestCase { } public void testRolloverConditionsNotMet() throws Exception { - assertAcked(prepareCreate("test_index-0").addAlias(new Alias("test_alias")).get()); + boolean explicitWriteIndex = randomBoolean(); + Alias testAlias = new Alias("test_alias"); + if (explicitWriteIndex) { + testAlias.writeIndex(true); + } + assertAcked(prepareCreate("test_index-0").addAlias(testAlias).get()); index("test_index-0", "type1", "1", "field", "value"); flush("test_index-0"); final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias") @@ -160,12 +221,22 @@ public class RolloverIT extends ESIntegTestCase { final ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index-0"); assertTrue(oldIndex.getAliases().containsKey("test_alias")); + if (explicitWriteIndex) { + assertTrue(oldIndex.getAliases().get("test_alias").writeIndex()); + } else { + assertNull(oldIndex.getAliases().get("test_alias").writeIndex()); + } final IndexMetaData newIndex = state.metaData().index("test_index-000001"); assertNull(newIndex); } public void testRolloverWithNewIndexName() throws Exception { - assertAcked(prepareCreate("test_index").addAlias(new Alias("test_alias")).get()); + Alias testAlias = new Alias("test_alias"); + boolean explicitWriteIndex = randomBoolean(); + if (explicitWriteIndex) { + testAlias.writeIndex(true); + } + assertAcked(prepareCreate("test_index").addAlias(testAlias).get()); index("test_index", 
"type1", "1", "field", "value"); flush("test_index"); final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias") @@ -177,9 +248,14 @@ public class RolloverIT extends ESIntegTestCase { assertThat(response.getConditionStatus().size(), equalTo(0)); final ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index"); - assertFalse(oldIndex.getAliases().containsKey("test_alias")); final IndexMetaData newIndex = state.metaData().index("test_new_index"); assertTrue(newIndex.getAliases().containsKey("test_alias")); + if (explicitWriteIndex) { + assertFalse(oldIndex.getAliases().get("test_alias").writeIndex()); + assertTrue(newIndex.getAliases().get("test_alias").writeIndex()); + } else { + assertFalse(oldIndex.getAliases().containsKey("test_alias")); + } } public void testRolloverOnExistingIndex() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 6149c380cd7..d59700f2b7a 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -172,39 +172,75 @@ public class TransportRolloverActionTests extends ESTestCase { assertTrue(foundRemove); } + public void testCreateUpdateAliasRequestWithExplicitWriteIndex() { + String sourceAlias = randomAlphaOfLength(10); + String sourceIndex = randomAlphaOfLength(10); + String targetIndex = randomAlphaOfLength(10); + final RolloverRequest rolloverRequest = new RolloverRequest(sourceAlias, targetIndex); + final IndicesAliasesClusterStateUpdateRequest updateRequest = + TransportRolloverAction.prepareRolloverAliasesWriteIndexUpdateRequest(sourceIndex, targetIndex, rolloverRequest); + + List actions = updateRequest.actions(); + assertThat(actions, hasSize(2)); + boolean foundAddWrite = false; + boolean foundRemoveWrite = false; + for (AliasAction action : actions) { + AliasAction.Add addAction = (AliasAction.Add) action; + if (action.getIndex().equals(targetIndex)) { + assertEquals(sourceAlias, addAction.getAlias()); + assertTrue(addAction.writeIndex()); + foundAddWrite = true; + } else if (action.getIndex().equals(sourceIndex)) { + assertEquals(sourceAlias, addAction.getAlias()); + assertFalse(addAction.writeIndex()); + foundRemoveWrite = true; + } else { + throw new AssertionError("Unknow index [" + action.getIndex() + "]"); + } + } + assertTrue(foundAddWrite); + assertTrue(foundRemoveWrite); + } + public void testValidation() { String index1 = randomAlphaOfLength(10); - String alias = randomAlphaOfLength(10); + String aliasWithWriteIndex = randomAlphaOfLength(10); String index2 = randomAlphaOfLength(10); - String aliasWithMultipleIndices = randomAlphaOfLength(10); + String aliasWithNoWriteIndex = randomAlphaOfLength(10); + Boolean firstIsWriteIndex = randomFrom(false, null); final Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build(); - final MetaData metaData = MetaData.builder() + MetaData.Builder metaDataBuilder = MetaData.builder() .put(IndexMetaData.builder(index1) 
.settings(settings) - .putAlias(AliasMetaData.builder(alias)) - .putAlias(AliasMetaData.builder(aliasWithMultipleIndices)) - ) - .put(IndexMetaData.builder(index2) - .settings(settings) - .putAlias(AliasMetaData.builder(aliasWithMultipleIndices)) - ).build(); + .putAlias(AliasMetaData.builder(aliasWithWriteIndex)) + .putAlias(AliasMetaData.builder(aliasWithNoWriteIndex).writeIndex(firstIsWriteIndex)) + ); + IndexMetaData.Builder indexTwoBuilder = IndexMetaData.builder(index2).settings(settings); + if (firstIsWriteIndex == null) { + indexTwoBuilder.putAlias(AliasMetaData.builder(aliasWithNoWriteIndex).writeIndex(randomFrom(false, null))); + } + metaDataBuilder.put(indexTwoBuilder); + MetaData metaData = metaDataBuilder.build(); - expectThrows(IllegalArgumentException.class, () -> - TransportRolloverAction.validate(metaData, new RolloverRequest(aliasWithMultipleIndices, + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> + TransportRolloverAction.validate(metaData, new RolloverRequest(aliasWithNoWriteIndex, randomAlphaOfLength(10)))); - expectThrows(IllegalArgumentException.class, () -> + assertThat(exception.getMessage(), equalTo("source alias [" + aliasWithNoWriteIndex + "] does not point to a write index")); + exception = expectThrows(IllegalArgumentException.class, () -> TransportRolloverAction.validate(metaData, new RolloverRequest(randomFrom(index1, index2), randomAlphaOfLength(10)))); - expectThrows(IllegalArgumentException.class, () -> + assertThat(exception.getMessage(), equalTo("source alias is a concrete index")); + exception = expectThrows(IllegalArgumentException.class, () -> TransportRolloverAction.validate(metaData, new RolloverRequest(randomAlphaOfLength(5), randomAlphaOfLength(10))) ); - TransportRolloverAction.validate(metaData, new RolloverRequest(alias, randomAlphaOfLength(10))); + assertThat(exception.getMessage(), equalTo("source alias does not exist")); + TransportRolloverAction.validate(metaData, new RolloverRequest(aliasWithWriteIndex, randomAlphaOfLength(10))); } public void testGenerateRolloverIndexName() { @@ -248,7 +284,7 @@ public class TransportRolloverActionTests extends ESTestCase { public void testRejectDuplicateAlias() { final IndexTemplateMetaData template = IndexTemplateMetaData.builder("test-template") .patterns(Arrays.asList("foo-*", "bar-*")) - .putAlias(AliasMetaData.builder("foo-write")).putAlias(AliasMetaData.builder("bar-write")) + .putAlias(AliasMetaData.builder("foo-write")).putAlias(AliasMetaData.builder("bar-write").writeIndex(randomBoolean())) .build(); final MetaData metaData = MetaData.builder().put(createMetaData(), false).put(template).build(); String indexName = randomFrom("foo-123", "bar-xyz"); From c69e62d96fef43e526bd130acc429a5a8536cc56 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Mon, 30 Jul 2018 14:46:24 -0700 Subject: [PATCH 05/22] Painless: Add PainlessConstructor (#32447) PainlessMethod was being used as both a method and a constructor, and while there are similarities, there are also some major differences. This allows the reflection objects to be stored reducing the number of other pieces of data stored in a PainlessMethod as they are now redundant. This temporarily increases some of the code in FunctionRef and PainlessDocGenerator as they now differentiate between constructors and methods, BUT is also makes the code more maintainable because there aren't checks in several places anymore to differentiate. 
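To make the constructor/method split concrete, here is a hypothetical lookup fragment (not part of the patch; it assumes a `PainlessLookup` instance named `painlessLookup` and a whitelisted `ArrayList` with a one-argument `add` method, as suggested by the diff below): constructors are now resolved through their own arity-only key, while methods keep the name/arity key.

[source,java]
--------------------------------------------------
// Constructors now live in their own map on PainlessClass, keyed by arity alone.
PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(ArrayList.class);
PainlessConstructor noArgConstructor =
    struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(0));

// Methods are unchanged and remain keyed by method name plus arity.
PainlessMethod addMethod =
    struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("add", 1));
--------------------------------------------------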
--- .../java/org/elasticsearch/painless/Def.java | 2 +- .../elasticsearch/painless/FunctionRef.java | 119 +++++++++++++----- .../painless/lookup/PainlessClass.java | 6 +- .../painless/lookup/PainlessClassBuilder.java | 4 +- .../painless/lookup/PainlessConstructor.java | 39 ++++++ .../lookup/PainlessLookupBuilder.java | 59 ++++++--- .../lookup/PainlessLookupUtility.java | 7 ++ .../painless/node/ECapturingFunctionRef.java | 6 +- .../painless/node/EFunctionRef.java | 2 +- .../painless/node/EListInit.java | 11 +- .../elasticsearch/painless/node/EMapInit.java | 11 +- .../elasticsearch/painless/node/ENewObj.java | 18 +-- .../painless/PainlessDocGenerator.java | 99 +++++++++++++-- 13 files changed, 307 insertions(+), 76 deletions(-) create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 8a90f53b4fd..9be2559e67a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -368,7 +368,7 @@ public final class Def { ref = new FunctionRef(clazz, interfaceMethod, call, handle.type(), captures.length); } else { // whitelist lookup - ref = new FunctionRef(painlessLookup, clazz, type, call, captures.length); + ref = FunctionRef.resolveFromLookup(painlessLookup, clazz, type, call, captures.length); } final CallSite callSite = LambdaBootstrap.lambdaBootstrap( methodHandlesLookup, diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index 7b7e8d25f39..0da91438326 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -20,13 +20,16 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.objectweb.asm.Type; import java.lang.invoke.MethodType; +import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; +import java.util.List; import static org.elasticsearch.painless.WriterConstants.CLASS_NAME; import static org.objectweb.asm.Opcodes.H_INVOKEINTERFACE; @@ -59,8 +62,10 @@ public class FunctionRef { /** interface method */ public final PainlessMethod interfaceMethod; - /** delegate method */ - public final PainlessMethod delegateMethod; + /** delegate method type parameters */ + public final List> delegateTypeParameters; + /** delegate method return type */ + public final Class delegateReturnType; /** factory method type descriptor */ public final String factoryDescriptor; @@ -80,9 +85,47 @@ public class FunctionRef { * @param call the right hand side of a method reference expression * @param numCaptures number of captured arguments */ - public FunctionRef(PainlessLookup painlessLookup, Class expected, String type, String call, int numCaptures) { - this(expected, painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod, - lookup(painlessLookup, expected, type, call, numCaptures > 0), numCaptures); + public static FunctionRef 
resolveFromLookup( + PainlessLookup painlessLookup, Class expected, String type, String call, int numCaptures) { + + if ("new".equals(call)) { + return new FunctionRef(expected, painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod, + lookup(painlessLookup, expected, type), numCaptures); + } else { + return new FunctionRef(expected, painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod, + lookup(painlessLookup, expected, type, call, numCaptures > 0), numCaptures); + } + } + + /** + * Creates a new FunctionRef (already resolved) + * @param expected functional interface type to implement + * @param interfaceMethod functional interface method + * @param delegateConstructor implementation constructor + * @param numCaptures number of captured arguments + */ + public FunctionRef(Class expected, PainlessMethod interfaceMethod, PainlessConstructor delegateConstructor, int numCaptures) { + Constructor javaConstructor = delegateConstructor.javaConstructor; + MethodType delegateMethodType = delegateConstructor.methodType; + + interfaceMethodName = interfaceMethod.name; + factoryMethodType = MethodType.methodType(expected, + delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); + interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); + + delegateClassName = javaConstructor.getDeclaringClass().getName(); + isDelegateInterface = false; + delegateInvokeType = H_NEWINVOKESPECIAL; + delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME; + this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); + + this.interfaceMethod = interfaceMethod; + delegateTypeParameters = delegateConstructor.typeParameters; + delegateReturnType = void.class; + + factoryDescriptor = factoryMethodType.toMethodDescriptorString(); + interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); + delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString()); } /** @@ -112,9 +155,7 @@ public class FunctionRef { isDelegateInterface = delegateMethod.target.isInterface(); } - if ("".equals(delegateMethod.name)) { - delegateInvokeType = H_NEWINVOKESPECIAL; - } else if (Modifier.isStatic(delegateMethod.modifiers)) { + if (Modifier.isStatic(delegateMethod.modifiers)) { delegateInvokeType = H_INVOKESTATIC; } else if (delegateMethod.target.isInterface()) { delegateInvokeType = H_INVOKEINTERFACE; @@ -126,7 +167,8 @@ public class FunctionRef { this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); this.interfaceMethod = interfaceMethod; - this.delegateMethod = delegateMethod; + delegateTypeParameters = delegateMethod.arguments; + delegateReturnType = delegateMethod.rtn; factoryDescriptor = factoryMethodType.toMethodDescriptorString(); interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); @@ -151,13 +193,37 @@ public class FunctionRef { isDelegateInterface = false; this.interfaceMethod = null; - delegateMethod = null; + delegateTypeParameters = null; + delegateReturnType = null; factoryDescriptor = null; interfaceType = null; delegateType = null; } + /** + * Looks up {@code type} from the whitelist, and returns a matching constructor. + */ + private static PainlessConstructor lookup(PainlessLookup painlessLookup, Class expected, String type) { + // check its really a functional interface + // for e.g. 
Comparable + PainlessMethod method = painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod; + if (method == null) { + throw new IllegalArgumentException("Cannot convert function reference [" + type + "::new] " + + "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); + } + + // lookup requested constructor + PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(painlessLookup.getJavaClassFromPainlessType(type)); + PainlessConstructor impl = struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(method.arguments.size())); + + if (impl == null) { + throw new IllegalArgumentException("Unknown reference [" + type + "::new] matching [" + expected + "]"); + } + + return impl; + } + /** * Looks up {@code type::call} from the whitelist, and returns a matching method. */ @@ -174,27 +240,22 @@ public class FunctionRef { // lookup requested method PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(painlessLookup.getJavaClassFromPainlessType(type)); final PainlessMethod impl; - // ctor ref - if ("new".equals(call)) { - impl = struct.constructors.get(PainlessLookupUtility.buildPainlessMethodKey("", method.arguments.size())); - } else { - // look for a static impl first - PainlessMethod staticImpl = - struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.arguments.size())); - if (staticImpl == null) { - // otherwise a virtual impl - final int arity; - if (receiverCaptured) { - // receiver captured - arity = method.arguments.size(); - } else { - // receiver passed - arity = method.arguments.size() - 1; - } - impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity)); + // look for a static impl first + PainlessMethod staticImpl = + struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.arguments.size())); + if (staticImpl == null) { + // otherwise a virtual impl + final int arity; + if (receiverCaptured) { + // receiver captured + arity = method.arguments.size(); } else { - impl = staticImpl; + // receiver passed + arity = method.arguments.size() - 1; } + impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity)); + } else { + impl = staticImpl; } if (impl == null) { throw new IllegalArgumentException("Unknown reference [" + type + "::" + call + "] matching " + diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index 24dcf0ebdba..835bfb5c505 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -24,7 +24,8 @@ import java.util.Collections; import java.util.Map; public final class PainlessClass { - public final Map constructors; + public final Map constructors; + public final Map staticMethods; public final Map methods; @@ -36,13 +37,14 @@ public final class PainlessClass { public final PainlessMethod functionalMethod; - PainlessClass(Map constructors, + PainlessClass(Map constructors, Map staticMethods, Map methods, Map staticFields, Map fields, Map getterMethodHandles, Map setterMethodHandles, PainlessMethod functionalMethod) { this.constructors = Collections.unmodifiableMap(constructors); + this.staticMethods = Collections.unmodifiableMap(staticMethods); this.methods = 
Collections.unmodifiableMap(methods); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java index 2f41ed5dca8..866f711ba0f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java @@ -24,7 +24,8 @@ import java.util.HashMap; import java.util.Map; final class PainlessClassBuilder { - final Map constructors; + final Map constructors; + final Map staticMethods; final Map methods; @@ -38,6 +39,7 @@ final class PainlessClassBuilder { PainlessClassBuilder() { constructors = new HashMap<>(); + staticMethods = new HashMap<>(); methods = new HashMap<>(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java new file mode 100644 index 00000000000..76597c1a29d --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless.lookup; + +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodType; +import java.lang.reflect.Constructor; +import java.util.List; + +public class PainlessConstructor { + public final Constructor javaConstructor; + public final List> typeParameters; + public final MethodHandle methodHandle; + public final MethodType methodType; + + PainlessConstructor(Constructor javaConstructor, List> typeParameters, MethodHandle methodHandle, MethodType methodType) { + this.javaConstructor = javaConstructor; + this.typeParameters = typeParameters; + this.methodHandle = methodHandle; + this.methodType = methodType; + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index ba48fbea734..b0a11dea58a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -40,15 +40,47 @@ import java.util.Map; import java.util.Objects; import java.util.regex.Pattern; -import static org.elasticsearch.painless.lookup.PainlessLookupUtility.CONSTRUCTOR_NAME; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_CLASS_NAME; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessConstructorKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessFieldKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToJavaType; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typesToCanonicalTypeNames; -public class PainlessLookupBuilder { +public final class PainlessLookupBuilder { + + private static class PainlessConstructorCacheKey { + + private final Class targetType; + private final List> typeParameters; + + private PainlessConstructorCacheKey(Class targetType, List> typeParameters) { + this.targetType = targetType; + this.typeParameters = Collections.unmodifiableList(typeParameters); + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessConstructorCacheKey that = (PainlessConstructorCacheKey)object; + + return Objects.equals(targetType, that.targetType) && + Objects.equals(typeParameters, that.typeParameters); + } + + @Override + public int hashCode() { + return Objects.hash(targetType, typeParameters); + } + } private static class PainlessMethodCacheKey { @@ -120,8 +152,9 @@ public class PainlessLookupBuilder { } } - private static final Map painlessMethodCache = new HashMap<>(); - private static final Map painlessFieldCache = new HashMap<>(); + private static final Map painlessConstuctorCache = new HashMap<>(); + private static final Map painlessMethodCache = new HashMap<>(); + private static final Map painlessFieldCache = new HashMap<>(); private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); @@ -343,11 +376,10 @@ public class PainlessLookupBuilder { "[[" + 
targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); } - String painlessMethodKey = buildPainlessMethodKey(CONSTRUCTOR_NAME, typeParametersSize); - PainlessMethod painlessConstructor = painlessClassBuilder.constructors.get(painlessMethodKey); + String painlessConstructorKey = buildPainlessConstructorKey(typeParametersSize); + PainlessConstructor painlessConstructor = painlessClassBuilder.constructors.get(painlessConstructorKey); if (painlessConstructor == null) { - org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor); MethodHandle methodHandle; try { @@ -359,17 +391,16 @@ public class PainlessLookupBuilder { MethodType methodType = methodHandle.type(); - painlessConstructor = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(targetClass, CONSTRUCTOR_NAME, typeParameters), - key -> new PainlessMethod(CONSTRUCTOR_NAME, targetClass, null, void.class, typeParameters, - asmConstructor, javaConstructor.getModifiers(), methodHandle, methodType) + painlessConstructor = painlessConstuctorCache.computeIfAbsent( + new PainlessConstructorCacheKey(targetClass, typeParameters), + key -> new PainlessConstructor(javaConstructor, typeParameters, methodHandle, methodType) ); - painlessClassBuilder.constructors.put(painlessMethodKey, painlessConstructor); - } else if (painlessConstructor.arguments.equals(typeParameters) == false){ + painlessClassBuilder.constructors.put(painlessConstructorKey, painlessConstructor); + } else if (painlessConstructor.typeParameters.equals(typeParameters) == false){ throw new IllegalArgumentException("cannot have constructors " + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(painlessConstructor.arguments) + "] " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(painlessConstructor.typeParameters) + "] " + "with the same arity and different type parameters"); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java index 86d3f876638..0a181c5f1b0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -336,6 +336,13 @@ public final class PainlessLookupUtility { type == String.class; } + /** + * Constructs a painless constructor key used to lookup painless constructors from a painless class. + */ + public static String buildPainlessConstructorKey(int constructorArity) { + return CONSTRUCTOR_NAME + "/" + constructorArity; + } + /** * Constructs a painless method key used to lookup painless methods from a painless class. 
*/ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index 7b35bc1b48e..a4f7b130045 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -77,18 +77,18 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda // static case if (captured.clazz != def.class) { try { - ref = new FunctionRef(locals.getPainlessLookup(), expected, + ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected, PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1); // check casts between the interface method and the delegate method are legal for (int i = 0; i < ref.interfaceMethod.arguments.size(); ++i) { Class from = ref.interfaceMethod.arguments.get(i); - Class to = ref.delegateMethod.arguments.get(i); + Class to = ref.delegateTypeParameters.get(i); AnalyzerCaster.getLegalCast(location, from, to, false, true); } if (ref.interfaceMethod.rtn != void.class) { - AnalyzerCaster.getLegalCast(location, ref.delegateMethod.rtn, ref.interfaceMethod.rtn, false, true); + AnalyzerCaster.getLegalCast(location, ref.delegateReturnType, ref.interfaceMethod.rtn, false, true); } } catch (IllegalArgumentException e) { throw createError(e); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index d787db5d41c..23fb8a4180e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -90,7 +90,7 @@ public final class EFunctionRef extends AExpression implements ILambda { } } else { // whitelist lookup - ref = new FunctionRef(locals.getPainlessLookup(), expected, type, call, 0); + ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected, type, call, 0); } } catch (IllegalArgumentException e) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index e0af653d209..bd4bea95ea0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -23,10 +23,12 @@ import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; +import org.objectweb.asm.commons.Method; import java.util.ArrayList; import java.util.List; @@ -38,7 +40,7 @@ import java.util.Set; public final class EListInit extends AExpression { private final List values; - private PainlessMethod constructor = null; + private PainlessConstructor constructor = null; private PainlessMethod method = null; public EListInit(Location location, List values) { @@ -62,8 +64,8 @@ public final class EListInit extends AExpression { actual = 
ArrayList.class; - constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors - .get(PainlessLookupUtility.buildPainlessMethodKey("", 0)); + constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get( + PainlessLookupUtility.buildPainlessConstructorKey(0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); @@ -92,7 +94,8 @@ public final class EListInit extends AExpression { writer.newInstance(MethodWriter.getType(actual)); writer.dup(); - writer.invokeConstructor(Type.getType(constructor.target), constructor.method); + writer.invokeConstructor( + Type.getType(constructor.javaConstructor.getDeclaringClass()), Method.getMethod(constructor.javaConstructor)); for (AExpression value : values) { writer.dup(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index d81f08dc3cc..5bef62a4b18 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -23,10 +23,12 @@ import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; +import org.objectweb.asm.commons.Method; import java.util.HashMap; import java.util.List; @@ -39,7 +41,7 @@ public final class EMapInit extends AExpression { private final List keys; private final List values; - private PainlessMethod constructor = null; + private PainlessConstructor constructor = null; private PainlessMethod method = null; public EMapInit(Location location, List keys, List values) { @@ -68,8 +70,8 @@ public final class EMapInit extends AExpression { actual = HashMap.class; - constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors - .get(PainlessLookupUtility.buildPainlessMethodKey("", 0)); + constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get( + PainlessLookupUtility.buildPainlessConstructorKey(0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); @@ -111,7 +113,8 @@ public final class EMapInit extends AExpression { writer.newInstance(MethodWriter.getType(actual)); writer.dup(); - writer.invokeConstructor(Type.getType(constructor.target), constructor.method); + writer.invokeConstructor( + Type.getType(constructor.javaConstructor.getDeclaringClass()), Method.getMethod(constructor.javaConstructor)); for (int index = 0; index < keys.size(); ++index) { AExpression key = keys.get(index); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index f092a17c9fc..4e08f257386 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -24,9 +24,10 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import 
org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessLookupUtility; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.objectweb.asm.Type; +import org.objectweb.asm.commons.Method; import java.util.List; import java.util.Objects; @@ -40,7 +41,7 @@ public final class ENewObj extends AExpression { private final String type; private final List arguments; - private PainlessMethod constructor; + private PainlessConstructor constructor; public ENewObj(Location location, String type, List arguments) { super(location); @@ -65,16 +66,16 @@ public final class ENewObj extends AExpression { } PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual); - constructor = struct.constructors.get(PainlessLookupUtility.buildPainlessMethodKey("", arguments.size())); + constructor = struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(arguments.size())); if (constructor != null) { - Class[] types = new Class[constructor.arguments.size()]; - constructor.arguments.toArray(types); + Class[] types = new Class[constructor.typeParameters.size()]; + constructor.typeParameters.toArray(types); - if (constructor.arguments.size() != arguments.size()) { + if (constructor.typeParameters.size() != arguments.size()) { throw createError(new IllegalArgumentException( "When calling constructor on type [" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "] " + - "expected [" + constructor.arguments.size() + "] arguments, but found [" + arguments.size() + "].")); + "expected [" + constructor.typeParameters.size() + "] arguments, but found [" + arguments.size() + "].")); } for (int argument = 0; argument < arguments.size(); ++argument) { @@ -107,7 +108,8 @@ public final class ENewObj extends AExpression { argument.write(writer, globals); } - writer.invokeConstructor(Type.getType(constructor.target), constructor.method); + writer.invokeConstructor( + Type.getType(constructor.javaConstructor.getDeclaringClass()), Method.getMethod(constructor.javaConstructor)); } @Override diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index ff0d4231175..65429154301 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; @@ -57,7 +58,8 @@ public class PainlessDocGenerator { private static final Logger logger = ESLoggerFactory.getLogger(PainlessDocGenerator.class); private static final Comparator FIELD_NAME = comparing(f -> f.name); private static final Comparator METHOD_NAME = comparing(m -> m.name); - private static final Comparator NUMBER_OF_ARGS = comparing(m -> m.arguments.size()); + private static final Comparator METHOD_NUMBER_OF_PARAMS = comparing(m -> m.arguments.size()); 
+ private static final Comparator CONSTRUCTOR_NUMBER_OF_PARAMS = comparing(m -> m.typeParameters.size()); public static void main(String[] args) throws IOException { Path apiRootPath = PathUtils.get(args[0]); @@ -103,12 +105,15 @@ public class PainlessDocGenerator { Consumer documentField = field -> PainlessDocGenerator.documentField(typeStream, field); Consumer documentMethod = method -> PainlessDocGenerator.documentMethod(typeStream, method); + Consumer documentConstructor = + constructor -> PainlessDocGenerator.documentConstructor(typeStream, constructor); struct.staticFields.values().stream().sorted(FIELD_NAME).forEach(documentField); struct.fields.values().stream().sorted(FIELD_NAME).forEach(documentField); - struct.staticMethods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(documentMethod); - struct.constructors.values().stream().sorted(NUMBER_OF_ARGS).forEach(documentMethod); + struct.staticMethods.values().stream().sorted( + METHOD_NAME.thenComparing(METHOD_NUMBER_OF_PARAMS)).forEach(documentMethod); + struct.constructors.values().stream().sorted(CONSTRUCTOR_NUMBER_OF_PARAMS).forEach(documentConstructor); Map> inherited = new TreeMap<>(); - struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(method -> { + struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(METHOD_NUMBER_OF_PARAMS)).forEach(method -> { if (method.target == clazz) { documentMethod(typeStream, method); } else { @@ -164,6 +169,41 @@ public class PainlessDocGenerator { stream.println(); } + /** + * Document a constructor. + */ + private static void documentConstructor(PrintStream stream, PainlessConstructor constructor) { + stream.print("* ++[["); + emitAnchor(stream, constructor); + stream.print("]]"); + + String javadocRoot = javadocRoot(constructor.javaConstructor.getDeclaringClass()); + emitJavadocLink(stream, javadocRoot, constructor); + stream.print('['); + + stream.print(constructorName(constructor)); + + stream.print("]("); + boolean first = true; + for (Class arg : constructor.typeParameters) { + if (first) { + first = false; + } else { + stream.print(", "); + } + emitType(stream, arg); + } + stream.print(")++"); + + if (javadocRoot.equals("java8")) { + stream.print(" ("); + emitJavadocLink(stream, "java9", constructor); + stream.print("[java 9])"); + } + + stream.println(); + } + /** * Document a method. */ @@ -176,10 +216,8 @@ public class PainlessDocGenerator { stream.print("static "); } - if (false == method.name.equals("")) { - emitType(stream, method.rtn); - stream.print(' '); - } + emitType(stream, method.rtn); + stream.print(' '); String javadocRoot = javadocRoot(method); emitJavadocLink(stream, javadocRoot, method); @@ -216,6 +254,17 @@ public class PainlessDocGenerator { stream.print(PainlessLookupUtility.typeToCanonicalTypeName(clazz).replace('.', '-')); } + /** + * Anchor text for a {@link PainlessConstructor}. + */ + private static void emitAnchor(PrintStream stream, PainlessConstructor constructor) { + emitAnchor(stream, constructor.javaConstructor.getDeclaringClass()); + stream.print('-'); + stream.print(constructorName(constructor)); + stream.print('-'); + stream.print(constructor.typeParameters.size()); + } + /** * Anchor text for a {@link PainlessMethod}. 
*/ @@ -236,8 +285,12 @@ public class PainlessDocGenerator { stream.print(field.name); } + private static String constructorName(PainlessConstructor constructor) { + return PainlessLookupUtility.typeToCanonicalTypeName(constructor.javaConstructor.getDeclaringClass()); + } + private static String methodName(PainlessMethod method) { - return method.name.equals("") ? PainlessLookupUtility.typeToCanonicalTypeName(method.target) : method.name; + return PainlessLookupUtility.typeToCanonicalTypeName(method.target); } /** @@ -269,6 +322,34 @@ public class PainlessDocGenerator { } } + /** + * Emit an external link to Javadoc for a {@link PainlessMethod}. + * + * @param root name of the root uri variable + */ + private static void emitJavadocLink(PrintStream stream, String root, PainlessConstructor constructor) { + stream.print("link:{"); + stream.print(root); + stream.print("-javadoc}/"); + stream.print(classUrlPath(constructor.javaConstructor.getDeclaringClass())); + stream.print(".html#"); + stream.print(constructorName(constructor)); + stream.print("%2D"); + boolean first = true; + for (Class clazz: constructor.typeParameters) { + if (first) { + first = false; + } else { + stream.print("%2D"); + } + stream.print(clazz.getName()); + if (clazz.isArray()) { + stream.print(":A"); + } + } + stream.print("%2D"); + } + /** * Emit an external link to Javadoc for a {@link PainlessMethod}. * From 670630948b4831704fbaf4821554d4ca52a5a42d Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 30 Jul 2018 18:07:49 -0400 Subject: [PATCH 06/22] Switch security spi example to new style Requests (#32341) In #29623 we added `Request` object flavored requests to the low level REST client and in #30315 we deprecated the old `performRequest`s. This changes all calls in the `x-pack/qa/security-example-spi-extension` project to use the new versions. 
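For illustration, a minimal sketch of the new call style this change adopts, assuming an existing org.elasticsearch.client.RestClient named restClient and a hypothetical header name; headers that used to be passed as BasicHeader varargs are now attached through RequestOptions:

    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.Response;

    // Build the request up front instead of calling the deprecated performRequest(String, String, Header...)
    Request request = new Request("GET", "/");
    RequestOptions.Builder options = request.getOptions().toBuilder();
    options.addHeader("X-Example-User", "known-user");       // hypothetical header name and value
    request.setOptions(options);
    Response response = restClient.performRequest(request);  // throws ResponseException on error status codes
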
--- .../example/realm/CustomRealmIT.java | 14 +++++---- .../example/role/CustomRolesProviderIT.java | 29 ++++++++++++------- 2 files changed, 28 insertions(+), 15 deletions(-) diff --git a/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmIT.java b/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmIT.java index 65ec595a0d4..f7bdb0d0baa 100644 --- a/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmIT.java +++ b/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmIT.java @@ -5,10 +5,11 @@ */ package org.elasticsearch.example.realm; -import org.apache.http.message.BasicHeader; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.transport.NoNodeAvailableException; @@ -50,7 +51,7 @@ public class CustomRealmIT extends ESIntegTestCase { public void testHttpConnectionWithNoAuthentication() throws Exception { try { - getRestClient().performRequest("GET", "/"); + getRestClient().performRequest(new Request("GET", "/")); fail("request should have failed"); } catch(ResponseException e) { Response response = e.getResponse(); @@ -61,9 +62,12 @@ public class CustomRealmIT extends ESIntegTestCase { } public void testHttpAuthentication() throws Exception { - Response response = getRestClient().performRequest("GET", "/", - new BasicHeader(CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER), - new BasicHeader(CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW.toString())); + Request request = new Request("GET", "/"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader(CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER); + options.addHeader(CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW.toString()); + request.setOptions(options); + Response response = getRestClient().performRequest(request); assertThat(response.getStatusLine().getStatusCode(), is(200)); } diff --git a/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/role/CustomRolesProviderIT.java b/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/role/CustomRolesProviderIT.java index bd3c53a3b41..57a895848e3 100644 --- a/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/role/CustomRolesProviderIT.java +++ b/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/role/CustomRolesProviderIT.java @@ -5,7 +5,8 @@ */ package org.elasticsearch.example.role; -import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.network.NetworkModule; @@ -33,10 +34,17 @@ import static org.hamcrest.Matchers.is; * Integration test for custom roles providers. 
*/ public class CustomRolesProviderIT extends ESIntegTestCase { - private static final String TEST_USER = "test_user"; private static final String TEST_PWD = "change_me"; + private static final RequestOptions AUTH_OPTIONS; + static { + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.addHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, + basicAuthHeaderValue(TEST_USER, new SecureString(TEST_PWD.toCharArray()))); + AUTH_OPTIONS = options.build(); + } + @Override protected Settings externalClusterClientSettings() { return Settings.builder() @@ -59,7 +67,9 @@ public class CustomRolesProviderIT extends ESIntegTestCase { public void testAuthorizedCustomRoleSucceeds() throws Exception { setupTestUser(ROLE_B); // roleB has all permissions on index "foo", so creating "foo" should succeed - Response response = getRestClient().performRequest("PUT", "/" + INDEX, authHeader()); + Request request = new Request("PUT", "/" + INDEX); + request.setOptions(AUTH_OPTIONS); + Response response = getRestClient().performRequest(request); assertThat(response.getStatusLine().getStatusCode(), is(200)); } @@ -71,7 +81,9 @@ public class CustomRolesProviderIT extends ESIntegTestCase { setupTestUser(ROLE_A); // roleB has all permissions on index "foo", so creating "foo" should succeed try { - getRestClient().performRequest("PUT", "/" + INDEX, authHeader()); + Request request = new Request("PUT", "/" + INDEX); + request.setOptions(AUTH_OPTIONS); + getRestClient().performRequest(request); fail(ROLE_A + " should not be authorized to create index " + INDEX); } catch (ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); @@ -82,15 +94,12 @@ public class CustomRolesProviderIT extends ESIntegTestCase { setupTestUser("unknown"); // roleB has all permissions on index "foo", so creating "foo" should succeed try { - getRestClient().performRequest("PUT", "/" + INDEX, authHeader()); + Request request = new Request("PUT", "/" + INDEX); + request.setOptions(AUTH_OPTIONS); + getRestClient().performRequest(request); fail(ROLE_A + " should not be authorized to create index " + INDEX); } catch (ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); } } - - private BasicHeader authHeader() { - return new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - basicAuthHeaderValue(TEST_USER, new SecureString(TEST_PWD.toCharArray()))); - } } From 4101fc4e3d0bd3ccc64c99e1bdd93dee6d8df4d8 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 30 Jul 2018 18:16:26 -0400 Subject: [PATCH 07/22] Switch security to new style Requests (#32290) In #29623 we added `Request` object flavored requests to the low level REST client and in #30315 we deprecated the old `performRequest`s. This changes all calls in the `x-pack/plugin/security` project to use the new versions. 
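A related sketch (illustrative, assuming the same restClient as above): query parameters and JSON bodies also move onto the Request object, replacing the old params-map and StringEntity arguments:

    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;

    Request request = new Request("PUT", "/index1/type/1");   // path reused from the tests below
    request.addParameter("refresh", "true");                  // hypothetical parameter, for illustration only
    request.setJsonEntity("{\"test\":\"test\"}");             // sets the body with a JSON content type
    Response response = restClient.performRequest(request);
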
--- .../integration/BulkUpdateTests.java | 63 +++++++++---------- .../integration/ClearRealmsCacheTests.java | 16 +++-- .../integration/IndexPrivilegeTests.java | 12 ++-- .../elasticsearch/license/LicensingTests.java | 32 ++++++---- .../test/NativeRealmIntegTestCase.java | 36 +++++------ .../xpack/security/SecurityPluginTests.java | 17 ++--- .../security/audit/index/AuditTrailTests.java | 23 ++++--- .../xpack/security/authc/RunAsIntegTests.java | 53 ++++++++-------- .../authc/pki/PkiOptionalClientAuthTests.java | 15 +++-- .../action/RestAuthenticateActionTests.java | 16 ++--- .../user/AnonymousUserIntegTests.java | 3 +- .../xpack/ssl/SSLClientAuthTests.java | 12 ++-- 12 files changed, 161 insertions(+), 137 deletions(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/BulkUpdateTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/BulkUpdateTests.java index 03ace7fb71d..bb0036e9f87 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/BulkUpdateTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/BulkUpdateTests.java @@ -5,15 +5,12 @@ */ package org.elasticsearch.integration; -import org.apache.http.Header; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.apache.http.message.BasicHeader; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.client.Response; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; @@ -24,10 +21,8 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import java.io.IOException; -import java.util.Collections; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; public class BulkUpdateTests extends SecurityIntegTestCase { @@ -77,46 +72,48 @@ public class BulkUpdateTests extends SecurityIntegTestCase { public void testThatBulkUpdateDoesNotLoseFieldsHttp() throws IOException { final String path = "/index1/type/1"; - final Header basicAuthHeader = new BasicHeader("Authorization", - UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + final RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder(); + optionsBuilder.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + final RequestOptions options = optionsBuilder.build(); - StringEntity body = new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON); - Response response = getRestClient().performRequest("PUT", path, Collections.emptyMap(), body, basicAuthHeader); - assertThat(response.getStatusLine().getStatusCode(), equalTo(201)); + Request createRequest = new Request("PUT", path); + createRequest.setOptions(options); + createRequest.setJsonEntity("{\"test\":\"test\"}"); + getRestClient().performRequest(createRequest); - response = getRestClient().performRequest("GET", path, basicAuthHeader); - 
assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); - assertThat(EntityUtils.toString(response.getEntity()), containsString("\"test\":\"test\"")); + Request getRequest = new Request("GET", path); + getRequest.setOptions(options); + assertThat(EntityUtils.toString(getRestClient().performRequest(getRequest).getEntity()), containsString("\"test\":\"test\"")); if (randomBoolean()) { flushAndRefresh(); } //update with new field - body = new StringEntity("{\"doc\": {\"not test\": \"not test\"}}", ContentType.APPLICATION_JSON); - response = getRestClient().performRequest("POST", path + "/_update", Collections.emptyMap(), body, basicAuthHeader); - assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); + Request updateRequest = new Request("POST", path + "/_update"); + updateRequest.setOptions(options); + updateRequest.setJsonEntity("{\"doc\": {\"not test\": \"not test\"}}"); + getRestClient().performRequest(updateRequest); - response = getRestClient().performRequest("GET", path, basicAuthHeader); - assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); - String responseBody = EntityUtils.toString(response.getEntity()); - assertThat(responseBody, containsString("\"test\":\"test\"")); - assertThat(responseBody, containsString("\"not test\":\"not test\"")); + String afterUpdate = EntityUtils.toString(getRestClient().performRequest(getRequest).getEntity()); + assertThat(afterUpdate, containsString("\"test\":\"test\"")); + assertThat(afterUpdate, containsString("\"not test\":\"not test\"")); // this part is important. Without this, the document may be read from the translog which would bypass the bug where // FLS kicks in because the request can't be found and only returns meta fields flushAndRefresh(); - body = new StringEntity("{\"update\": {\"_index\": \"index1\", \"_type\": \"type\", \"_id\": \"1\"}}\n" + - "{\"doc\": {\"bulk updated\":\"bulk updated\"}}\n", ContentType.APPLICATION_JSON); - response = getRestClient().performRequest("POST", "/_bulk", Collections.emptyMap(), body, basicAuthHeader); - assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); + Request bulkRequest = new Request("POST", "/_bulk"); + bulkRequest.setOptions(options); + bulkRequest.setJsonEntity( + "{\"update\": {\"_index\": \"index1\", \"_type\": \"type\", \"_id\": \"1\"}}\n" + + "{\"doc\": {\"bulk updated\":\"bulk updated\"}}\n"); + getRestClient().performRequest(bulkRequest); - response = getRestClient().performRequest("GET", path, basicAuthHeader); - responseBody = EntityUtils.toString(response.getEntity()); - assertThat(responseBody, containsString("\"test\":\"test\"")); - assertThat(responseBody, containsString("\"not test\":\"not test\"")); - assertThat(responseBody, containsString("\"bulk updated\":\"bulk updated\"")); + String afterBulk = EntityUtils.toString(getRestClient().performRequest(getRequest).getEntity()); + assertThat(afterBulk, containsString("\"test\":\"test\"")); + assertThat(afterBulk, containsString("\"not test\":\"not test\"")); + assertThat(afterBulk, containsString("\"bulk updated\":\"bulk updated\"")); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java index 8b9e1954264..fc02a5c4d62 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java @@ 
-5,10 +5,11 @@ */ package org.elasticsearch.integration; -import org.apache.http.message.BasicHeader; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; @@ -160,10 +161,15 @@ public class ClearRealmsCacheTests extends SecurityIntegTestCase { } static void executeHttpRequest(String path, Map params) throws Exception { - Response response = getRestClient().performRequest("POST", path, params, - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())))); + Request request = new Request("POST", path); + for (Map.Entry param : params.entrySet()) { + request.addParameter(param.getKey(), param.getValue()); + } + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + request.setOptions(options); + Response response = getRestClient().performRequest(request); assertNotNull(response.getEntity()); assertTrue(EntityUtils.toString(response.getEntity()).contains("cluster_name")); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java index 825bfcd432f..57262822982 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java @@ -5,7 +5,8 @@ */ package org.elasticsearch.integration; -import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.xpack.core.security.authc.support.Hasher; @@ -388,9 +389,12 @@ public class IndexPrivilegeTests extends AbstractPrivilegeTestCase { public void testThatUnknownUserIsRejectedProperly() throws Exception { try { - getRestClient().performRequest("GET", "/", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue("idonotexist", new SecureString("passwd".toCharArray())))); + Request request = new Request("GET", "/"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", + UsernamePasswordToken.basicAuthHeaderValue("idonotexist", new SecureString("passwd".toCharArray()))); + request.setOptions(options); + getRestClient().performRequest(request); fail("request should have failed"); } catch(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java index 012050f4259..7a35b0bc422 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.license; -import org.apache.http.message.BasicHeader; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; @@ -15,6 +14,8 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.transport.NoNodeAvailableException; @@ -189,31 +190,36 @@ public class LicensingTests extends SecurityIntegTestCase { } public void testRestAuthenticationByLicenseType() throws Exception { - Response response = getRestClient().performRequest("GET", "/"); + Response unauthorizedRootResponse = getRestClient().performRequest(new Request("GET", "/")); // the default of the licensing tests is basic - assertThat(response.getStatusLine().getStatusCode(), is(200)); + assertThat(unauthorizedRootResponse.getStatusLine().getStatusCode(), is(200)); ResponseException e = expectThrows(ResponseException.class, - () -> getRestClient().performRequest("GET", "/_xpack/security/_authenticate")); + () -> getRestClient().performRequest(new Request("GET", "/_xpack/security/_authenticate"))); assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); // generate a new license with a mode that enables auth License.OperationMode mode = randomFrom(License.OperationMode.GOLD, License.OperationMode.TRIAL, License.OperationMode.PLATINUM, License.OperationMode.STANDARD); enableLicensing(mode); - e = expectThrows(ResponseException.class, () -> getRestClient().performRequest("GET", "/")); + e = expectThrows(ResponseException.class, () -> getRestClient().performRequest(new Request("GET", "/"))); assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); e = expectThrows(ResponseException.class, - () -> getRestClient().performRequest("GET", "/_xpack/security/_authenticate")); + () -> getRestClient().performRequest(new Request("GET", "/_xpack/security/_authenticate"))); assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); - final String basicAuthValue = UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())); - response = getRestClient().performRequest("GET", "/", new BasicHeader("Authorization", basicAuthValue)); - assertThat(response.getStatusLine().getStatusCode(), is(200)); - response = getRestClient().performRequest("GET", "/_xpack/security/_authenticate", - new BasicHeader("Authorization", basicAuthValue)); - assertThat(response.getStatusLine().getStatusCode(), is(200)); + RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder(); + optionsBuilder.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + RequestOptions options = optionsBuilder.build(); + Request rootRequest = new Request("GET", "/"); + rootRequest.setOptions(options); + Response authorizedRootResponse = 
getRestClient().performRequest(rootRequest); + assertThat(authorizedRootResponse.getStatusLine().getStatusCode(), is(200)); + Request authenticateRequest = new Request("GET", "/_xpack/security/_authenticate"); + authenticateRequest.setOptions(options); + Response authorizedAuthenticateResponse = getRestClient().performRequest(authenticateRequest); + assertThat(authorizedAuthenticateResponse.getStatusLine().getStatusCode(), is(200)); } public void testSecurityActionsByLicenseType() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java index 72550832779..af5b73d889d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java @@ -5,12 +5,8 @@ */ package org.elasticsearch.test; - -import org.apache.http.HttpEntity; -import org.apache.http.entity.ContentType; -import org.apache.http.message.BasicHeader; -import org.apache.http.nio.entity.NStringEntity; -import org.elasticsearch.client.Response; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.util.set.Sets; @@ -26,7 +22,6 @@ import org.junit.Before; import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.Set; /** @@ -82,23 +77,22 @@ public abstract class NativeRealmIntegTestCase extends SecurityIntegTestCase { public void setupReservedPasswords(RestClient restClient) throws IOException { logger.info("setting up reserved passwords for test"); { - String payload = "{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}"; - HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); - BasicHeader authHeader = new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, BOOTSTRAP_PASSWORD)); - String route = "/_xpack/security/user/elastic/_password"; - Response response = restClient.performRequest("PUT", route, Collections.emptyMap(), entity, authHeader); - assertEquals(response.getStatusLine().getReasonPhrase(), 200, response.getStatusLine().getStatusCode()); + Request request = new Request("PUT", "/_xpack/security/user/elastic/_password"); + request.setJsonEntity("{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, BOOTSTRAP_PASSWORD)); + request.setOptions(options); + restClient.performRequest(request); } + RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder(); + optionsBuilder.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, reservedPassword)); + RequestOptions options = optionsBuilder.build(); for (String username : Arrays.asList(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME)) { - String payload = "{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}"; - HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); - BasicHeader authHeader = new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, 
reservedPassword)); - String route = "/_xpack/security/user/" + username + "/_password"; - Response response = restClient.performRequest("PUT", route, Collections.emptyMap(), entity, authHeader); - assertEquals(response.getStatusLine().getReasonPhrase(), 200, response.getStatusLine().getStatusCode()); + Request request = new Request("PUT", "/_xpack/security/user/" + username + "/_password"); + request.setJsonEntity("{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}"); + request.setOptions(options); + restClient.performRequest(request); } logger.info("setting up reserved passwords finished"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityPluginTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityPluginTests.java index 368449adeed..d57a5d151a9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityPluginTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityPluginTests.java @@ -5,14 +5,14 @@ */ package org.elasticsearch.xpack.security; -import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; -import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import java.io.IOException; @@ -31,17 +31,20 @@ public class SecurityPluginTests extends SecurityIntegTestCase { public void testThatPluginIsLoaded() throws IOException { try { logger.info("executing unauthorized request to /_xpack info"); - getRestClient().performRequest("GET", "/_xpack"); + getRestClient().performRequest(new Request("GET", "/_xpack")); fail("request should have failed"); } catch(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(UNAUTHORIZED.getStatus())); } logger.info("executing authorized request to /_xpack infos"); - Response response = getRestClient().performRequest("GET", "/_xpack", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())))); + + Request request = new Request("GET", "/_xpack"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + request.setOptions(options); + Response response = getRestClient().performRequest(request); assertThat(response.getStatusLine().getStatusCode(), is(OK.getStatus())); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java index 453820ea519..ef3c6aa56ae 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java @@ -5,12 +5,13 @@ */ package org.elasticsearch.xpack.security.audit.index; -import 
org.apache.http.message.BasicHeader; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Requests; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; @@ -89,10 +90,12 @@ public class AuditTrailTests extends SecurityIntegTestCase { public void testAuditAccessDeniedWithRunAsUser() throws Exception { try { - getRestClient().performRequest("GET", "/.security/_search", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(AUTHENTICATE_USER, TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, EXECUTE_USER)); + Request request = new Request("GET", "/.security/_search"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(AUTHENTICATE_USER, TEST_PASSWORD_SECURE_STRING)); + options.addHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, EXECUTE_USER); + request.setOptions(options); + getRestClient().performRequest(request); fail("request should have failed"); } catch (final ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); @@ -111,10 +114,12 @@ public class AuditTrailTests extends SecurityIntegTestCase { public void testAuditRunAsDeniedEmptyUser() throws Exception { try { - getRestClient().performRequest("GET", "/.security/_search", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(AUTHENTICATE_USER, TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "")); + Request request = new Request("GET", "/.security/_search"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(AUTHENTICATE_USER, TEST_PASSWORD_SECURE_STRING)); + options.addHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, ""); + request.setOptions(options); + getRestClient().performRequest(request); fail("request should have failed"); } catch (final ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java index 9311b9c02f6..6d5c6770bf2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.security.authc; -import org.apache.http.message.BasicHeader; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.client.Response; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import 
org.elasticsearch.client.ResponseException; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.SecureString; @@ -126,11 +126,13 @@ public class RunAsIntegTests extends SecurityIntegTestCase { public void testUserImpersonationUsingHttp() throws Exception { // use the transport client user and try to run as try { - getRestClient().performRequest("GET", "/_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(TRANSPORT_CLIENT_USER, - TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME)); + Request request = new Request("GET", "/_nodes"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", + UsernamePasswordToken.basicAuthHeaderValue(TRANSPORT_CLIENT_USER, TEST_PASSWORD_SECURE_STRING)); + options.addHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME); + request.setOptions(options); + getRestClient().performRequest(request); fail("request should have failed"); } catch(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); @@ -139,10 +141,11 @@ public class RunAsIntegTests extends SecurityIntegTestCase { if (runAsHasSuperUserRole == false) { try { //the run as user shouldn't have access to the nodes api - getRestClient().performRequest("GET", "/_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, - TEST_PASSWORD_SECURE_STRING))); + Request request = new Request("GET", "/_nodes"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, TEST_PASSWORD_SECURE_STRING)); + request.setOptions(options); + getRestClient().performRequest(request); fail("request should have failed"); } catch (ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); @@ -150,12 +153,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { } // but when running as a different user it should work - Response response = getRestClient().performRequest("GET", "/_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, - TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME)); - assertThat(response.getStatusLine().getStatusCode(), is(200)); + getRestClient().performRequest(requestForUserRunAsUser(SecuritySettingsSource.TEST_USER_NAME)); } public void testEmptyUserImpersonationHeader() throws Exception { @@ -183,11 +181,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { public void testEmptyHeaderUsingHttp() throws Exception { try { - getRestClient().performRequest("GET", "/_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, - TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "")); + getRestClient().performRequest(requestForUserRunAsUser("")); fail("request should have failed"); } catch(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); @@ -219,17 +213,22 @@ public class RunAsIntegTests extends SecurityIntegTestCase { public void testNonExistentRunAsUserUsingHttp() throws 
Exception { try { - getRestClient().performRequest("GET", "/_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, - TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "idontexist")); + getRestClient().performRequest(requestForUserRunAsUser("idontexist")); fail("request should have failed"); } catch (ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); } } + private static Request requestForUserRunAsUser(String user) { + Request request = new Request("GET", "/_nodes"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, TEST_PASSWORD_SECURE_STRING)); + options.addHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, user); + request.setOptions(options); + return request; + } + // build our own here to better mimic an actual client... TransportClient getTransportClient(Settings extraSettings) { NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().get(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiOptionalClientAuthTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiOptionalClientAuthTests.java index d47ffe4a344..4fb94c74949 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiOptionalClientAuthTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiOptionalClientAuthTests.java @@ -5,8 +5,9 @@ */ package org.elasticsearch.xpack.security.authc.pki; -import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; @@ -76,13 +77,15 @@ public class PkiOptionalClientAuthTests extends SecuritySingleNodeTestCase { public void testRestClientWithoutClientCertificate() throws Exception { SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(getSSLContext()); try (RestClient restClient = createRestClient(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy), "https")) { - ResponseException e = expectThrows(ResponseException.class, () -> restClient.performRequest("GET", "_nodes")); + ResponseException e = expectThrows(ResponseException.class, () -> restClient.performRequest(new Request("GET", "_nodes"))); assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); - Response response = restClient.performRequest("GET", "_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())))); + Request request = new Request("GET", "_nodes"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + request.setOptions(options); + Response response = restClient.performRequest(request); assertThat(response.getStatusLine().getStatusCode(), is(200)); } } diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java index 67bfc2ecdcb..13a124e4bdc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java @@ -5,7 +5,8 @@ */ package org.elasticsearch.xpack.security.rest.action; -import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.SecureString; @@ -52,11 +53,12 @@ public class RestAuthenticateActionTests extends SecurityIntegTestCase { } public void testAuthenticateApi() throws Exception { - Response response = getRestClient().performRequest("GET", "/_xpack/security/_authenticate", - new BasicHeader("Authorization", basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())))); - assertThat(response.getStatusLine().getStatusCode(), is(200)); - ObjectPath objectPath = ObjectPath.createFromResponse(response); + Request request = new Request("GET", "/_xpack/security/_authenticate"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + request.setOptions(options); + ObjectPath objectPath = ObjectPath.createFromResponse(getRestClient().performRequest(request)); assertThat(objectPath.evaluate("username").toString(), equalTo(SecuritySettingsSource.TEST_USER_NAME)); List roles = objectPath.evaluate("roles"); assertThat(roles.size(), is(1)); @@ -65,7 +67,7 @@ public class RestAuthenticateActionTests extends SecurityIntegTestCase { public void testAuthenticateApiWithoutAuthentication() throws Exception { try { - Response response = getRestClient().performRequest("GET", "/_xpack/security/_authenticate"); + Response response = getRestClient().performRequest(new Request("GET", "/_xpack/security/_authenticate")); if (anonymousEnabled) { assertThat(response.getStatusLine().getStatusCode(), is(200)); ObjectPath objectPath = ObjectPath.createFromResponse(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserIntegTests.java index 9529a12a30a..431b3e855c6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserIntegTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.security.user; import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; @@ -46,7 +47,7 @@ public class AnonymousUserIntegTests extends SecurityIntegTestCase { public void testAnonymousViaHttp() throws Exception { try { - getRestClient().performRequest("GET", "/_nodes"); + 
getRestClient().performRequest(new Request("GET", "/_nodes")); fail("request should have failed"); } catch(ResponseException e) { int statusCode = e.getResponse().getStatusLine().getStatusCode(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java index d205c7cd933..21da604374f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java @@ -6,12 +6,13 @@ package org.elasticsearch.xpack.ssl; import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.ssl.SSLContexts; import org.apache.http.util.EntityUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.transport.TransportClient; @@ -71,7 +72,7 @@ public class SSLClientAuthTests extends SecurityIntegTestCase { public void testThatHttpFailsWithoutSslClientAuth() throws IOException { SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(SSLContexts.createDefault(), NoopHostnameVerifier.INSTANCE); try (RestClient restClient = createRestClient(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy), "https")) { - restClient.performRequest("GET", "/"); + restClient.performRequest(new Request("GET", "/")); fail("Expected SSLHandshakeException"); } catch (IOException e) { Throwable t = ExceptionsHelper.unwrap(e, CertPathBuilderException.class); @@ -87,8 +88,11 @@ public class SSLClientAuthTests extends SecurityIntegTestCase { public void testThatHttpWorksWithSslClientAuth() throws IOException { SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(getSSLContext(), NoopHostnameVerifier.INSTANCE); try (RestClient restClient = createRestClient(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy), "https")) { - Response response = restClient.performRequest("GET", "/", - new BasicHeader("Authorization", basicAuthHeaderValue(transportClientUsername(), transportClientPassword()))); + Request request = new Request("GET", "/"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", basicAuthHeaderValue(transportClientUsername(), transportClientPassword())); + request.setOptions(options); + Response response = restClient.performRequest(request); assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); assertThat(EntityUtils.toString(response.getEntity()), containsString("You Know, for Search")); } From d75efbcf6839a63e05ae3b88c84b6324921f25b3 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 31 Jul 2018 09:07:47 +1000 Subject: [PATCH 08/22] Make get all app privs requires "*" permission (#32460) The default behaviour for "GetPrivileges" is to get all application privileges. This should only be allowed if the user has access to the "*" application. 
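A minimal sketch of the intended check, with illustrative names rather than the exact classes touched below: an empty application list on a GetPrivileges request now means "get everything" and is only permitted when the granted application patterns include "*":

    import java.util.Collection;
    import java.util.Set;
    import java.util.function.Predicate;

    // Simplified form of the request predicate; names are illustrative.
    static boolean permitted(Collection<String> requestedApplications,
                             Set<String> grantedApplicationNames,
                             Predicate<String> applicationPredicate) {
        if (requestedApplications.isEmpty()) {
            // "get all application privileges" requires the wildcard application
            return grantedApplicationNames.contains("*");
        }
        return requestedApplications.stream().allMatch(applicationPredicate);
    }
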
--- .../action/privilege/GetPrivilegesRequest.java | 2 +- .../privilege/ConditionalClusterPrivileges.java | 4 +++- .../ManageApplicationPrivilegesTests.java | 14 ++++++++++++++ 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java index 559e0ab8d98..585fb372d32 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java @@ -50,7 +50,7 @@ public final class GetPrivilegesRequest extends ActionRequest implements Applica @Override public Collection getApplicationNames() { - return Collections.singleton(application); + return application == null ? Collections.emptySet() : Collections.singleton(application); } public void privileges(String... privileges) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java index c068c77781b..e204d89b1c0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java @@ -135,7 +135,9 @@ public final class ConditionalClusterPrivileges { this.requestPredicate = request -> { if (request instanceof ApplicationPrivilegesRequest) { final ApplicationPrivilegesRequest privRequest = (ApplicationPrivilegesRequest) request; - return privRequest.getApplicationNames().stream().allMatch(application -> applicationPredicate.test(application)); + final Collection requestApplicationNames = privRequest.getApplicationNames(); + return requestApplicationNames.isEmpty() ? 
this.applicationNames.contains("*") + : requestApplicationNames.stream().allMatch(application -> applicationPredicate.test(application)); } return false; }; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java index a5c1bbc98d1..9c113d4ff0f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java @@ -50,6 +50,7 @@ import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class ManageApplicationPrivilegesTests extends ESTestCase { @@ -140,6 +141,19 @@ public class ManageApplicationPrivilegesTests extends ESTestCase { assertThat(cloudAndSwiftypePredicate, not(predicateMatches(putKibana))); } + public void testSecurityForGetAllApplicationPrivileges() { + final GetPrivilegesRequest getAll = new GetPrivilegesRequest(); + getAll.application(null); + getAll.privileges(new String[0]); + + assertThat(getAll.validate(), nullValue()); + + final ManageApplicationPrivileges kibanaOnly = new ManageApplicationPrivileges(Sets.newHashSet("kibana-*")); + final ManageApplicationPrivileges allApps = new ManageApplicationPrivileges(Sets.newHashSet("*")); + + assertThat(kibanaOnly.getRequestPredicate(), not(predicateMatches(getAll))); + assertThat(allApps.getRequestPredicate(), predicateMatches(getAll)); + } private ManageApplicationPrivileges clone(ManageApplicationPrivileges original) { return new ManageApplicationPrivileges(new LinkedHashSet<>(original.getApplicationNames())); From f0b36679eca9653c3358842b137fdf354a328901 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Tue, 31 Jul 2018 10:59:36 +1000 Subject: [PATCH 09/22] [Kerberos] Remove Kerberos bootstrap checks (#32451) This commit removes Kerberos bootstrap checks as they were more validation checks and better done in Kerberos realm constructor than as bootstrap checks. This also moves the check for one Kerberos realm per node to where we initialize realms. This commit adds few validations which were missing earlier like missing read permissions on keytab file or if it is directory to throw exception with error message. 
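A condensed sketch of the validation that moves into the realm constructor (the helper name is illustrative; the checks and messages mirror the change below):

    import java.nio.file.Files;
    import java.nio.file.Path;

    // Fail fast on a bad keytab instead of relying on a bootstrap check.
    static void validateKeytab(Path keytabPath) {
        if (Files.exists(keytabPath) == false) {
            throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] does not exist");
        }
        if (Files.isDirectory(keytabPath)) {
            throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] is a directory");
        }
        if (Files.isReadable(keytabPath) == false) {
            throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] must have read permission");
        }
    }
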
--- .../xpack/security/Security.java | 4 +- .../xpack/security/authc/Realms.java | 9 ++ .../authc/kerberos/KerberosRealm.java | 11 ++ .../kerberos/KerberosRealmBootstrapCheck.java | 69 ----------- .../xpack/security/authc/RealmsTests.java | 44 +++++-- .../KerberosRealmBootstrapCheckTests.java | 114 ------------------ .../authc/kerberos/KerberosRealmTests.java | 54 +++++++++ 7 files changed, 106 insertions(+), 199 deletions(-) delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheckTests.java diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index f4bb4b7eb3b..df5e8dcae1b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -176,7 +176,6 @@ import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; -import org.elasticsearch.xpack.security.authc.kerberos.KerberosRealmBootstrapCheck; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener; @@ -306,8 +305,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw new PasswordHashingAlgorithmBootstrapCheck(), new FIPS140SecureSettingsBootstrapCheck(settings, env), new FIPS140JKSKeystoreBootstrapCheck(settings), - new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings), - new KerberosRealmBootstrapCheck(env))); + new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings))); checks.addAll(InternalRealms.getBootstrapChecks(settings, env)); this.bootstrapChecks = Collections.unmodifiableList(checks); Automatons.updateMaxDeterminizedStates(settings); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java index 0284ae9a05f..8b80c1f1d1c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; /** @@ -152,6 +153,7 @@ public class Realms extends AbstractComponent implements Iterable { Settings realmsSettings = RealmSettings.get(settings); Set internalTypes = new HashSet<>(); List realms = new ArrayList<>(); + List kerberosRealmNames = new ArrayList<>(); for (String name : realmsSettings.names()) { Settings realmSettings = realmsSettings.getAsSettings(name); String type = realmSettings.get("type"); @@ -178,6 +180,13 @@ public 
class Realms extends AbstractComponent implements Iterable { } internalTypes.add(type); } + if (KerberosRealmSettings.TYPE.equals(type)) { + kerberosRealmNames.add(name); + if (kerberosRealmNames.size() > 1) { + throw new IllegalArgumentException("multiple realms " + kerberosRealmNames.toString() + " configured of type [" + type + + "], [" + type + "] can only have one such realm configured"); + } + } realms.add(factory.create(config)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java index b4a8b6aabf0..71eeb8b2398 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.ietf.jgss.GSSException; +import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; import java.util.List; @@ -87,6 +88,16 @@ public final class KerberosRealm extends Realm implements CachingRealm { this.kerberosTicketValidator = kerberosTicketValidator; this.threadPool = threadPool; this.keytabPath = config.env().configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(config.settings())); + + if (Files.exists(keytabPath) == false) { + throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] does not exist"); + } + if (Files.isDirectory(keytabPath)) { + throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] is a directory"); + } + if (Files.isReadable(keytabPath) == false) { + throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] must have read permission"); + } this.enableKerberosDebug = KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE.get(config.settings()); this.removeRealmName = KerberosRealmSettings.SETTING_REMOVE_REALM_NAME.get(config.settings()); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java deleted file mode 100644 index bab899a8664..00000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -package org.elasticsearch.xpack.security.authc.kerberos; - -import org.elasticsearch.bootstrap.BootstrapCheck; -import org.elasticsearch.bootstrap.BootstrapContext; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.core.security.authc.RealmSettings; -import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; - -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Map; -import java.util.Map.Entry; - -/** - * This class is used to perform bootstrap checks for kerberos realm. - *

- * We use service keytabs for validating incoming kerberos tickets and is a - * required configuration. Due to JVM wide system properties for Kerberos we - * cannot support multiple Kerberos realms. This class adds checks for node to - * fail if service keytab does not exist or multiple kerberos realms have been - * configured. - */ -public class KerberosRealmBootstrapCheck implements BootstrapCheck { - private final Environment env; - - public KerberosRealmBootstrapCheck(final Environment env) { - this.env = env; - } - - @Override - public BootstrapCheckResult check(final BootstrapContext context) { - final Map realmsSettings = RealmSettings.getRealmSettings(context.settings); - boolean isKerberosRealmConfigured = false; - for (final Entry entry : realmsSettings.entrySet()) { - final String name = entry.getKey(); - final Settings realmSettings = entry.getValue(); - final String type = realmSettings.get("type"); - if (Strings.hasText(type) == false) { - return BootstrapCheckResult.failure("missing realm type for [" + name + "] realm"); - } - if (KerberosRealmSettings.TYPE.equals(type)) { - if (isKerberosRealmConfigured) { - return BootstrapCheckResult.failure( - "multiple [" + type + "] realms are configured. [" + type + "] can only have one such realm configured"); - } - isKerberosRealmConfigured = true; - - final Path keytabPath = env.configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(realmSettings)); - if (Files.exists(keytabPath) == false) { - return BootstrapCheckResult.failure("configured service key tab file [" + keytabPath + "] does not exist"); - } - } - } - return BootstrapCheckResult.success(); - } - - @Override - public boolean alwaysEnforce() { - return true; - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java index a71f5cb1cf7..9d795826298 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.junit.Before; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -51,13 +52,16 @@ public class RealmsTests extends ESTestCase { private XPackLicenseState licenseState; private ThreadContext threadContext; private ReservedRealm reservedRealm; + private int randomRealmTypesCount; @Before public void init() throws Exception { factories = new HashMap<>(); factories.put(FileRealmSettings.TYPE, config -> new DummyRealm(FileRealmSettings.TYPE, config)); factories.put(NativeRealmSettings.TYPE, config -> new DummyRealm(NativeRealmSettings.TYPE, config)); - for (int i = 0; i < randomIntBetween(1, 5); i++) { + factories.put(KerberosRealmSettings.TYPE, config -> new DummyRealm(KerberosRealmSettings.TYPE, config)); + randomRealmTypesCount = randomIntBetween(1, 5); + for (int i = 0; i < randomRealmTypesCount; i++) { String name = "type_" + i; factories.put(name, config -> new DummyRealm(name, config)); } @@ -73,13 +77,13 @@ public class RealmsTests extends ESTestCase { public void testWithSettings() throws Exception { Settings.Builder builder = Settings.builder() .put("path.home", createTempDir()); - List orders = new ArrayList<>(factories.size() - 2); - for (int 
i = 0; i < factories.size() - 2; i++) { + List orders = new ArrayList<>(randomRealmTypesCount); + for (int i = 0; i < randomRealmTypesCount; i++) { orders.add(i); } Collections.shuffle(orders, random()); Map orderToIndex = new HashMap<>(); - for (int i = 0; i < factories.size() - 2; i++) { + for (int i = 0; i < randomRealmTypesCount; i++) { builder.put("xpack.security.authc.realms.realm_" + i + ".type", "type_" + i); builder.put("xpack.security.authc.realms.realm_" + i + ".order", orders.get(i)); orderToIndex.put(orders.get(i), i); @@ -107,14 +111,14 @@ public class RealmsTests extends ESTestCase { public void testWithSettingsWhereDifferentRealmsHaveSameOrder() throws Exception { Settings.Builder builder = Settings.builder() .put("path.home", createTempDir()); - List randomSeq = new ArrayList<>(factories.size() - 2); - for (int i = 0; i < factories.size() - 2; i++) { + List randomSeq = new ArrayList<>(randomRealmTypesCount); + for (int i = 0; i < randomRealmTypesCount; i++) { randomSeq.add(i); } Collections.shuffle(randomSeq, random()); TreeMap nameToRealmId = new TreeMap<>(); - for (int i = 0; i < factories.size() - 2; i++) { + for (int i = 0; i < randomRealmTypesCount; i++) { int randomizedRealmId = randomSeq.get(i); String randomizedRealmName = randomAlphaOfLengthBetween(12,32); nameToRealmId.put("realm_" + randomizedRealmName, randomizedRealmId); @@ -181,13 +185,13 @@ public class RealmsTests extends ESTestCase { public void testUnlicensedWithOnlyCustomRealms() throws Exception { Settings.Builder builder = Settings.builder() .put("path.home", createTempDir()); - List orders = new ArrayList<>(factories.size() - 2); - for (int i = 0; i < factories.size() - 2; i++) { + List orders = new ArrayList<>(randomRealmTypesCount); + for (int i = 0; i < randomRealmTypesCount; i++) { orders.add(i); } Collections.shuffle(orders, random()); Map orderToIndex = new HashMap<>(); - for (int i = 0; i < factories.size() - 2; i++) { + for (int i = 0; i < randomRealmTypesCount; i++) { builder.put("xpack.security.authc.realms.realm_" + i + ".type", "type_" + i); builder.put("xpack.security.authc.realms.realm_" + i + ".order", orders.get(i)); orderToIndex.put(orders.get(i), i); @@ -384,13 +388,13 @@ public class RealmsTests extends ESTestCase { public void testDisabledRealmsAreNotAdded() throws Exception { Settings.Builder builder = Settings.builder() .put("path.home", createTempDir()); - List orders = new ArrayList<>(factories.size() - 2); - for (int i = 0; i < factories.size() - 2; i++) { + List orders = new ArrayList<>(randomRealmTypesCount); + for (int i = 0; i < randomRealmTypesCount; i++) { orders.add(i); } Collections.shuffle(orders, random()); Map orderToIndex = new HashMap<>(); - for (int i = 0; i < factories.size() - 2; i++) { + for (int i = 0; i < randomRealmTypesCount; i++) { builder.put("xpack.security.authc.realms.realm_" + i + ".type", "type_" + i); builder.put("xpack.security.authc.realms.realm_" + i + ".order", orders.get(i)); boolean enabled = randomBoolean(); @@ -520,6 +524,20 @@ public class RealmsTests extends ESTestCase { } } + public void testInitRealmsFailsForMultipleKerberosRealms() throws IOException { + final Settings.Builder builder = Settings.builder().put("path.home", createTempDir()); + builder.put("xpack.security.authc.realms.realm_1.type", "kerberos"); + builder.put("xpack.security.authc.realms.realm_1.order", 1); + builder.put("xpack.security.authc.realms.realm_2.type", "kerberos"); + builder.put("xpack.security.authc.realms.realm_2.order", 2); + final Settings settings = 
builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, + () -> new Realms(settings, env, factories, licenseState, threadContext, reservedRealm)); + assertThat(iae.getMessage(), is(equalTo( + "multiple realms [realm_1, realm_2] configured of type [kerberos], [kerberos] can only have one such realm configured"))); + } + static class DummyRealm extends Realm { DummyRealm(String type, RealmConfig config) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheckTests.java deleted file mode 100644 index b6e1df9ddbb..00000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheckTests.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.security.authc.kerberos; - -import org.elasticsearch.bootstrap.BootstrapCheck; -import org.elasticsearch.bootstrap.BootstrapContext; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.security.authc.RealmSettings; -import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; -import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; - -import java.io.IOException; -import java.nio.file.Path; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; - -public class KerberosRealmBootstrapCheckTests extends ESTestCase { - - public void testBootstrapCheckFailsForMultipleKerberosRealms() throws IOException { - final Path tempDir = createTempDir(); - final Settings settings1 = buildKerberosRealmSettings("kerb1", false, tempDir); - final Settings settings2 = buildKerberosRealmSettings("kerb2", false, tempDir); - final Settings settings3 = realm("pki1", PkiRealmSettings.TYPE, Settings.builder()).build(); - final Settings settings = - Settings.builder().put("path.home", tempDir).put(settings1).put(settings2).put(settings3).build(); - final BootstrapContext context = new BootstrapContext(settings, null); - final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = - new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(settings)); - final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); - assertThat(result, is(notNullValue())); - assertThat(result.isFailure(), is(true)); - assertThat(result.getMessage(), equalTo("multiple [" + KerberosRealmSettings.TYPE + "] realms are configured. 
[" - + KerberosRealmSettings.TYPE + "] can only have one such realm configured")); - } - - public void testBootstrapCheckFailsForMissingKeytabFile() throws IOException { - final Path tempDir = createTempDir(); - final Settings settings = - Settings.builder().put("path.home", tempDir).put(buildKerberosRealmSettings("kerb1", true, tempDir)).build(); - final BootstrapContext context = new BootstrapContext(settings, null); - final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = - new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(settings)); - final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); - assertThat(result, is(notNullValue())); - assertThat(result.isFailure(), is(true)); - assertThat(result.getMessage(), - equalTo("configured service key tab file [" + tempDir.resolve("kerb1.keytab").toString() + "] does not exist")); - } - - public void testBootstrapCheckFailsForMissingRealmType() throws IOException { - final Path tempDir = createTempDir(); - final String name = "kerb1"; - final Settings settings1 = buildKerberosRealmSettings("kerb1", false, tempDir); - final Settings settings2 = realm(name, randomFrom("", " "), Settings.builder()).build(); - final Settings settings = - Settings.builder().put("path.home", tempDir).put(settings1).put(settings2).build(); - final BootstrapContext context = new BootstrapContext(settings, null); - final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = - new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(settings)); - final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); - assertThat(result, is(notNullValue())); - assertThat(result.isFailure(), is(true)); - assertThat(result.getMessage(), equalTo("missing realm type for [" + name + "] realm")); - } - - public void testBootstrapCheckSucceedsForCorrectConfiguration() throws IOException { - final Path tempDir = createTempDir(); - final Settings finalSettings = - Settings.builder().put("path.home", tempDir).put(buildKerberosRealmSettings("kerb1", false, tempDir)).build(); - final BootstrapContext context = new BootstrapContext(finalSettings, null); - final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = - new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(finalSettings)); - final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); - assertThat(result, is(notNullValue())); - assertThat(result.isSuccess(), is(true)); - } - - public void testBootstrapCheckSucceedsForNoKerberosRealms() throws IOException { - final Path tempDir = createTempDir(); - final Settings finalSettings = Settings.builder().put("path.home", tempDir).build(); - final BootstrapContext context = new BootstrapContext(finalSettings, null); - final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = - new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(finalSettings)); - final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); - assertThat(result, is(notNullValue())); - assertThat(result.isSuccess(), is(true)); - } - - private Settings buildKerberosRealmSettings(final String name, final boolean missingKeytab, final Path tempDir) throws IOException { - final Settings.Builder builder = Settings.builder(); - if (missingKeytab == false) { - KerberosTestCase.writeKeyTab(tempDir.resolve(name + ".keytab"), null); - } - builder.put(KerberosTestCase.buildKerberosRealmSettings(tempDir.resolve(name + ".keytab").toString())); - return realm(name, 
KerberosRealmSettings.TYPE, builder).build(); - } - - private Settings.Builder realm(final String name, final String type, final Settings.Builder settings) { - final String prefix = RealmSettings.PREFIX + name + "."; - if (type != null) { - settings.put("type", type); - } - final Settings.Builder builder = Settings.builder().put(settings.normalizePrefix(prefix).build(), false); - return builder; - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java index 43536abaf29..0f972498ab3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java @@ -11,17 +11,30 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper.UserData; import org.ietf.jgss.GSSException; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.SeekableByteChannel; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; import java.util.Arrays; +import java.util.EnumSet; +import java.util.Set; import javax.security.auth.login.LoginException; @@ -31,6 +44,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.mockito.AdditionalMatchers.aryEq; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; @@ -94,4 +108,44 @@ public class KerberosRealmTests extends KerberosRealmTestCase { assertThat(future.actionGet(), is(nullValue())); } + public void testKerberosRealmWithInvalidKeytabPathConfigurations() throws IOException { + final String keytabPathCase = randomFrom("keytabPathAsDirectory", "keytabFileDoesNotExist", "keytabPathWithNoReadPermissions"); + final String expectedErrorMessage; + final String keytabPath; + final Set filePerms; + switch (keytabPathCase) { + case "keytabPathAsDirectory": + final String dirName = randomAlphaOfLength(5); + Files.createDirectory(dir.resolve(dirName)); + keytabPath = dir.resolve(dirName).toString(); + expectedErrorMessage = "configured service key tab file [" + keytabPath + "] is a directory"; + break; + case "keytabFileDoesNotExist": + keytabPath = dir.resolve(randomAlphaOfLength(5) + ".keytab").toString(); + expectedErrorMessage = 
"configured service key tab file [" + keytabPath + "] does not exist"; + break; + case "keytabPathWithNoReadPermissions": + filePerms = PosixFilePermissions.fromString("---------"); + final String keytabFileName = randomAlphaOfLength(5) + ".keytab"; + final FileAttribute> fileAttributes = PosixFilePermissions.asFileAttribute(filePerms); + try (SeekableByteChannel byteChannel = Files.newByteChannel(dir.resolve(keytabFileName), + EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), fileAttributes)) { + byteChannel.write(ByteBuffer.wrap(randomByteArrayOfLength(10))); + } + keytabPath = dir.resolve(keytabFileName).toString(); + expectedErrorMessage = "configured service key tab file [" + keytabPath + "] must have read permission"; + break; + default: + throw new IllegalArgumentException("Unknown test case :" + keytabPathCase); + } + + settings = KerberosTestCase.buildKerberosRealmSettings(keytabPath, 100, "10m", true, randomBoolean()); + config = new RealmConfig("test-kerb-realm", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), + new ThreadContext(globalSettings)); + mockNativeRoleMappingStore = roleMappingStore(Arrays.asList("user")); + mockKerberosTicketValidator = mock(KerberosTicketValidator.class); + final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, + () -> new KerberosRealm(config, mockNativeRoleMappingStore, mockKerberosTicketValidator, threadPool, null)); + assertThat(iae.getMessage(), is(equalTo(expectedErrorMessage))); + } } \ No newline at end of file From d4ea440e37dcc870c0a9ba63a2c005ae974729b2 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Tue, 31 Jul 2018 11:18:08 +1000 Subject: [PATCH 10/22] [Kerberos] Add missing javadocs (#32469) This commit adds missing javadocs and fixes few where the build failed when using JDK 11 for compilation. Closes#32461 --- .../authc/kerberos/KerberosTestCase.java | 10 +++++----- .../authc/kerberos/SimpleKdcLdapServer.java | 4 ++-- .../security/authc/kerberos/SpnegoClient.java | 16 +++++++--------- 3 files changed, 14 insertions(+), 16 deletions(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java index 891f400c7be..cad9f689d3d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java @@ -133,7 +133,7 @@ public abstract class KerberosTestCase extends ESTestCase { * @param dir Directory where the key tab would be created. * @param princNames principal names to be created * @return {@link Path} to key tab file. - * @throws Exception + * @throws Exception thrown if principal or keytab could not be created */ protected Path createPrincipalKeyTab(final Path dir, final String... 
princNames) throws Exception { final Path path = dir.resolve(randomAlphaOfLength(10) + ".keytab"); @@ -146,7 +146,7 @@ public abstract class KerberosTestCase extends ESTestCase { * * @param principalName Principal name * @param password Password - * @throws Exception + * @throws Exception thrown if principal could not be created */ protected void createPrincipal(final String principalName, final char[] password) throws Exception { simpleKdcLdapServer.createPrincipal(principalName, new String(password)); @@ -168,8 +168,8 @@ public abstract class KerberosTestCase extends ESTestCase { * @param subject {@link Subject} * @param action {@link PrivilegedExceptionAction} action for performing inside * Subject.doAs - * @return Type of value as returned by PrivilegedAction - * @throws PrivilegedActionException + * @return T Type of value as returned by PrivilegedAction + * @throws PrivilegedActionException when privileged action threw exception */ static T doAsWrapper(final Subject subject, final PrivilegedExceptionAction action) throws PrivilegedActionException { return AccessController.doPrivileged((PrivilegedExceptionAction) () -> Subject.doAs(subject, action)); @@ -181,7 +181,7 @@ public abstract class KerberosTestCase extends ESTestCase { * @param keytabPath {@link Path} to keytab file. * @param content Content for keytab * @return key tab path - * @throws IOException + * @throws IOException if I/O error occurs while writing keytab file */ public static Path writeKeyTab(final Path keytabPath, final String content) throws IOException { try (BufferedWriter bufferedWriter = Files.newBufferedWriter(keytabPath, StandardCharsets.US_ASCII)) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java index 426cacb1a03..53bf9e2b78d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java @@ -66,7 +66,7 @@ public class SimpleKdcLdapServer { * @param orgName Org name for base dn * @param domainName domain name for base dn * @param ldiff for ldap directory. 
- * @throws Exception + * @throws Exception when KDC or Ldap server initialization fails */ public SimpleKdcLdapServer(final Path workDir, final String orgName, final String domainName, final Path ldiff) throws Exception { this.workDir = workDir; @@ -194,7 +194,7 @@ public class SimpleKdcLdapServer { /** * Stop Simple Kdc Server * - * @throws PrivilegedActionException + * @throws PrivilegedActionException when privileged action threw exception */ public synchronized void stop() throws PrivilegedActionException { AccessController.doPrivileged(new PrivilegedExceptionAction() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java index 1f883b928bd..7fcca3d9f58 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java @@ -11,7 +11,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.security.authc.kerberos.KerberosTicketValidator; import org.ietf.jgss.GSSContext; import org.ietf.jgss.GSSCredential; @@ -67,8 +66,8 @@ class SpnegoClient implements AutoCloseable { * @param password password for client * @param servicePrincipalName Service principal name with whom this client * interacts with. - * @throws PrivilegedActionException - * @throws GSSException + * @throws PrivilegedActionException when privileged action threw exception + * @throws GSSException thrown when GSS API error occurs */ SpnegoClient(final String userPrincipalName, final SecureString password, final String servicePrincipalName) throws PrivilegedActionException, GSSException { @@ -99,7 +98,7 @@ class SpnegoClient implements AutoCloseable { * base64 encoded token to be sent to server. * * @return Base64 encoded token - * @throws PrivilegedActionException + * @throws PrivilegedActionException when privileged action threw exception */ String getBase64EncodedTokenForSpnegoHeader() throws PrivilegedActionException { final byte[] outToken = KerberosTestCase.doAsWrapper(loginContext.getSubject(), @@ -114,7 +113,7 @@ class SpnegoClient implements AutoCloseable { * gss negotiation * @return Base64 encoded token to be sent to server. May return {@code null} if * nothing to be sent. - * @throws PrivilegedActionException + * @throws PrivilegedActionException when privileged action threw exception */ String handleResponse(final String base64Token) throws PrivilegedActionException { if (gssContext.isEstablished()) { @@ -160,10 +159,9 @@ class SpnegoClient implements AutoCloseable { * * @param principal Principal name * @param password {@link SecureString} - * @param settings {@link Settings} * @return authenticated {@link LoginContext} instance. Note: This needs to be * closed {@link LoginContext#logout()} after usage. 
- * @throws LoginException + * @throws LoginException thrown if problem with login configuration or when login fails */ private static LoginContext loginUsingPassword(final String principal, final SecureString password) throws LoginException { final Set principals = Collections.singleton(new KerberosPrincipal(principal)); @@ -182,8 +180,8 @@ class SpnegoClient implements AutoCloseable { * Instead of an additional file setting as we do not want the options to be * customizable we are constructing it in memory. *

- * As we are uing this instead of jaas.conf, this requires refresh of - * {@link Configuration} and reqires appropriate security permissions to do so. + * As we are using this instead of jaas.conf, this requires refresh of + * {@link Configuration} and requires appropriate security permissions to do so. */ static class PasswordJaasConf extends Configuration { private final String principal; From 8b57e2e5ba7de60f07d411fec0300cb2178790ba Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 31 Jul 2018 08:25:21 +0100 Subject: [PATCH 11/22] Fix calculation of orientation of polygons (#27967) The method for working out whether a polygon is clockwise or anticlockwise is mostly correct but doesn't work in some rare cases such as the included test case. This commit fixes that. --- .../common/geo/builders/PolygonBuilder.java | 41 +++++++++++++++---- .../geo/builders/PolygonBuilderTests.java | 18 ++++++++ 2 files changed, 51 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index dbcb46d9936..e4d48b8df45 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -46,6 +46,8 @@ import java.util.Locale; import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; +import static org.apache.lucene.geo.GeoUtils.orient; + /** * The {@link PolygonBuilder} implements the groundwork to create polygons. This contains * Methods to wrap polygons at the dateline and building shapes from the data held by the @@ -642,14 +644,8 @@ public class PolygonBuilder extends ShapeBuilder { */ private static Edge[] ring(int component, boolean direction, boolean handedness, Coordinate[] points, int offset, Edge[] edges, int toffset, int length, final AtomicBoolean translated) { - // calculate the direction of the points: - // find the point a the top of the set and check its - // neighbors orientation. So direction is equivalent - // to clockwise/counterclockwise - final int top = top(points, offset, length); - final int prev = (offset + ((top + length - 1) % length)); - final int next = (offset + ((top + 1) % length)); - boolean orientation = points[offset + prev].x > points[offset + next].x; + + boolean orientation = getOrientation(points, offset, length); // OGC requires shell as ccw (Right-Handedness) and holes as cw (Left-Handedness) // since GeoJSON doesn't specify (and doesn't need to) GEO core will assume OGC standards @@ -678,6 +674,35 @@ public class PolygonBuilder extends ShapeBuilder { return concat(component, direction ^ orientation, points, offset, edges, toffset, length); } + /** + * @return whether the points are clockwise (true) or anticlockwise (false) + */ + private static boolean getOrientation(Coordinate[] points, int offset, int length) { + // calculate the direction of the points: find the southernmost point + // and check its neighbors orientation. 
+ + final int top = top(points, offset, length); + final int prev = (top + length - 1) % length; + final int next = (top + 1) % length; + + final int determinantSign = orient( + points[offset + prev].x, points[offset + prev].y, + points[offset + top].x, points[offset + top].y, + points[offset + next].x, points[offset + next].y); + + if (determinantSign == 0) { + // Points are collinear, but `top` is not in the middle if so, so the edges either side of `top` are intersecting. + throw new InvalidShapeException("Cannot determine orientation: edges adjacent to (" + + points[offset + top].x + "," + points[offset +top].y + ") coincide"); + } + + return determinantSign < 0; + } + + /** + * @return the (offset) index of the point that is furthest west amongst + * those points that are the furthest south in the set. + */ private static int top(Coordinate[] points, int offset, int length) { int top = 0; // we start at 1 here since top points to 0 for (int i = 1; i < length; i++) { diff --git a/server/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java b/server/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java index 7f8b893caf0..83bf2bae6a6 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java @@ -143,4 +143,22 @@ public class PolygonBuilderTests extends AbstractShapeBuilderTestCase Date: Tue, 31 Jul 2018 09:41:51 +0200 Subject: [PATCH 12/22] High-level client: fix clusterAlias parsing in SearchHit (#32465) When using cross-cluster search through the high-level REST client, the cluster alias from each search hit was not parsed correctly. It would be part of the index field initially, but overridden just a few lines later once setting the shard target (in case we have enough info to build it from the response). In any case, getClusterAlias returns `null` which is a bug. With this change we rather parse back clusterAliases from the index name, set its corresponding field and properly handle the two possible cases depending on whether we can or cannot build the shard target object. 
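A minimal sketch of the index-name splitting described above (helper names are invented here; the real change lives in SearchHit.fromXContent in the diff that follows): the cluster alias, when present, is the prefix before the first remote-cluster separator, and the remainder is the concrete index name.

    // Illustrative sketch only -- not the actual patch hunk.
    final class ClusterAliasSplitSketch {
        // Assumed value of the separator the patch references via
        // RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR.
        private static final char SEPARATOR = ':';

        // Returns {clusterAlias, indexName}; clusterAlias is null for purely local hits.
        static String[] split(String index) {
            int separatorIndex = index.indexOf(SEPARATOR);
            if (separatorIndex > 0) {
                return new String[] { index.substring(0, separatorIndex), index.substring(separatorIndex + 1) };
            }
            return new String[] { null, index };
        }

        public static void main(String[] args) {
            // "cluster_one:logs-2018" -> alias "cluster_one", index "logs-2018"
            System.out.println(java.util.Arrays.toString(split("cluster_one:logs-2018")));
            // "logs-2018" -> no alias, index "logs-2018"
            System.out.println(java.util.Arrays.toString(split("logs-2018")));
        }
    }

With the name split this way, the two cases from the commit message follow naturally: when both shard id and node id are present the alias feeds into the SearchShardTarget, otherwise it is stored directly on the hit.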
--- .../org/elasticsearch/search/SearchHit.java | 57 ++++++++++++------- .../search/SearchShardTarget.java | 6 +- .../elasticsearch/search/SearchHitTests.java | 5 +- 3 files changed, 44 insertions(+), 24 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 66999c7e389..28a600c0d21 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -19,6 +19,16 @@ package org.elasticsearch.search; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; + import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.OriginalIndices; @@ -51,16 +61,6 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.transport.RemoteClusterAware; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; - import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static java.util.Collections.unmodifiableMap; @@ -107,6 +107,9 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable fields = get(Fields.FIELDS, values, Collections.emptyMap()); SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, fields); - searchHit.index = get(Fields._INDEX, values, null); + String index = get(Fields._INDEX, values, null); + String clusterAlias = null; + if (index != null) { + int indexOf = index.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); + if (indexOf > 0) { + clusterAlias = index.substring(0, indexOf); + index = index.substring(indexOf + 1); + } + } + ShardId shardId = get(Fields._SHARD, values, null); + String nodeId = get(Fields._NODE, values, null); + if (shardId != null && nodeId != null) { + assert shardId.getIndexName().equals(index); + searchHit.shard(new SearchShardTarget(nodeId, shardId, clusterAlias, OriginalIndices.NONE)); + } else { + //these fields get set anyways when setting the shard target, + //but we set them explicitly when we don't have enough info to rebuild the shard target + searchHit.index = index; + searchHit.clusterAlias = clusterAlias; + } searchHit.score(get(Fields._SCORE, values, DEFAULT_SCORE)); searchHit.version(get(Fields._VERSION, values, -1L)); searchHit.sortValues(get(Fields.SORT, values, SearchSortValues.EMPTY)); @@ -561,12 +583,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable matchedQueries = get(Fields.MATCHED_QUERIES, values, null); if (matchedQueries != null) { - searchHit.matchedQueries(matchedQueries.toArray(new String[matchedQueries.size()])); - } - ShardId shardId = get(Fields._SHARD, values, null); - String nodeId = get(Fields._NODE, values, null); - if (shardId != null && nodeId != null) { - searchHit.shard(new SearchShardTarget(nodeId, shardId, null, OriginalIndices.NONE)); + searchHit.matchedQueries(matchedQueries.toArray(new String[0])); } return searchHit; } @@ -842,13 +859,15 @@ public final class SearchHit implements Streamable, 
ToXContentObject, Iterable Date: Tue, 31 Jul 2018 10:29:22 +0200 Subject: [PATCH 13/22] HLRC: Add delete watch action (#32337) Adds the "delete watch" API to the High-Level Rest Client. Relates #29827 --- .../client/RequestConverters.java | 13 ++ .../elasticsearch/client/WatcherClient.java | 30 +++++ .../client/RequestConvertersTests.java | 12 ++ .../org/elasticsearch/client/WatcherIT.java | 37 +++++- .../documentation/WatcherDocumentationIT.java | 45 ++++++- .../high-level/supported-apis.asciidoc | 3 + .../x-pack/watcher/delete-watch.asciidoc | 53 ++++++++ .../x-pack/watcher/put-watch.asciidoc | 8 +- .../core/watcher/client/WatcherClient.java | 4 +- .../actions/delete/DeleteWatchAction.java | 1 + .../delete/DeleteWatchRequestBuilder.java | 2 + .../actions/delete/DeleteWatchResponse.java | 56 -------- .../exporter/local/LocalExporter.java | 2 +- .../rest/action/RestDeleteWatchAction.java | 4 +- .../delete/TransportDeleteWatchAction.java | 4 +- .../test/integration/BasicWatcherTests.java | 2 +- .../action/WatchRequestValidationTests.java | 2 +- .../action/delete/DeleteWatchTests.java | 2 +- .../xpack/watcher}/DeleteWatchRequest.java | 24 +++- .../xpack/watcher/DeleteWatchResponse.java | 123 ++++++++++++++++++ .../watcher/DeleteWatchResponseTests.java | 45 +++++++ 21 files changed, 391 insertions(+), 81 deletions(-) create mode 100644 docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchResponse.java rename x-pack/{plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete => protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher}/DeleteWatchRequest.java (67%) create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java create mode 100644 x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index b9360877dfc..ce6fd1c8c94 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -107,6 +107,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; @@ -1133,6 +1134,18 @@ final class RequestConverters { return request; } + static Request xPackWatcherDeleteWatch(DeleteWatchRequest deleteWatchRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("watcher") + .addPathPartAsIs("watch") + .addPathPart(deleteWatchRequest.getId()) + .build(); + + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + return request; + } + static Request xpackUsage(XPackUsageRequest usageRequest) { Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage"); Params parameters = new Params(request); diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java index 73c92ba5c45..48487926f02 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java @@ -19,12 +19,15 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import java.io.IOException; import static java.util.Collections.emptySet; +import static java.util.Collections.singleton; public final class WatcherClient { @@ -61,4 +64,31 @@ public final class WatcherClient { restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options, PutWatchResponse::fromXContent, listener, emptySet()); } + + /** + * Deletes a watch from the cluster + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public DeleteWatchResponse deleteWatch(DeleteWatchRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackWatcherDeleteWatch, options, + DeleteWatchResponse::fromXContent, singleton(404)); + } + + /** + * Asynchronously deletes a watch from the cluster + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void deleteWatchAsync(DeleteWatchRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherDeleteWatch, options, + DeleteWatchResponse::fromXContent, listener, singleton(404)); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 0415d363c54..e4aa690acb6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -126,6 +126,7 @@ import org.elasticsearch.index.rankeval.RankEvalSpec; import org.elasticsearch.index.rankeval.RatedRequest; import org.elasticsearch.index.rankeval.RestRankEvalAction; import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -2580,6 +2581,17 @@ public class RequestConvertersTests extends ESTestCase { assertThat(bos.toString("UTF-8"), is(body)); } + public void testXPackDeleteWatch() { + DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest(); + String watchId = randomAlphaOfLength(10); + deleteWatchRequest.setId(watchId); + + Request request = RequestConverters.xPackWatcherDeleteWatch(deleteWatchRequest); + assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint()); + assertThat(request.getEntity(), nullValue()); + } + /** * Randomize the {@link FetchSourceContext} request parameters. 
*/ diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java index dec438a47ab..67d1def323a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java @@ -21,6 +21,8 @@ package org.elasticsearch.client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; @@ -30,6 +32,13 @@ public class WatcherIT extends ESRestHighLevelClientTestCase { public void testPutWatch() throws Exception { String watchId = randomAlphaOfLength(10); + PutWatchResponse putWatchResponse = createWatch(watchId); + assertThat(putWatchResponse.isCreated(), is(true)); + assertThat(putWatchResponse.getId(), is(watchId)); + assertThat(putWatchResponse.getVersion(), is(1L)); + } + + private PutWatchResponse createWatch(String watchId) throws Exception { String json = "{ \n" + " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" + " \"input\": { \"none\": {} },\n" + @@ -37,10 +46,30 @@ public class WatcherIT extends ESRestHighLevelClientTestCase { "}"; BytesReference bytesReference = new BytesArray(json); PutWatchRequest putWatchRequest = new PutWatchRequest(watchId, bytesReference, XContentType.JSON); - PutWatchResponse putWatchResponse = highLevelClient().xpack().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT); - assertThat(putWatchResponse.isCreated(), is(true)); - assertThat(putWatchResponse.getId(), is(watchId)); - assertThat(putWatchResponse.getVersion(), is(1L)); + return highLevelClient().xpack().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT); + } + + public void testDeleteWatch() throws Exception { + // delete watch that exists + { + String watchId = randomAlphaOfLength(10); + createWatch(watchId); + DeleteWatchResponse deleteWatchResponse = highLevelClient().xpack().watcher().deleteWatch(new DeleteWatchRequest(watchId), + RequestOptions.DEFAULT); + assertThat(deleteWatchResponse.getId(), is(watchId)); + assertThat(deleteWatchResponse.getVersion(), is(2L)); + assertThat(deleteWatchResponse.isFound(), is(true)); + } + + // delete watch that does not exist + { + String watchId = randomAlphaOfLength(10); + DeleteWatchResponse deleteWatchResponse = highLevelClient().xpack().watcher().deleteWatch(new DeleteWatchRequest(watchId), + RequestOptions.DEFAULT); + assertThat(deleteWatchResponse.getId(), is(watchId)); + assertThat(deleteWatchResponse.getVersion(), is(1L)); + assertThat(deleteWatchResponse.isFound(), is(false)); + } } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java index df51d896cda..47f8510b746 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java @@ -26,6 +26,8 @@ import org.elasticsearch.client.RestHighLevelClient; import 
org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; @@ -34,7 +36,7 @@ import java.util.concurrent.TimeUnit; public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase { - public void testPutWatch() throws Exception { + public void testWatcher() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -88,5 +90,46 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + + { + //tag::x-pack-delete-watch-execute + DeleteWatchRequest request = new DeleteWatchRequest("my_watch_id"); + DeleteWatchResponse response = client.xpack().watcher().deleteWatch(request, RequestOptions.DEFAULT); + //end::x-pack-delete-watch-execute + + //tag::x-pack-delete-watch-response + String watchId = response.getId(); // <1> + boolean found = response.isFound(); // <2> + long version = response.getVersion(); // <3> + //end::x-pack-delete-watch-response + } + + { + DeleteWatchRequest request = new DeleteWatchRequest("my_other_watch_id"); + // tag::x-pack-delete-watch-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(DeleteWatchResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-delete-watch-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-delete-watch-execute-async + client.xpack().watcher().deleteWatchAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-delete-watch-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } } + } diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 25fbcaaaeaa..63aef865955 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -54,11 +54,14 @@ The Java High Level REST Client supports the following Miscellaneous APIs: * <> * <> * <> +* <> +* <> include::miscellaneous/main.asciidoc[] include::miscellaneous/ping.asciidoc[] include::x-pack/x-pack-info.asciidoc[] include::x-pack/watcher/put-watch.asciidoc[] +include::x-pack/watcher/delete-watch.asciidoc[] == Indices APIs diff --git a/docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc b/docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc new file mode 100644 index 00000000000..d5f35817558 --- /dev/null +++ b/docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc @@ -0,0 +1,53 @@ +[[java-rest-high-x-pack-watcher-delete-watch]] +=== X-Pack Delete Watch API + +[[java-rest-high-x-pack-watcher-delete-watch-execution]] +==== Execution + +A watch can be deleted as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-delete-watch-execute] +-------------------------------------------------- + +[[java-rest-high-x-pack-watcher-delete-watch-response]] +==== Response + +The returned 
`DeleteWatchResponse` contains `found`, `id`, +and `version` information. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-response] +-------------------------------------------------- +<1> `_id` contains id of the watch +<2> `found` is a boolean indicating whether the watch was found +<3> `_version` returns the version of the deleted watch + +[[java-rest-high-x-pack-watcher-delete-watch-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-delete-watch-execute-async] +-------------------------------------------------- +<1> The `DeleteWatchRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `DeleteWatchResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-delete-watch-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc b/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc index c803c54eb5e..a76ba407a1a 100644 --- a/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc +++ b/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc @@ -1,5 +1,5 @@ [[java-rest-high-x-pack-watcher-put-watch]] -=== X-Pack Info API +=== X-Pack Put Watch API [[java-rest-high-x-pack-watcher-put-watch-execution]] ==== Execution @@ -16,7 +16,7 @@ include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-execute [[java-rest-high-x-pack-watcher-put-watch-response]] ==== Response -The returned `XPackPutWatchResponse` contain `created`, `id`, +The returned `PutWatchResponse` contains `created`, `id`, and `version` information. ["source","java",subs="attributes,callouts,macros"] @@ -36,7 +36,7 @@ This request can be executed asynchronously: -------------------------------------------------- include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-execute-async] -------------------------------------------------- -<1> The `XPackPutWatchRequest` to execute and the `ActionListener` to use when +<1> The `PutWatchRequest` to execute and the `ActionListener` to use when the execution completes The asynchronous method does not block and returns immediately. Once it is @@ -44,7 +44,7 @@ completed the `ActionListener` is called back using the `onResponse` method if the execution successfully completed or using the `onFailure` method if it failed. 
-A typical listener for `XPackPutWatchResponse` looks like: +A typical listener for `PutWatchResponse` looks like: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java index 063f1f655a4..b9458c23ddc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java @@ -19,9 +19,9 @@ import org.elasticsearch.xpack.core.watcher.transport.actions.activate.ActivateW import org.elasticsearch.xpack.core.watcher.transport.actions.activate.ActivateWatchRequestBuilder; import org.elasticsearch.xpack.core.watcher.transport.actions.activate.ActivateWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchAction; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequestBuilder; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchAction; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchRequestBuilder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java index 8a16755a6db..eb440ddc251 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.watcher.transport.actions.delete; import org.elasticsearch.action.Action; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; /** * This action deletes an watch from in memory, the scheduler and the index diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java index afea505aa4a..6779eec1ddf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.core.watcher.transport.actions.delete; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; /** * A delete document action request builder. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchResponse.java deleted file mode 100644 index bda5ecabaa0..00000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchResponse.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.watcher.transport.actions.delete; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; - -import java.io.IOException; - -public class DeleteWatchResponse extends ActionResponse { - - private String id; - private long version; - private boolean found; - - public DeleteWatchResponse() { - } - - public DeleteWatchResponse(String id, long version, boolean found) { - this.id = id; - this.version = version; - this.found = found; - } - - public String getId() { - return id; - } - - public long getVersion() { - return version; - } - - public boolean isFound() { - return found; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - id = in.readString(); - version = in.readVLong(); - found = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(id); - out.writeVLong(version); - out.writeBoolean(found); - } -} diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index 08618b45cf5..6183dd8665f 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -46,7 +46,7 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse; import org.elasticsearch.xpack.core.watcher.watch.Watch; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java index e41d52c1954..28eb884bbe8 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java @@ -14,8 +14,8 @@ import org.elasticsearch.rest.RestResponse; import 
org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler; import java.io.IOException; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java index d7ff25b623f..9cc94820554 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java @@ -18,8 +18,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchAction; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.xpack.core.watcher.watch.Watch; import java.util.function.Supplier; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java index 18f151b4f6a..c07e6ba3db1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.condition.CompareCondition; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java index 64b913f96a0..62985cad1f1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.test.ESTestCase; 
import org.elasticsearch.xpack.core.watcher.execution.ActionExecutionMode; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.activate.ActivateWatchRequest; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequest; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java index 76f71b9c95e..2f502cb95aa 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse; import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java similarity index 67% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequest.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java index f8c1a71f1eb..1ec83a8c05a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequest.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java @@ -1,9 +1,22 @@ /* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. */ -package org.elasticsearch.xpack.core.watcher.transport.actions.delete; +package org.elasticsearch.protocol.xpack.watcher; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -11,7 +24,6 @@ import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.xpack.core.watcher.support.WatcherUtils; import java.io.IOException; @@ -50,7 +62,7 @@ public class DeleteWatchRequest extends ActionRequest { ActionRequestValidationException validationException = null; if (id == null){ validationException = ValidateActions.addValidationError("watch id is missing", validationException); - } else if (WatcherUtils.isValidId(id) == false) { + } else if (PutWatchRequest.isValidId(id) == false) { validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); } return validationException; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java new file mode 100644 index 00000000000..b644a6a854c --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java @@ -0,0 +1,123 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteWatchResponse extends ActionResponse implements ToXContentObject { + + private static final ObjectParser PARSER + = new ObjectParser<>("x_pack_delete_watch_response", DeleteWatchResponse::new); + static { + PARSER.declareString(DeleteWatchResponse::setId, new ParseField("_id")); + PARSER.declareLong(DeleteWatchResponse::setVersion, new ParseField("_version")); + PARSER.declareBoolean(DeleteWatchResponse::setFound, new ParseField("found")); + } + + private String id; + private long version; + private boolean found; + + public DeleteWatchResponse() { + } + + public DeleteWatchResponse(String id, long version, boolean found) { + this.id = id; + this.version = version; + this.found = found; + } + + public String getId() { + return id; + } + + public long getVersion() { + return version; + } + + public boolean isFound() { + return found; + } + + private void setId(String id) { + this.id = id; + } + + private void setVersion(long version) { + this.version = version; + } + + private void setFound(boolean found) { + this.found = found; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + DeleteWatchResponse that = (DeleteWatchResponse) o; + + return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(found, that.found); + } + + @Override + public int hashCode() { + return Objects.hash(id, version, found); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + version = in.readVLong(); + found = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeVLong(version); + out.writeBoolean(found); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("_id", id) + .field("_version", version) + .field("found", found) + .endObject(); + } + + public static DeleteWatchResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java new file mode 100644 index 00000000000..1dbc4cec321 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class DeleteWatchResponseTests extends AbstractXContentTestCase { + + @Override + protected DeleteWatchResponse createTestInstance() { + String id = randomAlphaOfLength(10); + long version = randomLongBetween(1, 10); + boolean found = randomBoolean(); + return new DeleteWatchResponse(id, version, found); + } + + @Override + protected DeleteWatchResponse doParseInstance(XContentParser parser) throws IOException { + return DeleteWatchResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} From 9703d06321913dab9e59a91b5e4aef41fede6b29 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 31 Jul 2018 13:28:49 +0200 Subject: [PATCH 14/22] Mute QueryProfilerIT#testProfileMatchesRegular() Relates #32492 --- .../org/elasticsearch/search/profile/query/QueryProfilerIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index 6de15146fae..4f725ed2f11 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -105,6 +105,7 @@ public class QueryProfilerIT extends ESIntegTestCase { * search for each query. It then does some basic sanity checking of score and hits * to make sure the profiling doesn't interfere with the hits being returned */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32492") public void testProfileMatchesRegular() throws Exception { createIndex("test"); ensureGreen(); From 97b379e0d4fdcf04c769788644676f9555e1f672 Mon Sep 17 00:00:00 2001 From: Colm O'Shea Date: Tue, 31 Jul 2018 13:33:23 +0100 Subject: [PATCH 15/22] fix no=>not typo (#32463) Found a tiny typo while reading the docs --- docs/reference/aggregations/bucket/terms-aggregation.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc index 551c082a662..f42d176aea0 100644 --- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc @@ -653,7 +653,7 @@ GET /_search // CONSOLE In the above example, buckets will be created for all the tags that has the word `sport` in them, except those starting -with `water_` (so the tag `water_sports` will no be aggregated). The `include` regular expression will determine what +with `water_` (so the tag `water_sports` will not be aggregated). The `include` regular expression will determine what values are "allowed" to be aggregated, while the `exclude` determines the values that should not be aggregated. 
When both are defined, the `exclude` has precedence, meaning, the `include` is evaluated first and only then the `exclude`. From 6f93911955dfe0e694110271cb9dd0ab2991240d Mon Sep 17 00:00:00 2001 From: Paul Sanwald Date: Tue, 31 Jul 2018 08:52:16 -0400 Subject: [PATCH 16/22] Fix AutoIntervalDateHistogram.testReduce random failures (#32301) 1. Refactor the test to use the same roundings as the implementation. 2. Refactor the test verification logic to use `innerIntervals` when rounding. --- .../AutoDateHistogramAggregationBuilder.java | 41 ++++++++---- .../histogram/InternalAutoDateHistogram.java | 8 ++- .../InternalAutoDateHistogramTests.java | 64 +++++++++++++------ 3 files changed, 78 insertions(+), 35 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java index 366060835d8..50a0c85c041 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java @@ -42,6 +42,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.search.internal.SearchContext; +import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.Arrays; @@ -53,7 +54,7 @@ public class AutoDateHistogramAggregationBuilder public static final String NAME = "auto_date_histogram"; - public static final ParseField NUM_BUCKETS_FIELD = new ParseField("buckets"); + private static final ParseField NUM_BUCKETS_FIELD = new ParseField("buckets"); private static final ObjectParser PARSER; static { @@ -63,6 +64,29 @@ public class AutoDateHistogramAggregationBuilder PARSER.declareInt(AutoDateHistogramAggregationBuilder::setNumBuckets, NUM_BUCKETS_FIELD); } + /** + * + * Build roundings, computed dynamically as roundings are time zone dependent. + * The current implementation probably should not be invoked in a tight loop. 
+ * @return Array of RoundingInfo + */ + static RoundingInfo[] buildRoundings(DateTimeZone timeZone) { + RoundingInfo[] roundings = new RoundingInfo[6]; + roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone), + 1000L, 1, 5, 10, 30); + roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone), + 60 * 1000L, 1, 5, 10, 30); + roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone), + 60 * 60 * 1000L, 1, 3, 12); + roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH, timeZone), + 24 * 60 * 60 * 1000L, 1, 7); + roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR, timeZone), + 30 * 24 * 60 * 60 * 1000L, 1, 3); + roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY, timeZone), + 365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100); + return roundings; + } + public static AutoDateHistogramAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { return PARSER.parse(parser, new AutoDateHistogramAggregationBuilder(aggregationName), null); } @@ -116,14 +140,7 @@ public class AutoDateHistogramAggregationBuilder @Override protected ValuesSourceAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig config, AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { - RoundingInfo[] roundings = new RoundingInfo[6]; - roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE), 1000L, 1, 5, 10, 30); - roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR), 60 * 1000L, 1, 5, 10, 30); - roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY), 60 * 60 * 1000L, 1, 3, 12); - roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH), 24 * 60 * 60 * 1000L, 1, 7); - roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR), 30 * 24 * 60 * 60 * 1000L, 1, 3); - roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY), 365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100); - + RoundingInfo[] roundings = buildRoundings(timeZone()); int maxRoundingInterval = Arrays.stream(roundings,0, roundings.length-1) .map(rounding -> rounding.innerIntervals) .flatMapToInt(Arrays::stream) @@ -139,10 +156,10 @@ public class AutoDateHistogramAggregationBuilder return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, parent, subFactoriesBuilder, metaData); } - private Rounding createRounding(DateTimeUnit interval) { + private static Rounding createRounding(DateTimeUnit interval, DateTimeZone timeZone) { Rounding.Builder tzRoundingBuilder = Rounding.builder(interval); - if (timeZone() != null) { - tzRoundingBuilder.timeZone(timeZone()); + if (timeZone != null) { + tzRoundingBuilder.timeZone(timeZone); } Rounding rounding = tzRoundingBuilder.build(); return rounding; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java index 27c195cbdae..6a78ca67249 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -447,7 +447,8 @@ public final class InternalAutoDateHistogram extends return new BucketReduceResult(list, roundingInfo, 
roundingIdx); } - private int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, RoundingInfo[] roundings) { + private int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, + RoundingInfo[] roundings) { if (roundingIdx == roundings.length - 1) { return roundingIdx; } @@ -509,7 +510,8 @@ public final class InternalAutoDateHistogram extends pipelineAggregators(), getMetaData()); } - private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult, ReduceContext reduceContext) { + private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult, + ReduceContext reduceContext) { List buckets = reducedBucketsResult.buckets; RoundingInfo roundingInfo = reducedBucketsResult.roundingInfo; int roundingIdx = reducedBucketsResult.roundingIdx; @@ -539,7 +541,7 @@ public final class InternalAutoDateHistogram extends key = roundingInfo.rounding.round(bucket.key); } reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1); - sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + sameKeyedBuckets.add(new Bucket(Math.round(key), bucket.docCount, format, bucket.aggregations)); } if (sameKeyedBuckets.isEmpty() == false) { reduceContext.consumeBucketsAndMaybeBreak(1); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index 96811ce424c..a14fca63154 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -20,8 +20,6 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.rounding.DateTimeUnit; -import org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; @@ -51,14 +49,6 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati public void setUp() throws Exception { super.setUp(); format = randomNumericDocValueFormat(); - - roundingInfos = new RoundingInfo[6]; - roundingInfos[0] = new RoundingInfo(Rounding.builder(DateTimeUnit.SECOND_OF_MINUTE).build(), 1, 5, 10, 30); - roundingInfos[1] = new RoundingInfo(Rounding.builder(DateTimeUnit.MINUTES_OF_HOUR).build(), 1, 5, 10, 30); - roundingInfos[2] = new RoundingInfo(Rounding.builder(DateTimeUnit.HOUR_OF_DAY).build(), 1, 3, 12); - roundingInfos[3] = new RoundingInfo(Rounding.builder(DateTimeUnit.DAY_OF_MONTH).build(), 1, 7); - roundingInfos[4] = new RoundingInfo(Rounding.builder(DateTimeUnit.MONTH_OF_YEAR).build(), 1, 3); - roundingInfos[5] = new RoundingInfo(Rounding.builder(DateTimeUnit.YEAR_OF_CENTURY).build(), 1, 10, 20, 50, 100); } @Override @@ -66,6 +56,8 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati List pipelineAggregators, Map metaData, InternalAggregations aggregations) { + + roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null); int nbBuckets = randomNumberOfBuckets(); int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1); List buckets = new ArrayList<>(nbBuckets); @@ -81,6 +73,7 @@ public 
class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList()); BucketInfo bucketInfo = new BucketInfo(roundingInfos, randomIntBetween(0, roundingInfos.length - 1), subAggregations); + return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData); } @@ -92,13 +85,50 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati roundingIdx = histogram.getBucketInfo().roundingIdx; } } - Map expectedCounts = new TreeMap<>(); - for (Histogram histogram : inputs) { + RoundingInfo roundingInfo = roundingInfos[roundingIdx]; + + long lowest = Long.MAX_VALUE; + long highest = 0; + for (InternalAutoDateHistogram histogram : inputs) { for (Histogram.Bucket bucket : histogram.getBuckets()) { - expectedCounts.compute(roundingInfos[roundingIdx].rounding.round(((DateTime) bucket.getKey()).getMillis()), - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + long bucketKey = ((DateTime) bucket.getKey()).getMillis(); + if (bucketKey < lowest) { + lowest = bucketKey; + } + if (bucketKey > highest) { + highest = bucketKey; + } } } + long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis(); + long innerIntervalToUse = 0; + for (int interval : roundingInfo.innerIntervals) { + if (normalizedDuration / interval < maxNumberOfBuckets()) { + innerIntervalToUse = interval; + } + } + Map expectedCounts = new TreeMap<>(); + long intervalInMillis = innerIntervalToUse*roundingInfo.getRoughEstimateDurationMillis(); + for (long keyForBucket = roundingInfo.rounding.round(lowest); + keyForBucket <= highest; + keyForBucket = keyForBucket + intervalInMillis) { + expectedCounts.put(keyForBucket, 0L); + + for (InternalAutoDateHistogram histogram : inputs) { + for (Histogram.Bucket bucket : histogram.getBuckets()) { + long bucketKey = ((DateTime) bucket.getKey()).getMillis(); + long roundedBucketKey = roundingInfo.rounding.round(bucketKey); + if (roundedBucketKey >= keyForBucket + && roundedBucketKey < keyForBucket + intervalInMillis) { + long count = bucket.getDocCount(); + expectedCounts.compute(keyForBucket, + (key, oldValue) -> (oldValue == null ? 
0 : oldValue) + count); + } + } + } + } + + Map actualCounts = new TreeMap<>(); for (Histogram.Bucket bucket : reduced.getBuckets()) { actualCounts.compute(((DateTime) bucket.getKey()).getMillis(), @@ -117,12 +147,6 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati return ParsedAutoDateHistogram.class; } - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32215") - public void testReduceRandom() { - super.testReduceRandom(); - } - @Override protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram instance) { String name = instance.getName(); From adb93da974f88d7939eeac2fbe9066d4555ca586 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 31 Jul 2018 15:04:07 +0200 Subject: [PATCH 17/22] Mute KerberosAuthenticationIT Relates #32498 --- .../security/authc/kerberos/KerberosAuthenticationIT.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java b/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java index d5928cb58f6..a36040d1ba8 100644 --- a/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java +++ b/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java @@ -82,6 +82,7 @@ public class KerberosAuthenticationIT extends ESRestTestCase { assertOK(response); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32498") public void testLoginByKeytab() throws IOException, PrivilegedActionException { final String userPrincipalName = System.getProperty(TEST_USER_WITH_KEYTAB_KEY); final String keytabPath = System.getProperty(TEST_USER_WITH_KEYTAB_PATH_KEY); @@ -91,6 +92,7 @@ public class KerberosAuthenticationIT extends ESRestTestCase { executeRequestAndVerifyResponse(userPrincipalName, callbackHandler); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32498") public void testLoginByUsernamePassword() throws IOException, PrivilegedActionException { final String userPrincipalName = System.getProperty(TEST_USER_WITH_PWD_KEY); final String password = System.getProperty(TEST_USER_WITH_PWD_PASSWD_KEY); @@ -100,6 +102,10 @@ public class KerberosAuthenticationIT extends ESRestTestCase { executeRequestAndVerifyResponse(userPrincipalName, callbackHandler); } + public void testSoDoesNotFailWithNoTests() { + // intentionally empty - this is just needed to ensure the build does not fail because we mute its only test. 
+ } + private void executeRequestAndVerifyResponse(final String userPrincipalName, final SpnegoHttpClientConfigCallbackHandler callbackHandler) throws PrivilegedActionException, IOException { final Request request = new Request("GET", "/_xpack/security/_authenticate"); From 4fa92cbf49326b45509ce2593f962f276282dca9 Mon Sep 17 00:00:00 2001 From: Sohaib Iftikhar Date: Tue, 31 Jul 2018 16:11:17 +0200 Subject: [PATCH 18/22] Changed ReindexRequest to use Writeable.Reader (#32401) -- This is a pre-stage for adding the reindex API to the REST high-level-client -- Follows the pattern set in #26315 --- .../reindex/TransportDeleteByQueryAction.java | 4 +-- .../index/reindex/TransportReindexAction.java | 3 +- .../reindex/TransportUpdateByQueryAction.java | 4 +-- .../index/reindex/RoundTripTests.java | 35 ++++++++----------- .../index/reindex/DeleteByQueryRequest.java | 7 ++++ .../index/reindex/ReindexRequest.java | 12 ++++--- .../index/reindex/UpdateByQueryRequest.java | 8 +++-- 7 files changed, 42 insertions(+), 31 deletions(-) diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index c1defe56adc..706f2c0b8f8 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -27,13 +27,13 @@ import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.function.Supplier; public class TransportDeleteByQueryAction extends HandledTransportAction { @@ -46,7 +46,7 @@ public class TransportDeleteByQueryAction extends HandledTransportAction) DeleteByQueryRequest::new); + (Writeable.Reader) DeleteByQueryRequest::new); this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index e54b5f50ae6..1d80c28b585 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -39,6 +39,7 @@ import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; import org.elasticsearch.action.index.IndexRequest; @@ -104,7 +105,7 @@ public class TransportReindexAction extends HandledTransportAction)ReindexRequest::new); this.threadPool = threadPool; this.clusterService = clusterService; this.scriptService = scriptService; diff --git 
a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index 34ae3fdd0c6..00d14822ba0 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.IdFieldMapper; @@ -43,7 +44,6 @@ import org.elasticsearch.transport.TransportService; import java.util.Map; import java.util.function.BiFunction; -import java.util.function.Supplier; public class TransportUpdateByQueryAction extends HandledTransportAction { @@ -56,7 +56,7 @@ public class TransportUpdateByQueryAction extends HandledTransportAction) UpdateByQueryRequest::new); + (Writeable.Reader) UpdateByQueryRequest::new); this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index 2dc4b59e8d9..97809c9bc8d 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -67,19 +67,17 @@ public class RoundTripTests extends ESTestCase { new RemoteInfo(randomAlphaOfLength(5), randomAlphaOfLength(5), port, null, query, username, password, headers, socketTimeout, connectTimeout)); } - ReindexRequest tripped = new ReindexRequest(); - roundTrip(reindex, tripped); + ReindexRequest tripped = new ReindexRequest(toInputByteStream(reindex)); assertRequestEquals(reindex, tripped); // Try slices=auto with a version that doesn't support it, which should fail reindex.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES); - Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_6_0_0_alpha1, reindex, null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(Version.V_6_0_0_alpha1, reindex)); assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. 
Found version [6.0.0-alpha1]", e.getMessage()); // Try regular slices with a version that doesn't support slices=auto, which should succeed - tripped = new ReindexRequest(); reindex.setSlices(between(1, Integer.MAX_VALUE)); - roundTrip(Version.V_6_0_0_alpha1, reindex, tripped); + tripped = new ReindexRequest(toInputByteStream(reindex)); assertRequestEquals(Version.V_6_0_0_alpha1, reindex, tripped); } @@ -89,20 +87,18 @@ public class RoundTripTests extends ESTestCase { if (randomBoolean()) { update.setPipeline(randomAlphaOfLength(5)); } - UpdateByQueryRequest tripped = new UpdateByQueryRequest(); - roundTrip(update, tripped); + UpdateByQueryRequest tripped = new UpdateByQueryRequest(toInputByteStream(update)); assertRequestEquals(update, tripped); assertEquals(update.getPipeline(), tripped.getPipeline()); // Try slices=auto with a version that doesn't support it, which should fail update.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES); - Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_6_0_0_alpha1, update, null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(Version.V_6_0_0_alpha1, update)); assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. Found version [6.0.0-alpha1]", e.getMessage()); // Try regular slices with a version that doesn't support slices=auto, which should succeed - tripped = new UpdateByQueryRequest(); update.setSlices(between(1, Integer.MAX_VALUE)); - roundTrip(Version.V_6_0_0_alpha1, update, tripped); + tripped = new UpdateByQueryRequest(toInputByteStream(update)); assertRequestEquals(update, tripped); assertEquals(update.getPipeline(), tripped.getPipeline()); } @@ -110,19 +106,17 @@ public class RoundTripTests extends ESTestCase { public void testDeleteByQueryRequest() throws IOException { DeleteByQueryRequest delete = new DeleteByQueryRequest(new SearchRequest()); randomRequest(delete); - DeleteByQueryRequest tripped = new DeleteByQueryRequest(); - roundTrip(delete, tripped); + DeleteByQueryRequest tripped = new DeleteByQueryRequest(toInputByteStream(delete)); assertRequestEquals(delete, tripped); // Try slices=auto with a version that doesn't support it, which should fail delete.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES); - Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_6_0_0_alpha1, delete, null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(Version.V_6_0_0_alpha1, delete)); assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. 
Found version [6.0.0-alpha1]", e.getMessage()); // Try regular slices with a version that doesn't support slices=auto, which should succeed - tripped = new DeleteByQueryRequest(); delete.setSlices(between(1, Integer.MAX_VALUE)); - roundTrip(Version.V_6_0_0_alpha1, delete, tripped); + tripped = new DeleteByQueryRequest(toInputByteStream(delete)); assertRequestEquals(delete, tripped); } @@ -198,23 +192,24 @@ public class RoundTripTests extends ESTestCase { request.setTaskId(new TaskId(randomAlphaOfLength(5), randomLong())); } RethrottleRequest tripped = new RethrottleRequest(); - roundTrip(request, tripped); + // We use readFrom here because Rethrottle does not support the Writeable.Reader interface + tripped.readFrom(toInputByteStream(request)); assertEquals(request.getRequestsPerSecond(), tripped.getRequestsPerSecond(), 0.00001); assertArrayEquals(request.getActions(), tripped.getActions()); assertEquals(request.getTaskId(), tripped.getTaskId()); } - private void roundTrip(Streamable example, Streamable empty) throws IOException { - roundTrip(Version.CURRENT, example, empty); + private StreamInput toInputByteStream(Streamable example) throws IOException { + return toInputByteStream(Version.CURRENT, example); } - private void roundTrip(Version version, Streamable example, Streamable empty) throws IOException { + private StreamInput toInputByteStream(Version version, Streamable example) throws IOException { BytesStreamOutput out = new BytesStreamOutput(); out.setVersion(version); example.writeTo(out); StreamInput in = out.bytes().streamInput(); in.setVersion(version); - empty.readFrom(in); + return in; } private Script randomScript() { diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java index aa8543175d9..f848e8722c7 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java @@ -23,8 +23,11 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.tasks.TaskId; +import java.io.IOException; + import static org.elasticsearch.action.ValidateActions.addValidationError; /** @@ -53,6 +56,10 @@ public class DeleteByQueryRequest extends AbstractBulkByScrollRequest Date: Tue, 31 Jul 2018 16:14:37 +0200 Subject: [PATCH 19/22] Docs: Fix README upgrade mention (#32313) The README still mentioned Elasticsearch 1.x. This commit links to the documentation instead and properly formats the last paragraph. --- README.textile | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/README.textile b/README.textile index ce7b3b7d344..aa61c035742 100644 --- a/README.textile +++ b/README.textile @@ -209,10 +209,6 @@ The distribution for each project will be created under the @build/distributions See the "TESTING":TESTING.asciidoc file for more information about running the Elasticsearch test suite. -h3. Upgrading from Elasticsearch 1.x? +h3. Upgrading from older Elasticsearch versions -In order to ensure a smooth upgrade process from earlier versions of -Elasticsearch (1.x), it is required to perform a full cluster restart. 
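The added lines of this hunk are not reproduced here. As a hedged sketch of the Writeable.Reader pattern this change adopts (the transport actions above now register a `Writeable.Reader` such as `DeleteByQueryRequest::new` instead of a `Supplier`), a request typically gains a constructor that deserializes itself directly from a `StreamInput`; the exact code added by the patch may differ:

["source","java"]
--------------------------------------------------
// Sketch only, not the actual patch content: a Writeable.Reader-compatible constructor,
// assuming the superclass exposes a matching StreamInput constructor.
public DeleteByQueryRequest(StreamInput in) throws IOException {
    super(in);
}
--------------------------------------------------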
Please -see the "setup reference": -https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html -for more details on the upgrade process. +In order to ensure a smooth upgrade process from earlier versions of Elasticsearch, please see our "upgrade documentation":https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html for more details on the upgrade process. From b42765c82fa561506f12c0363b2e30e73d352497 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Tue, 31 Jul 2018 16:21:48 +0200 Subject: [PATCH 20/22] Increase max chunk size to 256Mb for repo-azure (#32101) Increase max chunk size to 256Mb for repo-azure --- plugins/repository-azure/build.gradle | 17 +- .../azure-keyvault-core-0.8.0.jar.sha1 | 1 - .../azure-keyvault-core-1.0.0.jar.sha1 | 1 + .../licenses/azure-storage-5.0.0.jar.sha1 | 1 - .../licenses/azure-storage-8.0.0.jar.sha1 | 1 + .../licenses/guava-20.0.jar.sha1 | 1 + .../licenses/guava-LICENSE.txt | 202 ++++++++++++++++++ .../licenses/guava-NOTICE.txt | 0 .../azure/AzureStorageService.java | 5 +- .../azure/AzureRepositorySettingsTests.java | 6 +- 10 files changed, 227 insertions(+), 8 deletions(-) delete mode 100644 plugins/repository-azure/licenses/azure-keyvault-core-0.8.0.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-keyvault-core-1.0.0.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/azure-storage-5.0.0.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-storage-8.0.0.jar.sha1 create mode 100644 plugins/repository-azure/licenses/guava-20.0.jar.sha1 create mode 100644 plugins/repository-azure/licenses/guava-LICENSE.txt create mode 100644 plugins/repository-azure/licenses/guava-NOTICE.txt diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 6d7412f8250..13ec8031c0f 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -23,8 +23,9 @@ esplugin { } dependencies { - compile 'com.microsoft.azure:azure-storage:5.0.0' - compile 'com.microsoft.azure:azure-keyvault-core:0.8.0' + compile 'com.microsoft.azure:azure-storage:8.0.0' + compile 'com.microsoft.azure:azure-keyvault-core:1.0.0' + compile 'com.google.guava:guava:20.0' compile 'org.apache.commons:commons-lang3:3.4' } @@ -40,6 +41,18 @@ thirdPartyAudit.excludes = [ // Optional and not enabled by Elasticsearch 'org.slf4j.Logger', 'org.slf4j.LoggerFactory', + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', ] check { diff --git a/plugins/repository-azure/licenses/azure-keyvault-core-0.8.0.jar.sha1 b/plugins/repository-azure/licenses/azure-keyvault-core-0.8.0.jar.sha1 deleted file mode 100644 index b86c58db842..00000000000 --- a/plugins/repository-azure/licenses/azure-keyvault-core-0.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-35f7ac687462f491d0f8b0d96733dfe347493d70 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-keyvault-core-1.0.0.jar.sha1 b/plugins/repository-azure/licenses/azure-keyvault-core-1.0.0.jar.sha1 new file mode 100644 index 00000000000..8b44e99aee1 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-keyvault-core-1.0.0.jar.sha1 @@ -0,0 +1 @@ +e89dd5e621e21b753096ec6a03f203c01482c612 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-5.0.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-5.0.0.jar.sha1 deleted file mode 100644 index 9882cb80204..00000000000 --- a/plugins/repository-azure/licenses/azure-storage-5.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ba8f04bfeac08016c0f88423a202d0f3aac03aed \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-8.0.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-8.0.0.jar.sha1 new file mode 100644 index 00000000000..4e333ad8241 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-storage-8.0.0.jar.sha1 @@ -0,0 +1 @@ +f6759c16ade4e2a05bc1dfbaf55161b9ed0e78b9 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/guava-20.0.jar.sha1 b/plugins/repository-azure/licenses/guava-20.0.jar.sha1 new file mode 100644 index 00000000000..7b6ae09060b --- /dev/null +++ b/plugins/repository-azure/licenses/guava-20.0.jar.sha1 @@ -0,0 +1 @@ +89507701249388e1ed5ddcf8c41f4ce1be7831ef \ No newline at end of file diff --git a/plugins/repository-azure/licenses/guava-LICENSE.txt b/plugins/repository-azure/licenses/guava-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-azure/licenses/guava-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-azure/licenses/guava-NOTICE.txt b/plugins/repository-azure/licenses/guava-NOTICE.txt new file mode 100644 index 00000000000..e69de29bb2d diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java index 9482182b02d..75bcc3e9c1e 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java @@ -61,7 +61,10 @@ import static java.util.Collections.emptyMap; public class AzureStorageService extends AbstractComponent { public static final ByteSizeValue MIN_CHUNK_SIZE = new ByteSizeValue(1, ByteSizeUnit.BYTES); - public static final ByteSizeValue MAX_CHUNK_SIZE = new ByteSizeValue(64, ByteSizeUnit.MB); + /** + * {@link com.microsoft.azure.storage.blob.BlobConstants#MAX_SINGLE_UPLOAD_BLOB_SIZE_IN_BYTES} + */ + public static final ByteSizeValue MAX_CHUNK_SIZE = new ByteSizeValue(256, ByteSizeUnit.MB); // 'package' for testing volatile Map storageSettings = emptyMap(); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java index b4b71577cbc..43891a8e9d5 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java @@ -104,7 +104,7 @@ public class AzureRepositorySettingsTests extends ESTestCase { assertEquals(AzureStorageService.MAX_CHUNK_SIZE, azureRepository.chunkSize()); // chunk size in settings - int size = randomIntBetween(1, 64); + int size = randomIntBetween(1, 256); azureRepository = azureRepository(Settings.builder().put("chunk_size", size + "mb").build()); assertEquals(new ByteSizeValue(size, ByteSizeUnit.MB), azureRepository.chunkSize()); @@ -120,8 +120,8 @@ public class AzureRepositorySettingsTests extends ESTestCase { // greater than max chunk size not allowed e = expectThrows(IllegalArgumentException.class, () -> - azureRepository(Settings.builder().put("chunk_size", "65mb").build())); - assertEquals("failed to parse value [65mb] for setting [chunk_size], must be <= [64mb]", e.getMessage()); + azureRepository(Settings.builder().put("chunk_size", "257mb").build())); + assertEquals("failed to parse value [257mb] for setting [chunk_size], must be <= [256mb]", e.getMessage()); } } From 5f302580f9dea18b389007b861fc06b013e94499 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 31 Jul 2018 16:17:28 +0200 Subject: [PATCH 21/22] Mute SSLTrustRestrictionsTests on JDK 11 This commit adds an assumption to two test methods in SSLTrustRestrictionsTests that we are not on JDK 11 as the tests currently fail there. 
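For readers skimming the patch, the guard boils down to a single version comparison handed to `assumeTrue`, which skips the test instead of failing it. A minimal standalone sketch follows; it is not part of the patch, the class name is invented, and the only API it leans on is the same `org.elasticsearch.bootstrap.JavaVersion` helper the hunk below imports:

```
import org.elasticsearch.bootstrap.JavaVersion;

// Illustrative only: evaluates the same condition the muted tests hand to
// assumeTrue(...) and prints whether it would hold on the current runtime.
public class Jdk11GuardSketch {
    public static void main(String[] args) {
        boolean beforeJdk11 = JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0;
        System.out.println("assumeTrue condition (running on a JDK before 11): " + beforeJdk11);
    }
}
```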
Relates #29989 --- .../elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java index cf77ca975a4..e1896e01365 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ssl; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -173,6 +174,8 @@ public class SSLTrustRestrictionsTests extends SecurityIntegTestCase { } public void testCertificateWithUntrustedNameFails() throws Exception { + // see https://github.com/elastic/elasticsearch/issues/29989 + assumeTrue("test fails on JDK 11 currently", JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0); writeRestrictions("*.trusted"); try { tryConnect(untrustedCert); @@ -183,6 +186,8 @@ public class SSLTrustRestrictionsTests extends SecurityIntegTestCase { } public void testRestrictionsAreReloaded() throws Exception { + // see https://github.com/elastic/elasticsearch/issues/29989 + assumeTrue("test fails on JDK 11 currently", JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0); writeRestrictions("*"); assertBusy(() -> { try { From 22459576d757833d9dfbca59cb44b09fc8bd77b1 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 31 Jul 2018 10:54:24 -0400 Subject: [PATCH 22/22] Logging: Make node name consistent in logger (#31588) First, some background: we have 15 different methods to get a logger in Elasticsearch but they can be broken down into three broad categories based on what information is provided when building the logger. Just a class like: ``` private static final Logger logger = ESLoggerFactory.getLogger(ActionModule.class); ``` or: ``` protected final Logger logger = Loggers.getLogger(getClass()); ``` The class and settings: ``` this.logger = Loggers.getLogger(getClass(), settings); ``` Or more information like: ``` Loggers.getLogger("index.store.deletes", settings, shardId) ``` The goal of the "class and settings" variant is to attach the node name to the logger. Because we don't always have the settings available, we often use the "just a class" variant and get loggers without node names attached. There isn't any real consistency here. Some loggers get the node name because it is convenient and some do not. This change makes the node name available to all loggers all the time. Almost. There are some caveats around testing that I'll get to. But in *production* code the node name is now available to all loggers. This means we can stop using the "class and settings" variants to fetch loggers, which was the real goal here, but a pleasant side effect is that the node name is now consistent on every log line and can be removed by editing the logging pattern. This is all powered by setting the node name statically on a logging formatter very early in initialization.
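To make that concrete, here is a sketch of what the change means for a logging pattern and the lines it emits; it is not part of the patch. The first line is the new console pattern from `distribution/src/config/log4j2.properties` in the diffs below, while the sample output line, including the node name `node-1`, is invented for illustration:

```
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

# With a node named "node-1", a line formatted by that pattern looks roughly like:
# [2018-07-31T10:54:24,123][INFO ][o.e.n.Node               ] [node-1] starting ...
```

Dropping the `[%node_name]` token from the pattern is all it takes to remove the name again.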
Now to tests: tests can't set the node name statically because subclasses of `ESIntegTestCase` run many nodes in the same JVM, even in the same class loader. Also, lots of tests don't run with a real node so they don't *have* a node name at all. To support multiple nodes in the same JVM, tests suss out the node name from the thread name, which works surprisingly well and is easy to test in a nice way. For those threads that are not part of an `ESIntegTestCase` node we stick whatever useful information we can get from the thread name in the place of the node name. This allows us to keep the logger format consistent. --- .../src/main/resources/log4j2.properties | 2 +- .../src/main/resources/log4j2.properties | 2 +- distribution/src/config/log4j2.properties | 10 +- docs/reference/index-modules/slowlog.asciidoc | 4 +- docs/reference/setup/logging-config.asciidoc | 2 +- .../bootstrap/EvilBootstrapChecksTests.java | 2 +- .../common/logging/EvilLoggerTests.java | 25 +++- .../common/logging/config/log4j2.properties | 7 +- .../logging/config/third/log4j2.properties | 2 +- .../logging/deprecation/log4j2.properties | 6 +- .../logging/find_appender/log4j2.properties | 2 +- .../logging/location_info/log4j2.properties | 4 +- .../logging/no_node_name/log4j2.properties | 25 ++++ .../common/logging/prefix/log4j2.properties | 4 +- .../common/logging/settings/log4j2.properties | 6 +- .../elasticsearch/bootstrap/Bootstrap.java | 8 +- .../bootstrap/BootstrapChecks.java | 10 +- ...ElasticsearchUncaughtExceptionHandler.java | 11 +- .../common/logging/ESLoggerFactory.java | 9 ++ .../common/logging/LogConfigurator.java | 73 +++++++++++- .../elasticsearch/common/logging/Loggers.java | 29 ++--- .../logging/NodeNamePatternConverter.java | 72 ++++++++++++ .../java/org/elasticsearch/node/Node.java | 17 ++- .../bootstrap/BootstrapChecksTests.java | 89 +++++++------- ...icsearchUncaughtExceptionHandlerTests.java | 4 +- .../index/engine/InternalEngineTests.java | 2 +- .../TestThreadInfoPatternConverter.java | 111 ++++++++++++++++++ .../org/elasticsearch/test/ESTestCase.java | 16 ++- .../src/main/resources/log4j2-test.properties | 2 +- .../resources/log4j2.component.properties | 9 ++ .../TestThreadInfoPatternConverterTests.java | 53 +++++++ .../esnative/ESNativeRealmMigrateTool.java | 11 +- 32 files changed, 493 insertions(+), 136 deletions(-) create mode 100644 qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/no_node_name/log4j2.properties create mode 100644 server/src/main/java/org/elasticsearch/common/logging/NodeNamePatternConverter.java create mode 100644 test/framework/src/main/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverter.java create mode 100644 test/framework/src/main/resources/log4j2.component.properties create mode 100644 test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java diff --git a/benchmarks/src/main/resources/log4j2.properties b/benchmarks/src/main/resources/log4j2.properties index c3ae1fe56d3..808d611670f 100644 --- a/benchmarks/src/main/resources/log4j2.properties +++ b/benchmarks/src/main/resources/log4j2.properties @@ -1,7 +1,7 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %m%n # Do not log at all if it is not really critical - we're in a benchmark rootLogger.level = error diff --git a/client/benchmark/src/main/resources/log4j2.properties b/client/benchmark/src/main/resources/log4j2.properties index 8652131bf49..a68c7f5653f 100644 ---
a/client/benchmark/src/main/resources/log4j2.properties +++ b/client/benchmark/src/main/resources/log4j2.properties @@ -1,7 +1,7 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console diff --git a/distribution/src/config/log4j2.properties b/distribution/src/config/log4j2.properties index 589f529e83e..6de21cd48f6 100644 --- a/distribution/src/config/log4j2.properties +++ b/distribution/src/config/log4j2.properties @@ -7,13 +7,13 @@ logger.action.level = debug appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n appender.rolling.type = RollingFile appender.rolling.name = rolling appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.rolling.layout.type = PatternLayout -appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.-10000m%n +appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %.-10000m%n appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz appender.rolling.policies.type = Policies appender.rolling.policies.time.type = TimeBasedTriggeringPolicy @@ -38,7 +38,7 @@ appender.deprecation_rolling.type = RollingFile appender.deprecation_rolling.name = deprecation_rolling appender.deprecation_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log appender.deprecation_rolling.layout.type = PatternLayout -appender.deprecation_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.-10000m%n +appender.deprecation_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %.-10000m%n appender.deprecation_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation-%i.log.gz appender.deprecation_rolling.policies.type = Policies appender.deprecation_rolling.policies.size.type = SizeBasedTriggeringPolicy @@ -55,7 +55,7 @@ appender.index_search_slowlog_rolling.type = RollingFile appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog.log appender.index_search_slowlog_rolling.layout.type = PatternLayout -appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n +appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog-%i.log.gz appender.index_search_slowlog_rolling.policies.type = Policies appender.index_search_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy @@ -72,7 +72,7 @@ appender.index_indexing_slowlog_rolling.type = RollingFile appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling appender.index_indexing_slowlog_rolling.fileName = 
${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog.log appender.index_indexing_slowlog_rolling.layout.type = PatternLayout -appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n +appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog-%i.log.gz appender.index_indexing_slowlog_rolling.policies.type = Policies appender.index_indexing_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy diff --git a/docs/reference/index-modules/slowlog.asciidoc b/docs/reference/index-modules/slowlog.asciidoc index 1dc84f380f5..235256bdce7 100644 --- a/docs/reference/index-modules/slowlog.asciidoc +++ b/docs/reference/index-modules/slowlog.asciidoc @@ -50,7 +50,7 @@ appender.index_search_slowlog_rolling.type = RollingFile appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog.log appender.index_search_slowlog_rolling.layout.type = PatternLayout -appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n +appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog-%i.log.gz appender.index_search_slowlog_rolling.policies.type = Policies appender.index_search_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy @@ -103,7 +103,7 @@ appender.index_indexing_slowlog_rolling.type = RollingFile appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog.log appender.index_indexing_slowlog_rolling.layout.type = PatternLayout -appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n +appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog-%i.log.gz appender.index_indexing_slowlog_rolling.policies.type = Policies appender.index_indexing_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy diff --git a/docs/reference/setup/logging-config.asciidoc b/docs/reference/setup/logging-config.asciidoc index d35b3db2df7..ac949bc941a 100644 --- a/docs/reference/setup/logging-config.asciidoc +++ b/docs/reference/setup/logging-config.asciidoc @@ -25,7 +25,7 @@ appender.rolling.type = RollingFile <1> appender.rolling.name = rolling appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log <2> appender.rolling.layout.type = PatternLayout -appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.-10000m%n +appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %.-10000m%n appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz <3> 
appender.rolling.policies.type = Policies appender.rolling.policies.time.type = TimeBasedTriggeringPolicy <4> diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java index 0dc9ea0a170..b034cd08d77 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java @@ -87,7 +87,7 @@ public class EvilBootstrapChecksTests extends ESTestCase { final boolean enforceLimits = randomBoolean(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> BootstrapChecks.check(new BootstrapContext(Settings.EMPTY, null), enforceLimits, emptyList(), "testInvalidValue")); + () -> BootstrapChecks.check(new BootstrapContext(Settings.EMPTY, null), enforceLimits, emptyList())); final Matcher matcher = containsString( "[es.enforce.bootstrap.checks] must be [true] but was [" + value + "]"); assertThat(e, hasToString(matcher)); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index d4bc754689e..9285abad9b9 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -56,6 +56,7 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.lessThan; @@ -319,9 +320,11 @@ public class EvilLoggerTests extends ESTestCase { assertThat(events.size(), equalTo(expectedLogLines + stackTraceLength)); for (int i = 0; i < expectedLogLines; i++) { if (prefix == null) { - assertThat(events.get(i), startsWith("test")); + assertThat("Contents of [" + path + "] are wrong", + events.get(i), startsWith("[" + getTestName() + "] test")); } else { - assertThat(events.get(i), startsWith("[" + prefix + "] test")); + assertThat("Contents of [" + path + "] are wrong", + events.get(i), startsWith("[" + getTestName() + "][" + prefix + "] test")); } } } @@ -357,6 +360,24 @@ public class EvilLoggerTests extends ESTestCase { } } + public void testNoNodeNameWarning() throws IOException, UserException { + setupLogging("no_node_name"); + + final String path = + System.getProperty("es.logs.base_path") + + System.getProperty("file.separator") + + System.getProperty("es.logs.cluster_name") + ".log"; + final List events = Files.readAllLines(PathUtils.get(path)); + assertThat(events.size(), equalTo(2)); + final String location = "org.elasticsearch.common.logging.LogConfigurator"; + // the first message is a warning for unsupported configuration files + assertLogLine(events.get(0), Level.WARN, location, "\\[null\\] Some logging configurations have %marker but don't " + + "have %node_name. We will automatically add %node_name to the pattern to ease the migration for users " + + "who customize log4j2.properties but will stop this behavior in 7.0. 
You should manually replace " + + "`%node_name` with `\\[%node_name\\]%marker ` in these locations:"); + assertThat(events.get(1), endsWith("no_node_name/log4j2.properties")); + } + private void setupLogging(final String config) throws IOException, UserException { setupLogging(config, Settings.EMPTY); } diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties index 8b956421458..2c9f48a359a 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties @@ -1,13 +1,13 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = [%p][%l] %marker%m%n +appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console @@ -23,10 +23,9 @@ appender.deprecation_file.type = File appender.deprecation_file.name = deprecation_file appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log appender.deprecation_file.layout.type = PatternLayout -appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n +appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n logger.deprecation.name = deprecation logger.deprecation.level = warn logger.deprecation.appenderRef.deprecation_file.ref = deprecation_file logger.deprecation.additivity = false - diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties index ed794cb7c3b..8a9ec160fb1 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties @@ -1,7 +1,7 @@ appender.console3.type = Console appender.console3.name = console3 appender.console3.layout.type = PatternLayout -appender.console3.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console3.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n logger.third.name = third logger.third.level = debug diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties index fd7af2ce731..388c9f9b2fc 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties @@ -1,13 +1,13 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] 
[%test_thread_info]%marker %m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = [%p][%l] %marker%m%n +appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console @@ -17,7 +17,7 @@ appender.deprecation_file.type = File appender.deprecation_file.name = deprecation_file appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log appender.deprecation_file.layout.type = PatternLayout -appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n +appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n logger.deprecation.name = deprecation logger.deprecation.level = warn diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties index 8553ec5e791..d036e571cf0 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties @@ -1,7 +1,7 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n logger.has_console_appender.name = has_console_appender logger.has_console_appender.level = trace diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties index 5fa85b5d156..ee59755a2d7 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties @@ -1,13 +1,13 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = [%p][%l] %marker%m%n +appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/no_node_name/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/no_node_name/log4j2.properties new file mode 100644 index 00000000000..fd7af2ce731 --- /dev/null +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/no_node_name/log4j2.properties @@ -0,0 +1,25 @@ +appender.console.type = Console +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n + +appender.file.type = File +appender.file.name = 
file +appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log +appender.file.layout.type = PatternLayout +appender.file.layout.pattern = [%p][%l] %marker%m%n + +rootLogger.level = info +rootLogger.appenderRef.console.ref = console +rootLogger.appenderRef.file.ref = file + +appender.deprecation_file.type = File +appender.deprecation_file.name = deprecation_file +appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log +appender.deprecation_file.layout.type = PatternLayout +appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n + +logger.deprecation.name = deprecation +logger.deprecation.level = warn +logger.deprecation.appenderRef.deprecation_file.ref = deprecation_file +logger.deprecation.additivity = false diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/prefix/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/prefix/log4j2.properties index 5dfa369a3bd..199fddc31c6 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/prefix/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/prefix/log4j2.properties @@ -1,13 +1,13 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = %marker%m%n +appender.file.layout.pattern = [%test_thread_info]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/settings/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/settings/log4j2.properties index 1acb65511d6..abe4a279dc8 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/settings/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/settings/log4j2.properties @@ -1,13 +1,13 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = [%p][%l] %marker%m%n +appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console @@ -17,7 +17,7 @@ appender.deprecation_file.type = File appender.deprecation_file.name = deprecation_file appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log appender.deprecation_file.layout.type = PatternLayout -appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n +appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n logger.deprecation.name = 
org.elasticsearch.deprecation.common.settings logger.deprecation.level = warn diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 870c537e020..76db6db7674 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.inject.CreationException; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.logging.NodeNamePatternConverter; import org.elasticsearch.common.network.IfConfig; import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.SecureSettings; @@ -287,6 +288,10 @@ final class Bootstrap { final SecureSettings keystore = loadSecureSettings(initialEnv); final Environment environment = createEnvironment(pidFile, keystore, initialEnv.settings(), initialEnv.configFile()); + + String nodeName = Node.NODE_NAME_SETTING.get(environment.settings()); + NodeNamePatternConverter.setNodeName(nodeName); + try { LogConfigurator.configure(environment); } catch (IOException e) { @@ -317,8 +322,7 @@ final class Bootstrap { // install the default uncaught exception handler; must be done before security is // initialized as we do not want to grant the runtime permission // setDefaultUncaughtExceptionHandler - Thread.setDefaultUncaughtExceptionHandler( - new ElasticsearchUncaughtExceptionHandler(() -> Node.NODE_NAME_SETTING.get(environment.settings()))); + Thread.setDefaultUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler()); INSTANCE.setup(true, environment); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index 19fdb8837d6..1a028042db2 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.process.ProcessProbe; -import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeValidationException; import java.io.BufferedReader; @@ -74,8 +73,7 @@ final class BootstrapChecks { combinedChecks.addAll(additionalChecks); check( context, enforceLimits(boundTransportAddress, DiscoveryModule.DISCOVERY_TYPE_SETTING.get(context.settings)), - Collections.unmodifiableList(combinedChecks), - Node.NODE_NAME_SETTING.get(context.settings)); + Collections.unmodifiableList(combinedChecks)); } /** @@ -86,14 +84,12 @@ final class BootstrapChecks { * @param context the current node boostrap context * @param enforceLimits {@code true} if the checks should be enforced or otherwise warned * @param checks the checks to execute - * @param nodeName the node name to be used as a logging prefix */ static void check( final BootstrapContext context, final boolean enforceLimits, - final List checks, - final String nodeName) throws NodeValidationException { - check(context, enforceLimits, checks, Loggers.getLogger(BootstrapChecks.class, nodeName)); + final List checks) throws NodeValidationException { + check(context, enforceLimits, checks, Loggers.getLogger(BootstrapChecks.class)); } /** diff 
--git a/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java b/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java index 857ff65b6c2..1ef9b7740c2 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java @@ -27,16 +27,9 @@ import org.elasticsearch.common.logging.Loggers; import java.io.IOError; import java.security.AccessController; import java.security.PrivilegedAction; -import java.util.Objects; -import java.util.function.Supplier; class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler { - - private final Supplier loggingPrefixSupplier; - - ElasticsearchUncaughtExceptionHandler(final Supplier loggingPrefixSupplier) { - this.loggingPrefixSupplier = Objects.requireNonNull(loggingPrefixSupplier); - } + private static final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class); @Override public void uncaughtException(Thread t, Throwable e) { @@ -70,12 +63,10 @@ class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionH } void onFatalUncaught(final String threadName, final Throwable t) { - final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class, loggingPrefixSupplier.get()); logger.error(() -> new ParameterizedMessage("fatal error in thread [{}], exiting", threadName), t); } void onNonFatalUncaught(final String threadName, final Throwable t) { - final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class, loggingPrefixSupplier.get()); logger.warn(() -> new ParameterizedMessage("uncaught exception in thread [{}]", threadName), t); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 44d7d17b593..caa922b92cf 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -46,6 +46,15 @@ public final class ESLoggerFactory { } public static Logger getLogger(String prefix, Logger logger) { + /* + * In a followup we'll throw an exception if prefix is null or empty + * redirecting folks to LogManager.getLogger. 
+ * + * This and more is tracked in https://github.com/elastic/elasticsearch/issues/32174 + */ + if (prefix == null || prefix.length() == 0) { + return logger; + } return new PrefixLogger((ExtendedLogger)logger, logger.getName(), prefix); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index 01aca53db05..4e3771f3668 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -23,12 +23,16 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.config.AbstractConfiguration; +import org.apache.logging.log4j.core.config.ConfigurationException; +import org.apache.logging.log4j.core.config.ConfigurationSource; import org.apache.logging.log4j.core.config.Configurator; import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilder; import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilderFactory; import org.apache.logging.log4j.core.config.builder.impl.BuiltConfiguration; import org.apache.logging.log4j.core.config.composite.CompositeConfiguration; +import org.apache.logging.log4j.core.config.plugins.util.PluginManager; import org.apache.logging.log4j.core.config.properties.PropertiesConfiguration; +import org.apache.logging.log4j.core.config.properties.PropertiesConfigurationBuilder; import org.apache.logging.log4j.core.config.properties.PropertiesConfigurationFactory; import org.apache.logging.log4j.status.StatusConsoleListener; import org.apache.logging.log4j.status.StatusData; @@ -43,6 +47,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.node.Node; import java.io.IOException; +import java.io.InputStream; import java.nio.file.FileVisitOption; import java.nio.file.FileVisitResult; import java.nio.file.Files; @@ -50,9 +55,12 @@ import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; +import java.util.Collections; import java.util.EnumSet; +import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Properties; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.StreamSupport; @@ -119,6 +127,13 @@ public class LogConfigurator { configure(environment.settings(), environment.configFile(), environment.logsFile()); } + /** + * Load logging plugins so we can have {@code node_name} in the pattern. 
+ */ + public static void loadLog4jPlugins() { + PluginManager.addPackage(LogConfigurator.class.getPackage().getName()); + } + private static void checkErrorListener() { assert errorListenerIsRegistered() : "expected error listener to be registered"; if (error.get()) { @@ -135,6 +150,8 @@ public class LogConfigurator { Objects.requireNonNull(configsPath); Objects.requireNonNull(logsPath); + loadLog4jPlugins(); + setLogConfigurationSystemProperty(logsPath, settings); // we initialize the status logger immediately otherwise Log4j will complain when we try to get the context configureStatusLogger(); @@ -142,7 +159,53 @@ public class LogConfigurator { final LoggerContext context = (LoggerContext) LogManager.getContext(false); final List configurations = new ArrayList<>(); - final PropertiesConfigurationFactory factory = new PropertiesConfigurationFactory(); + + /* + * Subclass the properties configurator to hack the new pattern in + * place so users don't have to change log4j2.properties in + * a minor release. In 7.0 we'll remove this and force users to + * change log4j2.properties. If they don't customize log4j2.properties + * then they won't have to do anything anyway. + * + * Everything in this subclass that isn't marked as a hack is copied + * from log4j2's source. + */ + Set locationsWithDeprecatedPatterns = Collections.synchronizedSet(new HashSet<>()); + final PropertiesConfigurationFactory factory = new PropertiesConfigurationFactory() { + @Override + public PropertiesConfiguration getConfiguration(final LoggerContext loggerContext, final ConfigurationSource source) { + final Properties properties = new Properties(); + try (InputStream configStream = source.getInputStream()) { + properties.load(configStream); + } catch (final IOException ioe) { + throw new ConfigurationException("Unable to load " + source.toString(), ioe); + } + // Hack the new pattern into place + for (String name : properties.stringPropertyNames()) { + if (false == name.endsWith(".pattern")) continue; + // Null is weird here but we can't do anything with it so ignore it + String value = properties.getProperty(name); + if (value == null) continue; + // Tests don't need to be changed + if (value.contains("%test_thread_info")) continue; + /* + * Patterns without a marker are sufficiently customized + * that we don't have an opinion about them. + */ + if (false == value.contains("%marker")) continue; + if (false == value.contains("%node_name")) { + locationsWithDeprecatedPatterns.add(source.getLocation()); + properties.setProperty(name, value.replace("%marker", "[%node_name]%marker ")); + } + } + // end hack + return new PropertiesConfigurationBuilder() + .setConfigurationSource(source) + .setRootProperties(properties) + .setLoggerContext(loggerContext) + .build(); + } + }; final Set options = EnumSet.of(FileVisitOption.FOLLOW_LINKS); Files.walkFileTree(configsPath, options, Integer.MAX_VALUE, new SimpleFileVisitor() { @Override @@ -163,6 +226,14 @@ public class LogConfigurator { context.start(new CompositeConfiguration(configurations)); configureLoggerLevels(settings); + + final String deprecatedLocationsString = String.join("\n ", locationsWithDeprecatedPatterns); + if (deprecatedLocationsString.length() > 0) { + LogManager.getLogger(LogConfigurator.class).warn("Some logging configurations have %marker but don't have %node_name. " + + "We will automatically add %node_name to the pattern to ease the migration for users who customize " + + "log4j2.properties but will stop this behavior in 7.0. 
You should manually replace `%node_name` with " + + "`[%node_name]%marker ` in these locations:\n {}", deprecatedLocationsString); + } } private static void configureStatusLogger() { diff --git a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java index 40983c517c7..58ffe277531 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -31,13 +31,9 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.node.Node; -import java.util.ArrayList; -import java.util.List; import java.util.Map; -import static java.util.Arrays.asList; import static org.elasticsearch.common.util.CollectionUtils.asArrayList; /** @@ -71,29 +67,19 @@ public class Loggers { } public static Logger getLogger(Class clazz, Settings settings, String... prefixes) { - final List prefixesList = prefixesList(settings, prefixes); - return Loggers.getLogger(clazz, prefixesList.toArray(new String[prefixesList.size()])); + return Loggers.getLogger(clazz, prefixes); } public static Logger getLogger(String loggerName, Settings settings, String... prefixes) { - final List prefixesList = prefixesList(settings, prefixes); - return Loggers.getLogger(loggerName, prefixesList.toArray(new String[prefixesList.size()])); - } - - private static List prefixesList(Settings settings, String... prefixes) { - List prefixesList = new ArrayList<>(); - if (Node.NODE_NAME_SETTING.exists(settings)) { - prefixesList.add(Node.NODE_NAME_SETTING.get(settings)); - } - if (prefixes != null && prefixes.length > 0) { - prefixesList.addAll(asList(prefixes)); - } - return prefixesList; + return Loggers.getLogger(loggerName, prefixes); } public static Logger getLogger(Logger parentLogger, String s) { - assert parentLogger instanceof PrefixLogger; - return ESLoggerFactory.getLogger(((PrefixLogger)parentLogger).prefix(), parentLogger.getName() + s); + String prefix = null; + if (parentLogger instanceof PrefixLogger) { + prefix = ((PrefixLogger)parentLogger).prefix(); + } + return ESLoggerFactory.getLogger(prefix, parentLogger.getName() + s); } public static Logger getLogger(String s) { @@ -126,7 +112,6 @@ public class Loggers { } } if (sb.length() > 0) { - sb.append(" "); prefix = sb.toString(); } } diff --git a/server/src/main/java/org/elasticsearch/common/logging/NodeNamePatternConverter.java b/server/src/main/java/org/elasticsearch/common/logging/NodeNamePatternConverter.java new file mode 100644 index 00000000000..ca4c9ab776f --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/logging/NodeNamePatternConverter.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.logging; + +import java.util.Arrays; + +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.pattern.ConverterKeys; +import org.apache.logging.log4j.core.pattern.LogEventPatternConverter; +import org.apache.logging.log4j.core.pattern.PatternConverter; +import org.apache.lucene.util.SetOnce; + +/** + * Converts {@code %node_name} in log4j patterns into the current node name. + * We *could* use a system property lookup instead but this is very explicit + * and fails fast if we try to use the logger without initializing the node + * name. As a bonus it ought to be ever so slightly faster because it doesn't + * have to look up the system property every time. + */ +@Plugin(category = PatternConverter.CATEGORY, name = "NodeNamePatternConverter") +@ConverterKeys({"node_name"}) +public class NodeNamePatternConverter extends LogEventPatternConverter { + private static final SetOnce NODE_NAME = new SetOnce<>(); + + /** + * Set the name of this node. + */ + public static void setNodeName(String nodeName) { + NODE_NAME.set(nodeName); + } + + /** + * Called by log4j2 to initialize this converter. + */ + public static NodeNamePatternConverter newInstance(final String[] options) { + if (options.length > 0) { + throw new IllegalArgumentException("no options supported but options provided: " + + Arrays.toString(options)); + } + return new NodeNamePatternConverter(NODE_NAME.get()); + } + + private final String nodeName; + + private NodeNamePatternConverter(String nodeName) { + super("NodeName", "node_name"); + this.nodeName = nodeName; + } + + @Override + public void format(LogEvent event, StringBuilder toAppendTo) { + toAppendTo.append(nodeName); + } +} diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index c1ce864223b..b5cc7964085 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -231,7 +231,14 @@ public class Node implements Closeable { } private static final String CLIENT_TYPE = "node"; + private final Lifecycle lifecycle = new Lifecycle(); + + /** + * Logger initialized in the ctor because if it were initialized statically + * then it wouldn't get the node name. + */ + private final Logger logger; private final Injector injector; private final Settings settings; private final Settings originalSettings; @@ -257,13 +264,9 @@ public class Node implements Closeable { } protected Node(final Environment environment, Collection> classpathPlugins) { + logger = Loggers.getLogger(Node.class); final List resourcesToClose = new ArrayList<>(); // register everything we need to release in the case of an error boolean success = false; - { - // use temp logger just to say we are starting. 
we can't use it later on because the node name might not be set - Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(environment.settings())); - logger.info("initializing ..."); - } try { originalSettings = environment.settings(); Settings tmpSettings = Settings.builder().put(environment.settings()) @@ -279,7 +282,6 @@ public class Node implements Closeable { final boolean hadPredefinedNodeName = NODE_NAME_SETTING.exists(tmpSettings); final String nodeId = nodeEnvironment.nodeId(); tmpSettings = addNodeNameIfNeeded(tmpSettings, nodeId); - final Logger logger = Loggers.getLogger(Node.class, tmpSettings); // this must be captured after the node name is possibly added to the settings final String nodeName = NODE_NAME_SETTING.get(tmpSettings); if (hadPredefinedNodeName == false) { @@ -631,7 +633,6 @@ public class Node implements Closeable { return this; } - Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings)); logger.info("starting ..."); pluginLifecycleComponents.forEach(LifecycleComponent::start); @@ -742,7 +743,6 @@ public class Node implements Closeable { if (!lifecycle.moveToStopped()) { return this; } - Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings)); logger.info("stopping ..."); injector.getInstance(ResourceWatcherService.class).stop(); @@ -785,7 +785,6 @@ public class Node implements Closeable { return; } - Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings)); logger.info("closing ..."); List toClose = new ArrayList<>(); StopWatch stopWatch = new StopWatch("node_close"); diff --git a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java index e3ba6d19b39..8180dd96e8e 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java @@ -131,7 +131,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows(NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, checks, "testExceptionAggregation")); + () -> BootstrapChecks.check(defaultContext, true, checks)); assertThat(e, hasToString(allOf(containsString("bootstrap checks failed"), containsString("first"), containsString("second")))); final Throwable[] suppressed = e.getSuppressed(); assertThat(suppressed.length, equalTo(2)); @@ -162,7 +162,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat( e.getMessage(), containsString("initial heap size [" + initialHeapSize.get() + "] " + @@ -170,7 +170,7 @@ public class BootstrapChecksTests extends ESTestCase { initialHeapSize.set(maxHeapSize.get()); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // nothing should happen if the initial heap size or the max // heap size is not available @@ -179,7 +179,7 @@ public class BootstrapChecksTests extends ESTestCase { } else { maxHeapSize.set(0); } - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck"); + 
BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testFileDescriptorLimits() throws NodeValidationException { @@ -205,17 +205,17 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows(NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat(e.getMessage(), containsString("max file descriptors")); maxFileDescriptorCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // nothing should happen if current file descriptor count is // not available maxFileDescriptorCount.set(-1); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testFileDescriptorLimitsThrowsOnInvalidLimit() { @@ -262,15 +262,13 @@ public class BootstrapChecksTests extends ESTestCase { () -> BootstrapChecks.check( bootstrapContext, true, - Collections.singletonList(check), - "testFileDescriptorLimitsThrowsOnInvalidLimit")); + Collections.singletonList(check))); assertThat( e.getMessage(), containsString("memory locking requested for elasticsearch process but memory is not locked")); } else { // nothing should happen - BootstrapChecks.check(bootstrapContext, true, Collections.singletonList(check), - "testFileDescriptorLimitsThrowsOnInvalidLimit"); + BootstrapChecks.check(bootstrapContext, true, Collections.singletonList(check)); } } } @@ -287,17 +285,17 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat(e.getMessage(), containsString("max number of threads")); maxNumberOfThreads.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // nothing should happen if current max number of threads is // not available maxNumberOfThreads.set(-1); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testMaxSizeVirtualMemory() throws NodeValidationException { @@ -317,16 +315,16 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat(e.getMessage(), containsString("max size virtual memory")); maxSizeVirtualMemory.set(rlimInfinity); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory"); + BootstrapChecks.check(defaultContext, true, 
Collections.singletonList(check)); // nothing should happen if max size virtual memory is not available maxSizeVirtualMemory.set(Long.MIN_VALUE); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testMaxFileSizeCheck() throws NodeValidationException { @@ -346,16 +344,16 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat(e.getMessage(), containsString("max file size")); maxFileSize.set(rlimInfinity); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // nothing should happen if max file size is not available maxFileSize.set(Long.MIN_VALUE); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testMaxMapCountCheck() throws NodeValidationException { @@ -370,17 +368,17 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat(e.getMessage(), containsString("max virtual memory areas vm.max_map_count")); maxMapCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // nothing should happen if current vm.max_map_count is not // available maxMapCount.set(-1); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testClientJvmCheck() throws NodeValidationException { @@ -394,14 +392,14 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testClientJvmCheck")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat( e.getMessage(), containsString("JVM is using the client VM [Java HotSpot(TM) 32-Bit Client VM] " + "but should be using a server VM for the best performance")); vmName.set("Java HotSpot(TM) 32-Bit Server VM"); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testClientJvmCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testUseSerialGCCheck() throws NodeValidationException { @@ -415,14 +413,14 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testUseSerialGCCheck")); + () -> 
BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat( e.getMessage(), containsString("JVM is using the serial collector but should not be for the best performance; " + "" + "either it's the default for the VM [" + JvmInfo.jvmInfo().getVmName() +"] or -XX:+UseSerialGC was explicitly specified")); useSerialGC.set("false"); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testUseSerialGCCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testSystemCallFilterCheck() throws NodeValidationException { @@ -439,15 +437,14 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck), - "testSystemCallFilterCheck")); + () -> BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck))); assertThat( e.getMessage(), containsString("system call filters failed to install; " + "check the logs and fix your configuration or disable system call filters at your own risk")); isSystemCallFilterInstalled.set(true); - BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck), "testSystemCallFilterCheck"); + BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck)); BootstrapContext context_1 = new BootstrapContext(Settings.builder().put("bootstrap.system_call_filter", false).build(), null); final BootstrapChecks.SystemCallFilterCheck systemCallFilterNotEnabledCheck = new BootstrapChecks.SystemCallFilterCheck() { @Override @@ -456,9 +453,9 @@ public class BootstrapChecksTests extends ESTestCase { } }; isSystemCallFilterInstalled.set(false); - BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck), "testSystemCallFilterCheck"); + BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck)); isSystemCallFilterInstalled.set(true); - BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck), "testSystemCallFilterCheck"); + BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck)); } public void testMightForkCheck() throws NodeValidationException { @@ -553,8 +550,6 @@ public class BootstrapChecksTests extends ESTestCase { final Runnable enableMightFork, final Consumer consumer) throws NodeValidationException { - final String methodName = Thread.currentThread().getStackTrace()[2].getMethodName(); - // if system call filter is disabled, nothing should happen isSystemCallFilterInstalled.set(false); if (randomBoolean()) { @@ -562,13 +557,13 @@ public class BootstrapChecksTests extends ESTestCase { } else { enableMightFork.run(); } - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), methodName); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // if system call filter is enabled, but we will not fork, nothing should // happen isSystemCallFilterInstalled.set(true); disableMightFork.run(); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), methodName); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // if system call filter is enabled, and we might fork, the check should be enforced, regardless of bootstrap checks being enabled // or not @@ -577,7 
+572,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check), methodName)); + () -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check))); consumer.accept(e); } @@ -602,7 +597,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, () -> { - BootstrapChecks.check(defaultContext, true, checks, "testEarlyAccessCheck"); + BootstrapChecks.check(defaultContext, true, checks); }); assertThat( e.getMessage(), @@ -613,7 +608,7 @@ public class BootstrapChecksTests extends ESTestCase { // if not on an early-access build, nothing should happen javaVersion.set(randomFrom("1.8.0_152", "9")); - BootstrapChecks.check(defaultContext, true, checks, "testEarlyAccessCheck"); + BootstrapChecks.check(defaultContext, true, checks); } @@ -649,7 +644,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck))); assertThat( e.getMessage(), containsString( @@ -657,12 +652,12 @@ public class BootstrapChecksTests extends ESTestCase { // if G1GC is disabled, nothing should happen isG1GCEnabled.set(false); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck)); // if on or after update 40, nothing should happen independent of whether or not G1GC is enabled isG1GCEnabled.set(randomBoolean()); jvmVersion.set(String.format(Locale.ROOT, "25.%d-b%d", randomIntBetween(40, 112), randomIntBetween(1, 128))); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck)); final BootstrapChecks.G1GCCheck nonOracleCheck = new BootstrapChecks.G1GCCheck() { @@ -674,7 +669,7 @@ public class BootstrapChecksTests extends ESTestCase { }; // if not on an Oracle JVM, nothing should happen - BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonOracleCheck), "testG1GCCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonOracleCheck)); final BootstrapChecks.G1GCCheck nonJava8Check = new BootstrapChecks.G1GCCheck() { @@ -686,7 +681,7 @@ public class BootstrapChecksTests extends ESTestCase { }; // if not Java 8, nothing should happen - BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonJava8Check), "testG1GCCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonJava8Check)); } public void testAllPermissionCheck() throws NodeValidationException { @@ -701,12 +696,12 @@ public class BootstrapChecksTests extends ESTestCase { final List checks = Collections.singletonList(allPermissionCheck); final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, checks, "testIsAllPermissionCheck")); + () -> BootstrapChecks.check(defaultContext, true, checks)); assertThat(e, hasToString(containsString("granting the all permission effectively disables security"))); // if all 
permissions are not granted, nothing should happen isAllPermissionGranted.set(false); - BootstrapChecks.check(defaultContext, true, checks, "testIsAllPermissionCheck"); + BootstrapChecks.check(defaultContext, true, checks); } public void testAlwaysEnforcedChecks() { @@ -724,7 +719,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException alwaysEnforced = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check), "testAlwaysEnforcedChecks")); + () -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check))); assertThat(alwaysEnforced, hasToString(containsString("error"))); } diff --git a/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java index e2bf07b7d0b..a6e50170d7e 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java @@ -65,7 +65,7 @@ public class ElasticsearchUncaughtExceptionHandlerTests extends ESTestCase { final AtomicInteger observedStatus = new AtomicInteger(); final AtomicReference threadNameReference = new AtomicReference<>(); final AtomicReference throwableReference = new AtomicReference<>(); - thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler(() -> "testUncaughtError") { + thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler() { @Override void halt(int status) { @@ -106,7 +106,7 @@ public class ElasticsearchUncaughtExceptionHandlerTests extends ESTestCase { thread.setName(name); final AtomicReference threadNameReference = new AtomicReference<>(); final AtomicReference throwableReference = new AtomicReference<>(); - thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler(() -> "testUncaughtException") { + thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler() { @Override void halt(int status) { fail(); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index f47cd11e1b4..6169f6d12ee 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1978,7 +1978,7 @@ public class InternalEngineTests extends EngineTestCase { @Override public void append(LogEvent event) { final String formattedMessage = event.getMessage().getFormattedMessage(); - if (event.getLevel() == Level.TRACE && event.getMarker().getName().contains("[index][0] ")) { + if (event.getLevel() == Level.TRACE && event.getMarker().getName().contains("[index][0]")) { if (event.getLoggerName().endsWith(".IW") && formattedMessage.contains("IW: now apply all deletes")) { sawIndexWriterMessage = true; diff --git a/test/framework/src/main/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverter.java b/test/framework/src/main/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverter.java new file mode 100644 index 00000000000..35ad331f532 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverter.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.logging; + +import java.util.Arrays; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.pattern.ConverterKeys; +import org.apache.logging.log4j.core.pattern.LogEventPatternConverter; +import org.apache.logging.log4j.core.pattern.PatternConverter; +import org.elasticsearch.test.ESIntegTestCase; + +/** + * Converts {@code %test_thread_info} in log4j patterns into information + * based on the logging thread's name. If that thread is part of an + * {@link ESIntegTestCase} then this information is the node name. + */ +@Plugin(category = PatternConverter.CATEGORY, name = "TestInfoPatternConverter") +@ConverterKeys({"test_thread_info"}) +public class TestThreadInfoPatternConverter extends LogEventPatternConverter { + /** + * Called by log4j2 to initialize this converter. + */ + public static TestThreadInfoPatternConverter newInstance(final String[] options) { + if (options.length > 0) { + throw new IllegalArgumentException("no options supported but options provided: " + + Arrays.toString(options)); + } + return new TestThreadInfoPatternConverter(); + } + + private TestThreadInfoPatternConverter() { + super("TestInfo", "test_thread_info"); + } + + @Override + public void format(LogEvent event, StringBuilder toAppendTo) { + toAppendTo.append(threadInfo(event.getThreadName())); + } + + private static final Pattern ELASTICSEARCH_THREAD_NAME_PATTERN = + Pattern.compile("elasticsearch\\[(.+)\\]\\[.+\\].+"); + private static final Pattern TEST_THREAD_NAME_PATTERN = + Pattern.compile("TEST-.+\\.(.+)-seed#\\[.+\\]"); + private static final Pattern TEST_SUITE_INIT_THREAD_NAME_PATTERN = + Pattern.compile("SUITE-.+-worker"); + private static final Pattern NOT_YET_NAMED_NODE_THREAD_NAME_PATTERN = + Pattern.compile("test_SUITE-CHILD_VM.+cluster\\[T#(.+)\\]"); + static String threadInfo(String threadName) { + Matcher m = ELASTICSEARCH_THREAD_NAME_PATTERN.matcher(threadName); + if (m.matches()) { + // Thread looks like a node thread so use the node name + return m.group(1); + } + m = TEST_THREAD_NAME_PATTERN.matcher(threadName); + if (m.matches()) { + /* + * Thread looks like a test thread so use the test method name. + * It'd be pretty reasonable not to use a prefix at all here but + * the logger layout pretty much expects one and the test method + * can be pretty nice to have around anyway. + */ + return m.group(1); + } + m = TEST_SUITE_INIT_THREAD_NAME_PATTERN.matcher(threadName); + if (m.matches()) { + /* + * Thread looks like test suite initialization or tear down and + * we don't have any more information to give. 
Like above, we + * could spit out nothing here but the logger layout expects + * something and it *is* nice to know what lines come from test + * teardown and initialization. + */ + return "suite"; + } + m = NOT_YET_NAMED_NODE_THREAD_NAME_PATTERN.matcher(threadName); + if (m.matches()) { + /* + * These are as yet unnamed integ test nodes. I'd prefer to log + * the node name but I don't have it yet. + */ + return "integ_" + m.group(1) + ""; + } + /* + * These are uncategorized threads. We log the entire thread name in + * case it is useful. We wrap it in `[]` so you can tell that it is a + * thread name rather than a node name or something. + */ + return "[" + threadName + "]"; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index ee4f2f3b222..1498a69cf6a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -65,6 +65,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -200,6 +201,8 @@ public abstract class ESTestCase extends LuceneTestCase { System.setProperty("log4j.shutdownHookEnabled", "false"); System.setProperty("log4j2.disable.jmx", "true"); + LogConfigurator.loadLog4jPlugins(); + // Enable Netty leak detection and monitor logger for logged leak errors System.setProperty("io.netty.leakDetection.level", "paranoid"); String leakLoggerName = "io.netty.util.ResourceLeakDetector"; @@ -321,7 +324,7 @@ public abstract class ESTestCase extends LuceneTestCase { @Before public final void before() { - logger.info("[{}]: before test", getTestName()); + logger.info("{}before test", getTestParamsForLogging()); assertNull("Thread context initialized twice", threadContext); if (enableWarningsCheck()) { this.threadContext = new ThreadContext(Settings.EMPTY); @@ -350,7 +353,16 @@ public abstract class ESTestCase extends LuceneTestCase { } ensureAllSearchContextsReleased(); ensureCheckIndexPassed(); - logger.info("[{}]: after test", getTestName()); + logger.info("{}after test", getTestParamsForLogging()); + } + + private String getTestParamsForLogging() { + String name = getTestName(); + int start = name.indexOf('{'); + if (start < 0) return ""; + int end = name.lastIndexOf('}'); + if (end < 0) return ""; + return "[" + name.substring(start + 1, end) + "] "; } private void ensureNoWarnings() throws IOException { diff --git a/test/framework/src/main/resources/log4j2-test.properties b/test/framework/src/main/resources/log4j2-test.properties index 828555ed52e..842c9c79d79 100644 --- a/test/framework/src/main/resources/log4j2-test.properties +++ b/test/framework/src/main/resources/log4j2-test.properties @@ -1,7 +1,7 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%test_thread_info]%marker %m%n rootLogger.level = ${sys:tests.es.logger.level:-info} rootLogger.appenderRef.console.ref = console diff --git 
a/test/framework/src/main/resources/log4j2.component.properties b/test/framework/src/main/resources/log4j2.component.properties new file mode 100644 index 00000000000..8ce0fb493b7 --- /dev/null +++ b/test/framework/src/main/resources/log4j2.component.properties @@ -0,0 +1,9 @@ +# RandomizedRunner renames threads and we'd like to look at accurate thread +# names. Also, this is the default in log4j2's master branch for versions of +# Java after 1.8_102, which most of the versions we use are anyway. After that +# version of Java, calling `Thread.getName()` doesn't allocate, so UNCACHED +# ought to be faster than CACHED because it doesn't have to deal with +# ThreadLocals. +# While we don't use AsyncLogger, we do end up with mutable log events and +# those use this key for configuration. +AsyncLogger.ThreadNameStrategy = UNCACHED diff --git a/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java b/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java new file mode 100644 index 00000000000..8a98b867a46 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.logging; + +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import static org.elasticsearch.common.logging.TestThreadInfoPatternConverter.threadInfo; + +public class TestThreadInfoPatternConverterTests extends ESTestCase { + private static String suiteInfo; + + @BeforeClass + public static void captureSuiteInfo() { + suiteInfo = threadInfo(Thread.currentThread().getName()); + } + + public void testThreadInfo() { + // Threads that are part of a node get the node name + String nodeName = randomAlphaOfLength(5); + String threadName = EsExecutors.threadName(nodeName, randomAlphaOfLength(20)) + + "[T#" + between(0, 1000) + "]"; + assertEquals(nodeName, threadInfo(threadName)); + + // Test threads get the test name + assertEquals(getTestName(), threadInfo(Thread.currentThread().getName())); + + // Suite initialization gets "suite" + assertEquals("suite", suiteInfo); + + // And stuff that doesn't match anything gets wrapped in [] so we can see it + String unmatched = randomAlphaOfLength(5); + assertEquals("[" + unmatched + "]", threadInfo(unmatched)); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java index 4226607f192..f28fe1c297f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java @@ -363,9 +363,16 @@ public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand { final Logger logger = ESLoggerFactory.getLogger(ESNativeRealmMigrateTool.class); Loggers.setLevel(logger, Level.ALL); + final LoggerContext ctx = (LoggerContext) LogManager.getContext(false); + final Configuration config = ctx.getConfiguration(); + // create appender final Appender appender = new AbstractAppender(ESNativeRealmMigrateTool.class.getName(), null, - PatternLayout.newBuilder().withPattern("%m").build()) { + PatternLayout.newBuilder() + // Specify the configuration so log4j doesn't re-initialize + .withConfiguration(config) + .withPattern("%m") + .build()) { @Override public void append(LogEvent event) { switch (event.getLevel().getStandardLevel()) { @@ -384,8 +391,6 @@ public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand { appender.start(); // get the config, detach from parent, remove appenders, add custom appender - final LoggerContext ctx = (LoggerContext) LogManager.getContext(false); - final Configuration config = ctx.getConfiguration(); final LoggerConfig loggerConfig = config.getLoggerConfig(ESNativeRealmMigrateTool.class.getName()); loggerConfig.setParent(null); loggerConfig.getAppenders().forEach((s, a) -> Loggers.removeAppender(logger, a));