diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy index adacc1863c5..3c056a5528b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy @@ -27,7 +27,6 @@ import org.gradle.api.tasks.OutputDirectory import java.nio.file.Files import java.nio.file.Path -import java.util.regex.Matcher /** * Generates REST tests for each snippet marked // TEST. @@ -100,6 +99,14 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { return snippet.language == 'js' || snippet.curl } + /** + * Certain requests should not have the shard failure check because the + * format of the response is incompatible i.e. it is not a JSON object. + */ + static shouldAddShardFailureCheck(String path) { + return path.startsWith('_cat') == false && path.startsWith('_xpack/ml/datafeeds/') == false + } + /** * Converts Kibana's block quoted strings into standard JSON. These * {@code """} delimited strings can be embedded in CONSOLE and can @@ -309,13 +316,11 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { * no shard succeeds. But we need to fail the tests on all of these * because they mean invalid syntax or broken queries or something * else that we don't want to teach people to do. The REST test - * framework doesn't allow us to has assertions in the setup - * section so we have to skip it there. We also have to skip _cat - * actions because they don't return json so we can't is_false - * them. That is ok because they don't have this - * partial-success-is-success thing. + * framework doesn't allow us to have assertions in the setup + * section so we have to skip it there. 
We also omit the assertion + * from APIs that don't return a JSON object */ - if (false == inSetup && false == path.startsWith('_cat')) { + if (false == inSetup && shouldAddShardFailureCheck(path)) { current.println(" - is_false: _shards.failures") } } diff --git a/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy b/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy index d0a7a2825e6..b9863194920 100644 --- a/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy +++ b/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy @@ -19,9 +19,7 @@ package org.elasticsearch.gradle.doc -import org.elasticsearch.gradle.doc.SnippetsTask.Snippet -import org.gradle.api.InvalidUserDataException - +import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.shouldAddShardFailureCheck import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote class RestTestFromSnippetsTaskTest extends GroovyTestCase { @@ -47,4 +45,10 @@ class RestTestFromSnippetsTaskTest extends GroovyTestCase { assertEquals("\"foo\": \"bort\\n baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"")); } + + void testIsDocWriteRequest() { + assertTrue(shouldAddShardFailureCheck("doc-index/_search")); + assertFalse(shouldAddShardFailureCheck("_cat")) + assertFalse(shouldAddShardFailureCheck("_xpack/ml/datafeeds/datafeed-id/_preview")); + } } diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java index 0ce008908cf..0c1065ad131 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; @@ -40,8 +39,8 @@ public class TransportNoopBulkAction extends HandledTransportAction { @Inject - public TransportNoopSearchAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters - actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, NoopSearchAction.NAME, threadPool, transportService, actionFilters, SearchRequest::new, - indexNameExpressionResolver); + public TransportNoopSearchAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters) { + super(settings, NoopSearchAction.NAME, threadPool, transportService, actionFilters, + (Writeable.Reader) SearchRequest::new); } @Override diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 9d4582494eb..cd67bc8e483 100644 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -39,6 +39,7 @@ import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyReposito import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; @@ -844,6 +845,18 @@ final class RequestConverters { return request; } + static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) { + String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot") + .addPathPart(deleteSnapshotRequest.repository()) + .addPathPart(deleteSnapshotRequest.snapshot()) + .build(); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + + Params parameters = new Params(request); + parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout()); + return request; + } + static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException { String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addPathPart(putIndexTemplateRequest.name()).build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java index b7cd2d52732..36b4f473ce8 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java @@ -28,6 +28,8 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import java.io.IOException; @@ -161,4 +163,34 @@ public final class SnapshotClient { restHighLevelClient.performRequestAsyncAndParseEntity(verifyRepositoryRequest, RequestConverters::verifyRepository, options, VerifyRepositoryResponse::fromXContent, listener, emptySet()); } + + /** + * Deletes a snapshot. + * See Snapshot and Restore + * API on elastic.co + * + * @param deleteSnapshotRequest the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public DeleteSnapshotResponse delete(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options, + DeleteSnapshotResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously deletes a snapshot. + * See Snapshot and Restore + * API on elastic.co + * + * @param deleteSnapshotRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void deleteAsync(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options, + DeleteSnapshotResponse::fromXContent, listener, emptySet()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index e7d56a4332b..eee37cea561 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRe import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.indices.alias.Alias; @@ -1857,6 +1858,25 @@ public class RequestConvertersTests extends ESTestCase { assertThat(expectedParams, equalTo(request.getParameters())); } + public void testDeleteSnapshot() { + Map expectedParams = new HashMap<>(); + String repository = randomIndicesNames(1, 1)[0]; + String snapshot = "snapshot-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); + + String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s", repository, snapshot); + + DeleteSnapshotRequest deleteSnapshotRequest = new DeleteSnapshotRequest(); + deleteSnapshotRequest.repository(repository); + deleteSnapshotRequest.snapshot(snapshot); + setRandomMasterTimeout(deleteSnapshotRequest, expectedParams); + + Request request = RequestConverters.deleteSnapshot(deleteSnapshotRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpDelete.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + assertNull(request.getEntity()); + } + public void testPutTemplateRequest() throws Exception { Map names = new HashMap<>(); names.put("log", "log"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index 3b27c2631f4..f4d325e158b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -28,11 +28,14 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.Locale; import static org.hamcrest.Matchers.equalTo; @@ -46,6 +49,13 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { highLevelClient().snapshot()::createRepositoryAsync); } + private Response createTestSnapshot(String repository, String snapshot) throws IOException { + Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repository, snapshot)); + createSnapshot.addParameter("wait_for_completion", "true"); + return highLevelClient().getLowLevelClient().performRequest(createSnapshot); + } + + public void testCreateRepository() throws IOException { PutRepositoryResponse response = createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}"); assertTrue(response.isAcknowledged()); @@ -108,4 +118,21 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { highLevelClient().snapshot()::verifyRepositoryAsync); assertThat(response.getNodes().size(), equalTo(1)); } + + public void testDeleteSnapshot() throws IOException { + String repository = "test_repository"; + String snapshot = "test_snapshot"; + + PutRepositoryResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}"); + assertTrue(putRepositoryResponse.isAcknowledged()); + + Response putSnapshotResponse = createTestSnapshot(repository, snapshot); + // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead. 
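
As an illustration alongside the integration test above: a minimal sketch of calling the new high-level delete-snapshot API directly, assuming an already-built `RestHighLevelClient` named `client` and an existing repository/snapshot pair; the names are illustrative and imports are elided as in the tagged documentation snippets elsewhere in this change.

[source,java]
--------------------------------------------------
// Delete the snapshot "test_snapshot" from the repository "test_repository".
DeleteSnapshotRequest request = new DeleteSnapshotRequest("test_repository", "test_snapshot");
request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // optional: bound the wait for the master node

DeleteSnapshotResponse response = client.snapshot().delete(request, RequestOptions.DEFAULT);
if (response.isAcknowledged() == false) {
    throw new IllegalStateException("delete of snapshot [test_snapshot] was not acknowledged");
}
--------------------------------------------------
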
+ assertEquals(200, putSnapshotResponse.getStatusLine().getStatusCode()); + + DeleteSnapshotRequest request = new DeleteSnapshotRequest(repository, snapshot); + DeleteSnapshotResponse response = execute(request, highLevelClient().snapshot()::delete, highLevelClient().snapshot()::deleteAsync); + + assertTrue(response.isAcknowledged()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index 8c158a91a51..965f9641e48 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -29,8 +29,12 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; @@ -41,6 +45,7 @@ import org.elasticsearch.repositories.fs.FsRepository; import java.io.IOException; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -69,6 +74,8 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase private static final String repositoryName = "test_repository"; + private static final String snapshotName = "test_snapshot"; + public void testSnapshotCreateRepository() throws IOException { RestHighLevelClient client = highLevelClient(); @@ -360,10 +367,76 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase } } + public void testSnapshotDeleteSnapshot() throws IOException { + RestHighLevelClient client = highLevelClient(); + + createTestRepositories(); + createTestSnapshots(); + + // tag::delete-snapshot-request + DeleteSnapshotRequest request = new DeleteSnapshotRequest(repositoryName); + request.snapshot(snapshotName); + // end::delete-snapshot-request + + // tag::delete-snapshot-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> + request.masterNodeTimeout("1m"); // <2> + // end::delete-snapshot-request-masterTimeout + + // tag::delete-snapshot-execute + DeleteSnapshotResponse response = client.snapshot().delete(request, RequestOptions.DEFAULT); + // end::delete-snapshot-execute + + // tag::delete-snapshot-response + boolean acknowledged = response.isAcknowledged(); // <1> + // end::delete-snapshot-response + assertTrue(acknowledged); + } + + public void testSnapshotDeleteSnapshotAsync() throws InterruptedException { + RestHighLevelClient client = highLevelClient(); + { + DeleteSnapshotRequest request = new 
DeleteSnapshotRequest(); + + // tag::delete-snapshot-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(DeleteSnapshotResponse deleteSnapshotResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::delete-snapshot-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::delete-snapshot-execute-async + client.snapshot().deleteAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::delete-snapshot-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + private void createTestRepositories() throws IOException { PutRepositoryRequest request = new PutRepositoryRequest(repositoryName); request.type(FsRepository.TYPE); request.settings("{\"location\": \".\"}", XContentType.JSON); assertTrue(highLevelClient().snapshot().createRepository(request, RequestOptions.DEFAULT).isAcknowledged()); } + + private void createTestSnapshots() throws IOException { + Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repositoryName, snapshotName)); + createSnapshot.addParameter("wait_for_completion", "true"); + Response response = highLevelClient().getLowLevelClient().performRequest(createSnapshot); + // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead. + assertEquals(200, response.getStatusLine().getStatusCode()); + } } diff --git a/dev-tools/es_release_notes.pl b/dev-tools/es_release_notes.pl index 265df915440..93a4ba74f64 100755 --- a/dev-tools/es_release_notes.pl +++ b/dev-tools/es_release_notes.pl @@ -32,7 +32,7 @@ my @Groups = ( ">enhancement", ">bug", ">regression", ">upgrade" ); my %Ignore = map { $_ => 1 } - ( ">non-issue", ">refactoring", ">docs", ">test", ":Core/Build" ); + ( ">non-issue", ">refactoring", ">docs", ">test", ">test-failure", ":Core/Build" ); my %Group_Labels = ( '>breaking' => 'Breaking changes', diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java new file mode 100644 index 00000000000..761cd9e1be5 --- /dev/null +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.tools.launchers; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Tunes Elasticsearch JVM settings based on inspection of provided JVM options. + */ +final class JvmErgonomics { + private static final long KB = 1024L; + + private static final long MB = 1024L * 1024L; + + private static final long GB = 1024L * 1024L * 1024L; + + + private JvmErgonomics() { + throw new AssertionError("No instances intended"); + } + + /** + * Chooses additional JVM options for Elasticsearch. + * + * @param userDefinedJvmOptions A list of JVM options that have been defined by the user. + * @return A list of additional JVM options to set. + */ + static List choose(List userDefinedJvmOptions) { + List ergonomicChoices = new ArrayList<>(); + Long heapSize = extractHeapSize(userDefinedJvmOptions); + Map systemProperties = extractSystemProperties(userDefinedJvmOptions); + if (heapSize != null) { + if (systemProperties.containsKey("io.netty.allocator.type") == false) { + if (heapSize <= 1 * GB) { + ergonomicChoices.add("-Dio.netty.allocator.type=unpooled"); + } else { + ergonomicChoices.add("-Dio.netty.allocator.type=pooled"); + } + } + } + return ergonomicChoices; + } + + private static final Pattern MAX_HEAP_SIZE = Pattern.compile("^(-Xmx|-XX:MaxHeapSize=)(?\\d+)(?\\w)?$"); + + // package private for testing + static Long extractHeapSize(List userDefinedJvmOptions) { + for (String jvmOption : userDefinedJvmOptions) { + final Matcher matcher = MAX_HEAP_SIZE.matcher(jvmOption); + if (matcher.matches()) { + final long size = Long.parseLong(matcher.group("size")); + final String unit = matcher.group("unit"); + if (unit == null) { + return size; + } else { + switch (unit.toLowerCase(Locale.ROOT)) { + case "k": + return size * KB; + case "m": + return size * MB; + case "g": + return size * GB; + default: + throw new IllegalArgumentException("Unknown unit [" + unit + "] for max heap size in [" + jvmOption + "]"); + } + } + } + } + return null; + } + + private static final Pattern SYSTEM_PROPERTY = Pattern.compile("^-D(?[\\w+].*?)=(?.*)$"); + + // package private for testing + static Map extractSystemProperties(List userDefinedJvmOptions) { + Map systemProperties = new HashMap<>(); + for (String jvmOption : userDefinedJvmOptions) { + final Matcher matcher = SYSTEM_PROPERTY.matcher(jvmOption); + if (matcher.matches()) { + systemProperties.put(matcher.group("key"), matcher.group("value")); + } + } + return systemProperties; + } +} diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java index 7f612132d8c..c19c12cfe44 100644 --- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java @@ -78,6 +78,8 @@ final class JvmOptionsParser { } if (invalidLines.isEmpty()) { + List ergonomicJvmOptions = JvmErgonomics.choose(jvmOptions); + jvmOptions.addAll(ergonomicJvmOptions); final String spaceDelimitedJvmOptions = spaceDelimitJvmOptions(jvmOptions); Launchers.outPrintln(spaceDelimitedJvmOptions); Launchers.exit(0); diff --git a/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/JvmErgonomicsTests.java 
b/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/JvmErgonomicsTests.java new file mode 100644 index 00000000000..4b075d78b70 --- /dev/null +++ b/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/JvmErgonomicsTests.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.tools.launchers; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +public class JvmErgonomicsTests extends LaunchersTestCase { + public void testExtractValidHeapSize() { + assertEquals(Long.valueOf(1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx1024"))); + assertEquals(Long.valueOf(2L * 1024 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx2g"))); + assertEquals(Long.valueOf(32 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx32M"))); + assertEquals(Long.valueOf(32 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-XX:MaxHeapSize=32M"))); + } + + public void testExtractInvalidHeapSize() { + try { + JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx2T")); + fail("Expected IllegalArgumentException to be raised"); + } catch (IllegalArgumentException expected) { + assertEquals("Unknown unit [T] for max heap size in [-Xmx2T]", expected.getMessage()); + } + } + + public void testExtractNoHeapSize() { + assertNull("No spaces allowed", JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx 1024"))); + assertNull("JVM option is not present", JvmErgonomics.extractHeapSize(Collections.singletonList(""))); + assertNull("Multiple JVM options per line", JvmErgonomics.extractHeapSize(Collections.singletonList("-Xms2g -Xmx2g"))); + } + + public void testExtractSystemProperties() { + Map expectedSystemProperties = new HashMap<>(); + expectedSystemProperties.put("file.encoding", "UTF-8"); + expectedSystemProperties.put("kv.setting", "ABC=DEF"); + + Map parsedSystemProperties = JvmErgonomics.extractSystemProperties( + Arrays.asList("-Dfile.encoding=UTF-8", "-Dkv.setting=ABC=DEF")); + + assertEquals(expectedSystemProperties, parsedSystemProperties); + } + + public void testExtractNoSystemProperties() { + Map parsedSystemProperties = JvmErgonomics.extractSystemProperties(Arrays.asList("-Xms1024M", "-Xmx1024M")); + assertTrue(parsedSystemProperties.isEmpty()); + } + + public void testLittleMemoryErgonomicChoices() { + String smallHeap = randomFrom(Arrays.asList("64M", "512M", "1024M", "1G")); + List expectedChoices = 
Collections.singletonList("-Dio.netty.allocator.type=unpooled"); + assertEquals(expectedChoices, JvmErgonomics.choose(Arrays.asList("-Xms" + smallHeap, "-Xmx" + smallHeap))); + } + + public void testPlentyMemoryErgonomicChoices() { + String largeHeap = randomFrom(Arrays.asList("1025M", "2048M", "2G", "8G")); + List expectedChoices = Collections.singletonList("-Dio.netty.allocator.type=pooled"); + assertEquals(expectedChoices, JvmErgonomics.choose(Arrays.asList("-Xms" + largeHeap, "-Xmx" + largeHeap))); + } +} diff --git a/docs/java-rest/high-level/snapshot/delete_snapshot.asciidoc b/docs/java-rest/high-level/snapshot/delete_snapshot.asciidoc new file mode 100644 index 00000000000..a594db5b602 --- /dev/null +++ b/docs/java-rest/high-level/snapshot/delete_snapshot.asciidoc @@ -0,0 +1,73 @@ +[[java-rest-high-snapshot-delete-snapshot]] +=== Delete Snapshot API + +The Delete Snapshot API allows to delete a snapshot. + +[[java-rest-high-snapshot-delete-snapshot-request]] +==== Delete Snapshot Request + +A `DeleteSnapshotRequest`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-request] +-------------------------------------------------- + +==== Optional Arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue` +<2> Timeout to connect to the master node as a `String` + +[[java-rest-high-snapshot-delete-snapshot-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-execute] +-------------------------------------------------- + +[[java-rest-high-snapshot-delete-snapshot-async]] +==== Asynchronous Execution + +The asynchronous execution of a delete snapshot request requires both the +`DeleteSnapshotRequest` instance and an `ActionListener` instance to be +passed to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-execute-async] +-------------------------------------------------- +<1> The `DeleteSnapshotRequest` to execute and the `ActionListener` +to use when the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `DeleteSnapshotResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of a failure. 
The raised exception is provided as an argument + +[[java-rest-high-cluster-delete-snapshot-response]] +==== Delete Snapshot Response + +The returned `DeleteSnapshotResponse` allows to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-response] +-------------------------------------------------- +<1> Indicates the node has acknowledged the request diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 17acc8f13c0..727088aa573 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -136,11 +136,13 @@ The Java High Level REST Client supports the following Snapshot APIs: * <> * <> * <> +* <> include::snapshot/get_repository.asciidoc[] include::snapshot/create_repository.asciidoc[] include::snapshot/delete_repository.asciidoc[] include::snapshot/verify_repository.asciidoc[] +include::snapshot/delete_snapshot.asciidoc[] == Tasks APIs diff --git a/docs/reference/analysis/tokenfilters.asciidoc b/docs/reference/analysis/tokenfilters.asciidoc index dd5cb2e702c..ee891fdd09a 100644 --- a/docs/reference/analysis/tokenfilters.asciidoc +++ b/docs/reference/analysis/tokenfilters.asciidoc @@ -35,6 +35,8 @@ include::tokenfilters/word-delimiter-tokenfilter.asciidoc[] include::tokenfilters/word-delimiter-graph-tokenfilter.asciidoc[] +include::tokenfilters/multiplexer-tokenfilter.asciidoc[] + include::tokenfilters/stemmer-tokenfilter.asciidoc[] include::tokenfilters/stemmer-override-tokenfilter.asciidoc[] diff --git a/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc new file mode 100644 index 00000000000..51937084e39 --- /dev/null +++ b/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc @@ -0,0 +1,116 @@ +[[analysis-multiplexer-tokenfilter]] +=== Multiplexer Token Filter + +A token filter of type `multiplexer` will emit multiple tokens at the same position, +each version of the token having been run through a different filter. Identical +output tokens at the same position will be removed. + +WARNING: If the incoming token stream has duplicate tokens, then these will also be +removed by the multiplexer + +[float] +=== Options +[horizontal] +filters:: a list of token filters to apply to incoming tokens. These can be any + token filters defined elsewhere in the index mappings. Filters can be chained + using a comma-delimited string, so for example `"lowercase, porter_stem"` would + apply the `lowercase` filter and then the `porter_stem` filter to a single token. 
+ +WARNING: Shingle or multi-word synonym token filters will not function normally + when they are declared in the filters array because they read ahead internally + which is unsupported by the multiplexer + +preserve_original:: if `true` (the default) then emit the original token in + addition to the filtered tokens + + +[float] +=== Settings example + +You can set it up like: + +[source,js] +-------------------------------------------------- +PUT /multiplexer_example +{ + "settings" : { + "analysis" : { + "analyzer" : { + "my_analyzer" : { + "tokenizer" : "standard", + "filter" : [ "my_multiplexer" ] + } + }, + "filter" : { + "my_multiplexer" : { + "type" : "multiplexer", + "filters" : [ "lowercase", "lowercase, porter_stem" ] + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +And test it like: + +[source,js] +-------------------------------------------------- +POST /multiplexer_example/_analyze +{ + "analyzer" : "my_analyzer", + "text" : "Going HOME" +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +And it'd respond: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "Going", + "start_offset": 0, + "end_offset": 5, + "type": "", + "position": 0 + }, + { + "token": "going", + "start_offset": 0, + "end_offset": 5, + "type": "", + "position": 0 + }, + { + "token": "go", + "start_offset": 0, + "end_offset": 5, + "type": "", + "position": 0 + }, + { + "token": "HOME", + "start_offset": 6, + "end_offset": 10, + "type": "", + "position": 1 + }, + { + "token": "home", <1> + "start_offset": 6, + "end_offset": 10, + "type": "", + "position": 1 + } + ] +} +-------------------------------------------------- +// TESTRESPONSE + +<1> The stemmer has also emitted a token `home` at position 1, but because it is a +duplicate of this token it has been removed from the token stream \ No newline at end of file diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index bdbffb0a08d..c04bbd68137 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -1028,11 +1028,38 @@ number of slices. Whether query or indexing performance dominates the runtime depends on the documents being reindexed and cluster resources. +[float] +=== Reindexing many indices +If you have many indices to reindex it is generally better to reindex them +one at a time rather than using a glob pattern to pick up many indices. That +way you can resume the process if there are any errors by removing the +partially completed index and starting over at that index. It also makes +parallelizing the process fairly simple: split the list of indices to reindex +and run each list in parallel. + +One off bash scripts seem to work nicely for this: + +[source,bash] +---------------------------------------------------------------- +for index in i1 i2 i3 i4 i5; do + curl -HContent-Type:application/json -XPOST localhost:9200/_reindex?pretty -d'{ + "source": { + "index": "'$index'" + }, + "dest": { + "index": "'$index'-reindexed" + } + }' +done +---------------------------------------------------------------- +// NOTCONSOLE + [float] === Reindex daily indices -You can use `_reindex` in combination with <> -to reindex daily indices to apply a new template to the existing documents. +Notwithstanding the above advice, you can use `_reindex` in combination with +<> to reindex daily indices to apply +a new template to the existing documents. 
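
To keep the examples in this change in one language, here is a rough Java equivalent of the bash loop above, driving the per-index reindex calls through the low-level REST client used elsewhere in this change; the `restClient` instance, the index names, and the use of `Request#setJsonEntity` are assumptions for illustration.

[source,java]
--------------------------------------------------
// Reindex a fixed list of indices one at a time, mirroring the bash loop above,
// so a failure can be fixed and the loop resumed at the offending index.
for (String index : Arrays.asList("i1", "i2", "i3", "i4", "i5")) {
    Request reindex = new Request("POST", "/_reindex");
    reindex.setJsonEntity(
        "{ \"source\": { \"index\": \"" + index + "\" }, " +
        "\"dest\": { \"index\": \"" + index + "-reindexed\" } }");
    Response response = restClient.performRequest(reindex);
    if (response.getStatusLine().getStatusCode() != 200) {
        throw new IllegalStateException("reindex of [" + index + "] failed");
    }
}
--------------------------------------------------
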
Assuming you have indices consisting of documents as follows: diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index d684be80c00..39006d1ab53 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -158,6 +158,9 @@ On macOS, Elasticsearch can also be installed via https://brew.sh[Homebrew]: brew install elasticsearch -------------------------------------------------- +If installation succeeds, Homebrew will finish by saying that you can start Elasticsearch by entering +`elasticsearch`. Do that now. The expected response is described below, under <>. + [float] === Installation example with MSI Windows Installer @@ -216,6 +219,7 @@ And now we are ready to start our node and single cluster: -------------------------------------------------- [float] +[[successfully-running-node]] === Successfully running node If everything goes well with installation, you should see a bunch of messages that look like below: diff --git a/x-pack/docs/en/rest-api/license/delete-license.asciidoc b/docs/reference/licensing/delete-license.asciidoc similarity index 97% rename from x-pack/docs/en/rest-api/license/delete-license.asciidoc rename to docs/reference/licensing/delete-license.asciidoc index 24662664daa..b02406045a9 100644 --- a/x-pack/docs/en/rest-api/license/delete-license.asciidoc +++ b/docs/reference/licensing/delete-license.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[delete-license]] === Delete License API @@ -41,3 +42,4 @@ When the license is successfully deleted, the API returns the following response "acknowledged": true } ------------------------------------------------------------ +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/license/get-basic-status.asciidoc b/docs/reference/licensing/get-basic-status.asciidoc similarity index 98% rename from x-pack/docs/en/rest-api/license/get-basic-status.asciidoc rename to docs/reference/licensing/get-basic-status.asciidoc index c6c6385447a..a9cc9cf67ad 100644 --- a/x-pack/docs/en/rest-api/license/get-basic-status.asciidoc +++ b/docs/reference/licensing/get-basic-status.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[get-basic-status]] === Get Basic Status API diff --git a/x-pack/docs/en/rest-api/license/get-license.asciidoc b/docs/reference/licensing/get-license.asciidoc similarity index 85% rename from x-pack/docs/en/rest-api/license/get-license.asciidoc rename to docs/reference/licensing/get-license.asciidoc index cba6e710576..bf094d99f2f 100644 --- a/x-pack/docs/en/rest-api/license/get-license.asciidoc +++ b/docs/reference/licensing/get-license.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[get-license]] === Get License API @@ -52,11 +53,9 @@ GET _xpack/license "license" : { "status" : "active", "uid" : "cbff45e7-c553-41f7-ae4f-9205eabd80xx", - "type" : "trial", + "type" : "basic", "issue_date" : "2018-02-22T23:12:05.550Z", "issue_date_in_millis" : 1519341125550, - "expiry_date" : "2018-03-24T23:12:05.550Z", - "expiry_date_in_millis" : 1521933125550, "max_nodes" : 1000, "issued_to" : "test", "issuer" : "elasticsearch", @@ -65,11 +64,9 @@ GET _xpack/license } -------------------------------------------------- // TESTRESPONSE[s/"cbff45e7-c553-41f7-ae4f-9205eabd80xx"/$body.license.uid/] -// TESTRESPONSE[s/"trial"/$body.license.type/] +// TESTRESPONSE[s/"basic"/$body.license.type/] // TESTRESPONSE[s/"2018-02-22T23:12:05.550Z"/$body.license.issue_date/] // 
TESTRESPONSE[s/1519341125550/$body.license.issue_date_in_millis/] -// TESTRESPONSE[s/"2018-03-24T23:12:05.550Z"/$body.license.expiry_date/] -// TESTRESPONSE[s/1521933125550/$body.license.expiry_date_in_millis/] // TESTRESPONSE[s/1000/$body.license.max_nodes/] // TESTRESPONSE[s/"test"/$body.license.issued_to/] // TESTRESPONSE[s/"elasticsearch"/$body.license.issuer/] diff --git a/x-pack/docs/en/rest-api/license/get-trial-status.asciidoc b/docs/reference/licensing/get-trial-status.asciidoc similarity index 98% rename from x-pack/docs/en/rest-api/license/get-trial-status.asciidoc rename to docs/reference/licensing/get-trial-status.asciidoc index b2cc1ce1b6c..ec47782a3d2 100644 --- a/x-pack/docs/en/rest-api/license/get-trial-status.asciidoc +++ b/docs/reference/licensing/get-trial-status.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[get-trial-status]] === Get Trial Status API diff --git a/docs/reference/licensing/index.asciidoc b/docs/reference/licensing/index.asciidoc new file mode 100644 index 00000000000..a1dfd398acf --- /dev/null +++ b/docs/reference/licensing/index.asciidoc @@ -0,0 +1,22 @@ +[role="xpack"] +[[licensing-apis]] +== Licensing APIs + +You can use the following APIs to manage your licenses: + +* <> +* <> +* <> +* <> +* <> +* <> +* <> + + +include::delete-license.asciidoc[] +include::get-license.asciidoc[] +include::get-trial-status.asciidoc[] +include::start-trial.asciidoc[] +include::get-basic-status.asciidoc[] +include::start-basic.asciidoc[] +include::update-license.asciidoc[] diff --git a/x-pack/docs/en/rest-api/license/start-basic.asciidoc b/docs/reference/licensing/start-basic.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/license/start-basic.asciidoc rename to docs/reference/licensing/start-basic.asciidoc index 820b2b5eab6..3206dc0801f 100644 --- a/x-pack/docs/en/rest-api/license/start-basic.asciidoc +++ b/docs/reference/licensing/start-basic.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[start-basic]] === Start Basic API diff --git a/x-pack/docs/en/rest-api/license/start-trial.asciidoc b/docs/reference/licensing/start-trial.asciidoc similarity index 98% rename from x-pack/docs/en/rest-api/license/start-trial.asciidoc rename to docs/reference/licensing/start-trial.asciidoc index 341c72853fd..ba1cc0d7866 100644 --- a/x-pack/docs/en/rest-api/license/start-trial.asciidoc +++ b/docs/reference/licensing/start-trial.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[start-trial]] === Start Trial API diff --git a/x-pack/docs/en/rest-api/license/update-license.asciidoc b/docs/reference/licensing/update-license.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/license/update-license.asciidoc rename to docs/reference/licensing/update-license.asciidoc index 54c55398407..b340cf3ed6e 100644 --- a/x-pack/docs/en/rest-api/license/update-license.asciidoc +++ b/docs/reference/licensing/update-license.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[update-license]] === Update License API @@ -123,6 +124,7 @@ receive the following response: } } ------------------------------------------------------------ +// NOTCONSOLE To complete the update, you must re-submit the API request and set the `acknowledge` parameter to `true`. 
For example: diff --git a/docs/reference/mapping/types/ip.asciidoc b/docs/reference/mapping/types/ip.asciidoc index 512b0d72545..695cd1c626b 100644 --- a/docs/reference/mapping/types/ip.asciidoc +++ b/docs/reference/mapping/types/ip.asciidoc @@ -36,6 +36,8 @@ GET my_index/_search // CONSOLE // TESTSETUP +NOTE: You can also store ip ranges in a single field using an <>. + [[ip-params]] ==== Parameters for `ip` fields diff --git a/docs/reference/migration/migrate_7_0/java.asciidoc b/docs/reference/migration/migrate_7_0/java.asciidoc index a686ba0bfbf..169943a16ac 100644 --- a/docs/reference/migration/migrate_7_0/java.asciidoc +++ b/docs/reference/migration/migrate_7_0/java.asciidoc @@ -5,4 +5,10 @@ `isShardsAcked` has been replaced by `isShardsAcknowledged` in `CreateIndexResponse`, `RolloverResponse` and -`CreateIndexClusterStateUpdateResponse`. \ No newline at end of file +`CreateIndexClusterStateUpdateResponse`. + +==== `prepareExecute` removed from the client api + +The `prepareExecute` method which created a request builder has been +removed from the client api. Instead, construct a builder for the +appropriate request directly. diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc index bd66c7f071c..19035d96ae0 100644 --- a/docs/reference/query-dsl/mlt-query.asciidoc +++ b/docs/reference/query-dsl/mlt-query.asciidoc @@ -184,12 +184,6 @@ is the same as `like`. `fields`:: A list of fields to fetch and analyze the text from. -`like_text`:: -The text to find documents like it. - -`ids` or `docs`:: -A list of documents following the same syntax as the <>. - [float] [[mlt-query-term-selection]] ==== Term Selection Parameters diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 8c58246a0a6..e44eea9aa53 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -18,9 +18,9 @@ directly to configure and access {xpack} features. -- -include::{xes-repo-dir}/rest-api/info.asciidoc[] +include::info.asciidoc[] include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[] -include::{xes-repo-dir}/rest-api/licensing.asciidoc[] +include::{es-repo-dir}/licensing/index.asciidoc[] include::{xes-repo-dir}/rest-api/migration.asciidoc[] include::{xes-repo-dir}/rest-api/ml-api.asciidoc[] include::{xes-repo-dir}/rest-api/rollup-api.asciidoc[] diff --git a/x-pack/docs/en/rest-api/info.asciidoc b/docs/reference/rest-api/info.asciidoc similarity index 84% rename from x-pack/docs/en/rest-api/info.asciidoc rename to docs/reference/rest-api/info.asciidoc index ccb979124f2..1cf4ab563b1 100644 --- a/x-pack/docs/en/rest-api/info.asciidoc +++ b/docs/reference/rest-api/info.asciidoc @@ -1,8 +1,9 @@ [role="xpack"] +[testenv="basic"] [[info-api]] == Info API -The info API provides general information about the installed {xpack}. +The info API provides general information about the installed {xpack} features. 
[float] === Request @@ -55,30 +56,29 @@ Example response: "date" : "2015-04-07T13:34:42Z" }, "license" : { - "uid" : "893361dc-9749-4997-93cb-802e3dofh7aa", - "type" : "trial", - "mode" : "trial", - "status" : "active", - "expiry_date_in_millis" : 1914278399999 + "uid" : "893361dc-9749-4997-93cb-xxx", + "type" : "basic", + "mode" : "basic", + "status" : "active" }, "features" : { "graph" : { "description" : "Graph Data Exploration for the Elastic Stack", - "available" : true, + "available" : false, "enabled" : true }, "logstash" : { "description" : "Logstash management component for X-Pack", - "available" : true, + "available" : false, "enabled" : true }, "ml" : { "description" : "Machine Learning for the Elastic Stack", - "available" : true, + "available" : false, "enabled" : true, "native_code_info" : { - "version" : "6.0.0-alpha1-SNAPSHOT", - "build_hash" : "d081461967d61a" + "version" : "7.0.0-alpha1-SNAPSHOT", + "build_hash" : "99a07c016d5a73" } }, "monitoring" : { @@ -93,12 +93,12 @@ Example response: }, "security" : { "description" : "Security for the Elastic Stack", - "available" : true, + "available" : false, "enabled" : true }, "watcher" : { "description" : "Alerting, Notification and Automation for the Elastic Stack", - "available" : true, + "available" : false, "enabled" : true } }, @@ -107,10 +107,10 @@ Example response: ------------------------------------------------------------ // TESTRESPONSE[s/"hash" : "2798b1a3ce779b3611bb53a0082d4d741e4d3168",/"hash" : "$body.build.hash",/] // TESTRESPONSE[s/"date" : "2015-04-07T13:34:42Z"/"date" : "$body.build.date"/] -// TESTRESPONSE[s/"uid" : "893361dc-9749-4997-93cb-802e3dofh7aa",/"uid": "$body.license.uid",/] +// TESTRESPONSE[s/"uid" : "893361dc-9749-4997-93cb-xxx",/"uid": "$body.license.uid",/] // TESTRESPONSE[s/"expiry_date_in_millis" : 1914278399999/"expiry_date_in_millis" : "$body.license.expiry_date_in_millis"/] -// TESTRESPONSE[s/"version" : "6.0.0-alpha1-SNAPSHOT",/"version": "$body.features.ml.native_code_info.version",/] -// TESTRESPONSE[s/"build_hash" : "d081461967d61a"/"build_hash": "$body.features.ml.native_code_info.build_hash"/] +// TESTRESPONSE[s/"version" : "7.0.0-alpha1-SNAPSHOT",/"version": "$body.features.ml.native_code_info.version",/] +// TESTRESPONSE[s/"build_hash" : "99a07c016d5a73"/"build_hash": "$body.features.ml.native_code_info.build_hash"/] // So much s/// but at least we test that the layout is close to matching.... 
The following example only returns the build and features information: diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java b/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java index 7c718237cd2..f7e6fbb7687 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java @@ -58,7 +58,6 @@ public final class InboundChannelBuffer implements AutoCloseable { this.pageSupplier = pageSupplier; this.pages = new ArrayDeque<>(); this.capacity = PAGE_SIZE * pages.size(); - ensureCapacity(PAGE_SIZE); } public static InboundChannelBuffer allocatingInstance() { diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java index 8dd72e869e8..162094953d2 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java @@ -34,16 +34,20 @@ public class InboundChannelBufferTests extends ESTestCase { new InboundChannelBuffer.Page(ByteBuffer.allocate(BigArrays.BYTE_PAGE_SIZE), () -> { }); - public void testNewBufferHasSinglePage() { + public void testNewBufferNoPages() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); - assertEquals(PAGE_SIZE, channelBuffer.getCapacity()); - assertEquals(PAGE_SIZE, channelBuffer.getRemaining()); + assertEquals(0, channelBuffer.getCapacity()); + assertEquals(0, channelBuffer.getRemaining()); assertEquals(0, channelBuffer.getIndex()); } public void testExpandCapacity() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); + assertEquals(0, channelBuffer.getCapacity()); + assertEquals(0, channelBuffer.getRemaining()); + + channelBuffer.ensureCapacity(PAGE_SIZE); assertEquals(PAGE_SIZE, channelBuffer.getCapacity()); assertEquals(PAGE_SIZE, channelBuffer.getRemaining()); @@ -56,6 +60,7 @@ public class InboundChannelBufferTests extends ESTestCase { public void testExpandCapacityMultiplePages() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); + channelBuffer.ensureCapacity(PAGE_SIZE); assertEquals(PAGE_SIZE, channelBuffer.getCapacity()); @@ -68,6 +73,7 @@ public class InboundChannelBufferTests extends ESTestCase { public void testExpandCapacityRespectsOffset() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); + channelBuffer.ensureCapacity(PAGE_SIZE); assertEquals(PAGE_SIZE, channelBuffer.getCapacity()); assertEquals(PAGE_SIZE, channelBuffer.getRemaining()); @@ -87,6 +93,7 @@ public class InboundChannelBufferTests extends ESTestCase { public void testIncrementIndex() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); + channelBuffer.ensureCapacity(PAGE_SIZE); assertEquals(0, channelBuffer.getIndex()); assertEquals(PAGE_SIZE, channelBuffer.getRemaining()); @@ -99,6 +106,7 @@ public class InboundChannelBufferTests extends ESTestCase { public void testIncrementIndexWithOffset() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); + channelBuffer.ensureCapacity(PAGE_SIZE); assertEquals(0, channelBuffer.getIndex()); assertEquals(PAGE_SIZE, channelBuffer.getRemaining()); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index cdd8101a73c..ca2f74b5efe 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -226,6 +226,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin { filters.put("limit", LimitTokenCountFilterFactory::new); filters.put("lowercase", LowerCaseTokenFilterFactory::new); filters.put("min_hash", MinHashTokenFilterFactory::new); + filters.put("multiplexer", MultiplexerTokenFilterFactory::new); filters.put("ngram", NGramTokenFilterFactory::new); filters.put("nGram", NGramTokenFilterFactory::new); filters.put("pattern_capture", requriesAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new)); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java new file mode 100644 index 00000000000..1cf5303a772 --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java @@ -0,0 +1,195 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter; +import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; +import org.elasticsearch.index.analysis.ReferringFilterFactory; +import org.elasticsearch.index.analysis.TokenFilterFactory; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory implements ReferringFilterFactory { + + private List filters; + private List filterNames; + private final boolean preserveOriginal; + + private static final TokenFilterFactory IDENTITY_FACTORY = new TokenFilterFactory() { + @Override + public String name() { + return "identity"; + } + + @Override + public TokenStream create(TokenStream tokenStream) { + return tokenStream; + } + }; + + public MultiplexerTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) throws IOException { + super(indexSettings, name, settings); + this.filterNames = settings.getAsList("filters"); + this.preserveOriginal = settings.getAsBoolean("preserve_original", true); + } + + @Override + public TokenStream create(TokenStream tokenStream) { + List> functions = new ArrayList<>(); + for (TokenFilterFactory tff : filters) { + functions.add(tff::create); + } + return new RemoveDuplicatesTokenFilter(new MultiplexTokenFilter(tokenStream, functions)); + } + + @Override + public void setReferences(Map factories) { + filters = new ArrayList<>(); + if (preserveOriginal) { + filters.add(IDENTITY_FACTORY); + } + for (String filter : filterNames) { + String[] parts = Strings.tokenizeToStringArray(filter, ","); + if (parts.length == 1) { + filters.add(resolveFilterFactory(factories, parts[0])); + } else { + List chain = new ArrayList<>(); + for (String subfilter : parts) { + chain.add(resolveFilterFactory(factories, subfilter)); + } + filters.add(chainFilters(filter, chain)); + } + } + } + + private TokenFilterFactory chainFilters(String name, List filters) { + return new TokenFilterFactory() { + @Override + public String name() { + return name; + } + + @Override + public TokenStream create(TokenStream tokenStream) { + for (TokenFilterFactory tff : filters) { + tokenStream = tff.create(tokenStream); + } + return tokenStream; + } + }; + } + + private TokenFilterFactory resolveFilterFactory(Map factories, String name) { + if (factories.containsKey(name) == false) { + throw new IllegalArgumentException("Multiplexing filter [" + name() + "] refers to undefined tokenfilter [" + name + "]"); + } else { + return factories.get(name); + } + } + + private final class MultiplexTokenFilter extends TokenFilter { + + private final TokenStream source; + private final int filterCount; + + private int selector; + + /** + * Creates a MultiplexTokenFilter on the given input with a set of filters + */ + MultiplexTokenFilter(TokenStream input, List> filters) { + super(input); + TokenStream source = new MultiplexerFilter(input); + for (int i = 0; 
i < filters.size(); i++) { + final int slot = i; + source = new ConditionalTokenFilter(source, filters.get(i)) { + @Override + protected boolean shouldFilter() { + return slot == selector; + } + }; + } + this.source = source; + this.filterCount = filters.size(); + this.selector = filterCount - 1; + } + + @Override + public boolean incrementToken() throws IOException { + return source.incrementToken(); + } + + @Override + public void end() throws IOException { + source.end(); + } + + @Override + public void reset() throws IOException { + source.reset(); + } + + private final class MultiplexerFilter extends TokenFilter { + + State state; + PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class); + + private MultiplexerFilter(TokenStream input) { + super(input); + } + + @Override + public boolean incrementToken() throws IOException { + if (selector >= filterCount - 1) { + selector = 0; + if (input.incrementToken() == false) { + return false; + } + state = captureState(); + return true; + } + restoreState(state); + posIncAtt.setPositionIncrement(0); + selector++; + return true; + } + + @Override + public void reset() throws IOException { + super.reset(); + selector = filterCount - 1; + this.state = null; + } + } + + } +} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java new file mode 100644 index 00000000000..c39fa05c26f --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java @@ -0,0 +1,106 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.analysis.common; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; + +import java.io.IOException; +import java.util.Collections; + +public class MultiplexerTokenFilterTests extends ESTokenStreamTestCase { + + public void testMultiplexingFilter() throws IOException { + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + Settings indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("index.analysis.filter.t.type", "truncate") + .put("index.analysis.filter.t.length", "2") + .put("index.analysis.filter.multiplexFilter.type", "multiplexer") + .putList("index.analysis.filter.multiplexFilter.filters", "lowercase, t", "uppercase") + .put("index.analysis.analyzer.myAnalyzer.type", "custom") + .put("index.analysis.analyzer.myAnalyzer.tokenizer", "standard") + .putList("index.analysis.analyzer.myAnalyzer.filter", "multiplexFilter") + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); + + IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings), + Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings); + + try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) { + assertNotNull(analyzer); + assertAnalyzesTo(analyzer, "ONe tHree", new String[]{ + "ONe", "on", "ONE", "tHree", "th", "THREE" + }, new int[]{ + 1, 0, 0, 1, 0, 0 + }); + // Duplicates are removed + assertAnalyzesTo(analyzer, "ONe THREE", new String[]{ + "ONe", "on", "ONE", "THREE", "th" + }, new int[]{ + 1, 0, 0, 1, 0, 0 + }); + } + } + + public void testMultiplexingNoOriginal() throws IOException { + + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + Settings indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("index.analysis.filter.t.type", "truncate") + .put("index.analysis.filter.t.length", "2") + .put("index.analysis.filter.multiplexFilter.type", "multiplexer") + .put("index.analysis.filter.multiplexFilter.preserve_original", "false") + .putList("index.analysis.filter.multiplexFilter.filters", "lowercase, t", "uppercase") + .put("index.analysis.analyzer.myAnalyzer.type", "custom") + .put("index.analysis.analyzer.myAnalyzer.tokenizer", "standard") + .putList("index.analysis.analyzer.myAnalyzer.filter", "multiplexFilter") + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); + + IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings), + Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings); + + try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) { + assertNotNull(analyzer); + assertAnalyzesTo(analyzer, "ONe tHree", new String[]{ + "on", "ONE", "th", "THREE" + }, new int[]{ + 1, 
0, 1, 0, + }); + } + + } + +} diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 4b74bb80045..77ad363b506 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -116,9 +115,8 @@ public class GrokProcessorGetAction extends Action { @@ -55,11 +55,12 @@ public class TransportSearchTemplateAction extends HandledTransportAction) SearchTemplateRequest::new); this.scriptService = scriptService; this.searchAction = searchAction; this.xContentRegistry = xContentRegistry; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index 5430303feb2..ea0664b2aa4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; @@ -282,9 +281,8 @@ public class PainlessExecuteAction extends Action) RankEvalRequest::new); this.scriptService = scriptService; this.namedXContentRegistry = namedXContentRegistry; this.client = client; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index e2de5cd4ffc..35aa8d77d10 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -19,13 +19,14 @@ package org.elasticsearch.index.reindex; +import java.util.function.Supplier; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -40,10 +41,10 @@ public class TransportDeleteByQueryAction extends HandledTransportAction) DeleteByQueryRequest::new); 
this.client = client; this.scriptService = scriptService; this.clusterService = clusterService; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 5e0ad0fd3fd..3db3a0d2a91 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -97,18 +97,20 @@ public class TransportReindexAction extends HandledTransportAction { private final Client client; @@ -51,11 +51,10 @@ public class TransportUpdateByQueryAction extends HandledTransportAction) UpdateByQueryRequest::new); this.client = client; this.scriptService = scriptService; this.clusterService = clusterService; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java index 473985d2109..981a417449f 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java @@ -22,6 +22,7 @@ package org.elasticsearch.http.netty4; import io.netty.channel.Channel; import io.netty.channel.ChannelPromise; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.concurrent.CompletableContext; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpResponse; import org.elasticsearch.transport.netty4.Netty4Utils; @@ -31,9 +32,23 @@ import java.net.InetSocketAddress; public class Netty4HttpChannel implements HttpChannel { private final Channel channel; + private final CompletableContext closeContext = new CompletableContext<>(); Netty4HttpChannel(Channel channel) { this.channel = channel; + this.channel.closeFuture().addListener(f -> { + if (f.isSuccess()) { + closeContext.complete(null); + } else { + Throwable cause = f.cause(); + if (cause instanceof Error) { + Netty4Utils.maybeDie(cause); + closeContext.completeExceptionally(new Exception(cause)); + } else { + closeContext.completeExceptionally((Exception) cause); + } + } + }); } @Override @@ -65,6 +80,16 @@ public class Netty4HttpChannel implements HttpChannel { return (InetSocketAddress) channel.remoteAddress(); } + @Override + public void addCloseListener(ActionListener listener) { + closeContext.addListener(ActionListener.toBiConsumer(listener)); + } + + @Override + public boolean isOpen() { + return channel.isOpen(); + } + @Override public void close() { channel.close(); @@ -73,4 +98,12 @@ public class Netty4HttpChannel implements HttpChannel { public Channel getNettyChannel() { return channel; } + + @Override + public String toString() { + return "Netty4HttpChannel{" + + "localAddress=" + getLocalAddress() + + ", remoteAddress=" + getRemoteAddress() + + '}'; + } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java index 4547a63a9a2..124bd607ab7 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java @@ -29,6 +29,8 @@ import org.elasticsearch.ExceptionsHelper; import 
org.elasticsearch.http.HttpPipelinedRequest; import org.elasticsearch.transport.netty4.Netty4Utils; +import static org.elasticsearch.http.netty4.Netty4HttpServerTransport.HTTP_CHANNEL_KEY; + @ChannelHandler.Sharable class Netty4HttpRequestHandler extends SimpleChannelInboundHandler> { @@ -40,7 +42,7 @@ class Netty4HttpRequestHandler extends SimpleChannelInboundHandler msg) throws Exception { - Netty4HttpChannel channel = ctx.channel().attr(Netty4HttpServerTransport.HTTP_CHANNEL_KEY).get(); + Netty4HttpChannel channel = ctx.channel().attr(HTTP_CHANNEL_KEY).get(); FullHttpRequest request = msg.getRequest(); try { @@ -75,7 +77,12 @@ class Netty4HttpRequestHandler extends SimpleChannelInboundHandler serverChannels = new ArrayList<>(); - // package private for testing - Netty4OpenChannelsHandler serverOpenChannels; - - private final Netty4CorsConfig corsConfig; public Netty4HttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, @@ -216,8 +210,6 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { protected void doStart() { boolean success = false; try { - this.serverOpenChannels = new Netty4OpenChannelsHandler(logger); - serverBootstrap = new ServerBootstrap(); serverBootstrap.group(new NioEventLoopGroup(workerCount, daemonThreadFactory(settings, @@ -281,10 +273,9 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { builder.allowCredentials(); } String[] strMethods = Strings.tokenizeToStringArray(SETTING_CORS_ALLOW_METHODS.get(settings), ","); - HttpMethod[] methods = Arrays.asList(strMethods) - .stream() + HttpMethod[] methods = Arrays.stream(strMethods) .map(HttpMethod::valueOf) - .toArray(size -> new HttpMethod[size]); + .toArray(HttpMethod[]::new); return builder.allowedRequestMethods(methods) .maxAge(SETTING_CORS_MAX_AGE.get(settings)) .allowedRequestHeaders(Strings.tokenizeToStringArray(SETTING_CORS_ALLOW_HEADERS.get(settings), ",")) @@ -327,15 +318,21 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { Netty4Utils.closeChannels(serverChannels); } catch (IOException e) { logger.trace("exception while closing channels", e); + } finally { + serverChannels.clear(); } - serverChannels.clear(); } } - if (serverOpenChannels != null) { - serverOpenChannels.close(); - serverOpenChannels = null; + // TODO: Move all of channel closing to abstract class once server channels are handled + try { + CloseableChannel.closeChannels(new ArrayList<>(httpChannels), true); + } catch (Exception e) { + logger.warn("unexpected exception while closing http channels", e); } + httpChannels.clear(); + + if (serverBootstrap != null) { serverBootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS).awaitUninterruptibly(); @@ -349,38 +346,18 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { @Override public HttpStats stats() { - Netty4OpenChannelsHandler channels = serverOpenChannels; - return new HttpStats(channels == null ? 0 : channels.numberOfOpenChannels(), channels == null ? 
0 : channels.totalChannels()); + return new HttpStats(httpChannels.size(), totalChannelsAccepted.get()); } - public Netty4CorsConfig getCorsConfig() { - return corsConfig; - } - - protected void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + @Override + protected void onException(HttpChannel channel, Exception cause) { if (cause instanceof ReadTimeoutException) { if (logger.isTraceEnabled()) { - logger.trace("Read timeout [{}]", ctx.channel().remoteAddress()); + logger.trace("Http read timeout {}", channel); } - ctx.channel().close(); + CloseableChannel.closeChannel(channel); } else { - if (!lifecycle.started()) { - // ignore - return; - } - if (!NetworkExceptionHelper.isCloseConnectionException(cause)) { - logger.warn( - (Supplier<?>) () -> new ParameterizedMessage( - "caught exception while handling client http traffic, closing connection {}", ctx.channel()), - cause); - ctx.channel().close(); - } else { - logger.debug( - (Supplier<?>) () -> new ParameterizedMessage( - "caught exception while handling client http traffic, closing connection {}", ctx.channel()), - cause); - ctx.channel().close(); - } + super.onException(channel, cause); } } @@ -404,9 +381,8 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { @Override protected void initChannel(Channel ch) throws Exception { - Netty4HttpChannel nettyTcpChannel = new Netty4HttpChannel(ch); - ch.attr(HTTP_CHANNEL_KEY).set(nettyTcpChannel); - ch.pipeline().addLast("openChannels", transport.serverOpenChannels); + Netty4HttpChannel nettyHttpChannel = new Netty4HttpChannel(ch); + ch.attr(HTTP_CHANNEL_KEY).set(nettyHttpChannel); ch.pipeline().addLast("read_timeout", new ReadTimeoutHandler(transport.readTimeoutMillis, TimeUnit.MILLISECONDS)); final HttpRequestDecoder decoder = new HttpRequestDecoder( handlingSettings.getMaxInitialLineLength(), @@ -423,10 +399,11 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { ch.pipeline().addLast("encoder_compress", new HttpContentCompressor(handlingSettings.getCompressionLevel())); } if (handlingSettings.isCorsEnabled()) { - ch.pipeline().addLast("cors", new Netty4CorsHandler(transport.getCorsConfig())); + ch.pipeline().addLast("cors", new Netty4CorsHandler(transport.corsConfig)); } ch.pipeline().addLast("pipelining", new Netty4HttpPipeliningHandler(transport.logger, transport.pipeliningMaxEvents)); ch.pipeline().addLast("handler", requestHandler); + transport.serverAcceptedChannel(nettyHttpChannel); } @Override diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java deleted file mode 100644 index 2270c90967f..00000000000 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.transport.netty4; - -import io.netty.channel.Channel; -import io.netty.channel.ChannelFuture; -import io.netty.channel.ChannelFutureListener; -import io.netty.channel.ChannelHandler; -import io.netty.channel.ChannelHandlerContext; -import io.netty.channel.ChannelInboundHandlerAdapter; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.metrics.CounterMetric; - -import java.io.IOException; -import java.util.Collections; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; - -@ChannelHandler.Sharable -public class Netty4OpenChannelsHandler extends ChannelInboundHandlerAdapter implements Releasable { - - final Set openChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); - final CounterMetric openChannelsMetric = new CounterMetric(); - final CounterMetric totalChannelsMetric = new CounterMetric(); - - final Logger logger; - - public Netty4OpenChannelsHandler(Logger logger) { - this.logger = logger; - } - - final ChannelFutureListener remover = new ChannelFutureListener() { - @Override - public void operationComplete(ChannelFuture future) throws Exception { - boolean removed = openChannels.remove(future.channel()); - if (removed) { - openChannelsMetric.dec(); - } - if (logger.isTraceEnabled()) { - logger.trace("channel closed: {}", future.channel()); - } - } - }; - - @Override - public void channelActive(ChannelHandlerContext ctx) throws Exception { - if (logger.isTraceEnabled()) { - logger.trace("channel opened: {}", ctx.channel()); - } - final boolean added = openChannels.add(ctx.channel()); - if (added) { - openChannelsMetric.inc(); - totalChannelsMetric.inc(); - ctx.channel().closeFuture().addListener(remover); - } - - super.channelActive(ctx); - } - - public long numberOfOpenChannels() { - return openChannelsMetric.count(); - } - - public long totalChannels() { - return totalChannelsMetric.count(); - } - - @Override - public void close() { - try { - Netty4Utils.closeChannels(openChannels); - } catch (IOException e) { - logger.trace("exception while closing channels", e); - } - openChannels.clear(); - } - -} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java index efa296b6278..760ac1253c6 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.transport.netty4; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -91,7 +92,7 @@ 
public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase final Netty4Transport t = (Netty4Transport) transport; @SuppressWarnings("unchecked") final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; - TcpChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); + CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } public void testConnectException() throws UnknownHostException { diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java index 088f0e85dde..255faab5dda 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java @@ -36,4 +36,17 @@ public class NioHttpChannel extends NioSocketChannel implements HttpChannel { public void sendResponse(HttpResponse response, ActionListener listener) { getContext().sendMessage(response, ActionListener.toBiConsumer(listener)); } + + @Override + public void addCloseListener(ActionListener listener) { + addCloseListener(ActionListener.toBiConsumer(listener)); + } + + @Override + public String toString() { + return "NioHttpChannel{" + + "localAddress=" + getLocalAddress() + + ", remoteAddress=" + getRemoteAddress() + + '}'; + } } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java index ba51f7c6848..aa0859e6146 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java @@ -20,22 +20,20 @@ package org.elasticsearch.http.nio; import io.netty.handler.codec.http.HttpMethod; -import io.netty.handler.timeout.ReadTimeoutException; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.NetworkExceptionHelper; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; @@ -44,6 +42,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.AbstractHttpServerTransport; import org.elasticsearch.http.BindHttpException; +import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpStats; import org.elasticsearch.http.nio.cors.NioCorsConfig; @@ -115,7 +114,6 @@ public 
class NioHttpServerTransport extends AbstractHttpServerTransport { private final int tcpReceiveBufferSize; private final Set serverChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); - private final Set socketChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); private NioGroup nioGroup; private HttpChannelFactory channelFactory; private final NioCorsConfig corsConfig; @@ -156,7 +154,7 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { int workerCount = NIO_HTTP_WORKER_COUNT.get(settings); nioGroup = new NioGroup(daemonThreadFactory(this.settings, HTTP_SERVER_ACCEPTOR_THREAD_NAME_PREFIX), acceptorCount, daemonThreadFactory(this.settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX), workerCount, - (s) -> new EventHandler(this::nonChannelExceptionCaught, s)); + (s) -> new EventHandler(this::onNonChannelException, s)); channelFactory = new HttpChannelFactory(); this.boundAddress = createBoundHttpAddress(); @@ -187,12 +185,13 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { } } + // TODO: Move all of channel closing to abstract class once server channels are handled try { - closeChannels(new ArrayList<>(socketChannels)); + CloseableChannel.closeChannels(new ArrayList<>(httpChannels), true); } catch (Exception e) { logger.warn("unexpected exception while closing http channels", e); } - socketChannels.clear(); + httpChannels.clear(); try { nioGroup.close(); @@ -235,38 +234,7 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { @Override public HttpStats stats() { - return new HttpStats(serverChannels.size(), socketChannels.size()); - } - - protected void exceptionCaught(NioSocketChannel channel, Exception cause) { - if (cause instanceof ReadTimeoutException) { - if (logger.isTraceEnabled()) { - logger.trace("Read timeout [{}]", channel.getRemoteAddress()); - } - channel.close(); - } else { - if (lifecycle.started() == false) { - // ignore - return; - } - if (NetworkExceptionHelper.isCloseConnectionException(cause) == false) { - logger.warn( - (Supplier) () -> new ParameterizedMessage( - "caught exception while handling client http traffic, closing connection {}", channel), - cause); - channel.close(); - } else { - logger.debug( - (Supplier) () -> new ParameterizedMessage( - "caught exception while handling client http traffic, closing connection {}", channel), - cause); - channel.close(); - } - } - } - - protected void nonChannelExceptionCaught(Exception ex) { - logger.warn(new ParameterizedMessage("exception caught on transport layer [thread={}]", Thread.currentThread().getName()), ex); + return new HttpStats(serverChannels.size(), totalChannelsAccepted.get()); } static NioCorsConfig buildCorsConfig(Settings settings) { @@ -324,7 +292,7 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { } private void acceptChannel(NioSocketChannel socketChannel) { - socketChannels.add(socketChannel); + super.serverAcceptedChannel((HttpChannel) socketChannel); } private class HttpChannelFactory extends ChannelFactory { @@ -342,7 +310,7 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { }; HttpReadWriteHandler httpReadWritePipeline = new HttpReadWriteHandler(nioChannel,NioHttpServerTransport.this, handlingSettings, corsConfig); - Consumer exceptionHandler = (e) -> exceptionCaught(nioChannel, e); + Consumer exceptionHandler = (e) -> onException(nioChannel, e); SocketChannelContext context = new BytesChannelContext(nioChannel, selector, exceptionHandler, 
httpReadWritePipeline, new InboundChannelBuffer(pageSupplier)); nioChannel.setContext(context); diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioSocketChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java similarity index 92% rename from plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioSocketChannel.java rename to plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java index ef2bc875aa9..d700ad567bc 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioSocketChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java @@ -28,11 +28,11 @@ import java.io.IOException; import java.net.StandardSocketOptions; import java.nio.channels.SocketChannel; -public class TcpNioSocketChannel extends NioSocketChannel implements TcpChannel { +public class NioTcpChannel extends NioSocketChannel implements TcpChannel { private final String profile; - public TcpNioSocketChannel(String profile, SocketChannel socketChannel) throws IOException { + public NioTcpChannel(String profile, SocketChannel socketChannel) throws IOException { super(socketChannel); this.profile = profile; } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioServerSocketChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java similarity index 92% rename from plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioServerSocketChannel.java rename to plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java index 946563225c6..10bf4ed7523 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioServerSocketChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java @@ -32,11 +32,11 @@ import java.nio.channels.ServerSocketChannel; * This is an implementation of {@link NioServerSocketChannel} that adheres to the {@link TcpChannel} * interface. As it is a server socket, setting SO_LINGER and sending messages is not supported. 
*/ -public class TcpNioServerSocketChannel extends NioServerSocketChannel implements TcpChannel { +public class NioTcpServerChannel extends NioServerSocketChannel implements TcpChannel { private final String profile; - public TcpNioServerSocketChannel(String profile, ServerSocketChannel socketChannel) throws IOException { + public NioTcpServerChannel(String profile, ServerSocketChannel socketChannel) throws IOException { super(socketChannel); this.profile = profile; } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java index b85d707dcd9..cf7d37493cb 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java @@ -40,7 +40,6 @@ import org.elasticsearch.nio.NioSelector; import org.elasticsearch.nio.NioSocketChannel; import org.elasticsearch.nio.ServerChannelContext; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TcpChannel; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transports; @@ -78,14 +77,14 @@ public class NioTransport extends TcpTransport { } @Override - protected TcpNioServerSocketChannel bind(String name, InetSocketAddress address) throws IOException { + protected NioTcpServerChannel bind(String name, InetSocketAddress address) throws IOException { TcpChannelFactory channelFactory = this.profileToChannelFactory.get(name); return nioGroup.bindServerChannel(address, channelFactory); } @Override - protected TcpNioSocketChannel initiateChannel(InetSocketAddress address, ActionListener connectListener) throws IOException { - TcpNioSocketChannel channel = nioGroup.openChannel(address, clientChannelFactory); + protected NioTcpChannel initiateChannel(InetSocketAddress address, ActionListener connectListener) throws IOException { + NioTcpChannel channel = nioGroup.openChannel(address, clientChannelFactory); channel.addConnectListener(ActionListener.toBiConsumer(connectListener)); return channel; } @@ -131,19 +130,15 @@ public class NioTransport extends TcpTransport { profileToChannelFactory.clear(); } - protected void exceptionCaught(NioSocketChannel channel, Exception exception) { - onException((TcpChannel) channel, exception); - } - protected void acceptChannel(NioSocketChannel channel) { - serverAcceptedChannel((TcpNioSocketChannel) channel); + serverAcceptedChannel((NioTcpChannel) channel); } protected TcpChannelFactory channelFactory(ProfileSettings settings, boolean isClient) { return new TcpChannelFactoryImpl(settings); } - protected abstract class TcpChannelFactory extends ChannelFactory { + protected abstract class TcpChannelFactory extends ChannelFactory { protected TcpChannelFactory(RawChannelFactory rawChannelFactory) { super(rawChannelFactory); @@ -164,14 +159,14 @@ public class NioTransport extends TcpTransport { } @Override - public TcpNioSocketChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { - TcpNioSocketChannel nioChannel = new TcpNioSocketChannel(profileName, channel); + public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { + NioTcpChannel nioChannel = new NioTcpChannel(profileName, channel); Supplier pageSupplier = () -> { Recycler.V bytes = pageCacheRecycler.bytePage(false); return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), 
bytes::close); }; TcpReadWriteHandler readWriteHandler = new TcpReadWriteHandler(nioChannel, NioTransport.this); - Consumer exceptionHandler = (e) -> exceptionCaught(nioChannel, e); + Consumer exceptionHandler = (e) -> onException(nioChannel, e); BytesChannelContext context = new BytesChannelContext(nioChannel, selector, exceptionHandler, readWriteHandler, new InboundChannelBuffer(pageSupplier)); nioChannel.setContext(context); @@ -179,8 +174,8 @@ public class NioTransport extends TcpTransport { } @Override - public TcpNioServerSocketChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { - TcpNioServerSocketChannel nioChannel = new TcpNioServerSocketChannel(profileName, channel); + public NioTcpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { + NioTcpServerChannel nioChannel = new NioTcpServerChannel(profileName, channel); Consumer exceptionHandler = (e) -> logger.error(() -> new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); Consumer acceptor = NioTransport.this::acceptChannel; diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpReadWriteHandler.java index f2d07b18085..e86653b6858 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpReadWriteHandler.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpReadWriteHandler.java @@ -28,10 +28,10 @@ import java.io.IOException; public class TcpReadWriteHandler extends BytesWriteHandler { - private final TcpNioSocketChannel channel; + private final NioTcpChannel channel; private final TcpTransport transport; - public TcpReadWriteHandler(TcpNioSocketChannel channel, TcpTransport transport) { + public TcpReadWriteHandler(NioTcpChannel channel, TcpTransport transport) { this.channel = channel; this.transport = transport; } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java index c78ae25e44a..090fc579c48 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.transport.nio; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -96,7 +97,7 @@ public class SimpleNioTransportTests extends AbstractSimpleTransportTestCase { protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; - TcpChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); + CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } public void testConnectException() throws UnknownHostException { 
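Most of the remaining server-side hunks repeat one mechanical change: transport actions that never used the injected IndexNameExpressionResolver stop receiving it in their constructors, and request deserialization is registered through a Writeable.Reader where the request class supports it. A minimal, hypothetical sketch of the resulting constructor shape follows; ExampleAction, ExampleRequest and ExampleResponse are illustrative names, not part of this change.

    public class TransportExampleAction extends HandledTransportAction<ExampleRequest, ExampleResponse> {

        @Inject
        public TransportExampleAction(Settings settings, ThreadPool threadPool,
                                      TransportService transportService, ActionFilters actionFilters) {
            // Before this change the super call also took an IndexNameExpressionResolver, e.g.
            // super(settings, ExampleAction.NAME, threadPool, transportService, actionFilters,
            //     indexNameExpressionResolver, ExampleRequest::new);
            super(settings, ExampleAction.NAME, threadPool, transportService, actionFilters,
                (Writeable.Reader<ExampleRequest>) ExampleRequest::new);
        }

        // doExecute omitted; actions that still resolve index names keep the resolver as their own
        // field instead of relying on the base class, as the TransportMultiGetAction and
        // TransportMasterNodeAction hunks below show.
    }
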
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index 7b43d1c259b..1207300208f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; @@ -43,11 +42,10 @@ public class TransportNodesHotThreadsAction extends TransportNodesAction { @Inject - public TransportNodesHotThreadsAction(Settings settings, ThreadPool threadPool, - ClusterService clusterService, TransportService transportService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + public TransportNodesHotThreadsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters) { super(settings, NodesHotThreadsAction.NAME, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, NodesHotThreadsRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, NodeHotThreads.class); + NodesHotThreadsRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, NodeHotThreads.class); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index afe535601fc..a422f33fd6e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; @@ -44,12 +43,10 @@ public class TransportNodesInfoAction extends TransportNodesAction) RemoteInfoRequest::new); this.remoteClusterService = searchTransportService.getRemoteClusterService(); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponse.java index d8de78c3e5b..ac6e74d3970 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponse.java @@ -20,6 +20,7 @@ package 
org.elasticsearch.action.admin.cluster.snapshots.delete; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.xcontent.XContentParser; /** * Delete snapshot response @@ -32,4 +33,9 @@ public class DeleteSnapshotResponse extends AcknowledgedResponse { DeleteSnapshotResponse(boolean acknowledged) { super(acknowledged); } + + public static DeleteSnapshotResponse fromXContent(XContentParser parser) { + return new DeleteSnapshotResponse(parseAcknowledged(parser)); + } + } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index 77578546b95..79c7c776a19 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -63,12 +62,11 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction) SyncedFlushRequest::new); this.syncedFlushService = syncedFlushService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java index 8729b60e3bf..bbf0219fd70 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java @@ -41,14 +41,16 @@ public class TransportGetFieldMappingsAction extends HandledTransportAction { private final ClusterService clusterService; - private final TransportShardMultiGetAction shardAction; + private final IndexNameExpressionResolver indexNameExpressionResolver; @Inject public TransportMultiGetAction(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterService clusterService, TransportShardMultiGetAction shardAction, ActionFilters actionFilters, IndexNameExpressionResolver resolver) { - super(settings, MultiGetAction.NAME, threadPool, transportService, actionFilters, resolver, MultiGetRequest::new); + super(settings, MultiGetAction.NAME, threadPool, transportService, actionFilters, MultiGetRequest::new); this.clusterService = clusterService; this.shardAction = shardAction; + this.indexNameExpressionResolver = resolver; } @Override diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java index d660840e9b7..ea8f44d85f6 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java @@ -22,8 +22,8 @@ package 
org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.ingest.PipelineStore; @@ -39,8 +39,10 @@ public class SimulatePipelineTransportAction extends HandledTransportAction) SimulatePipelineRequest::new); this.pipelineStore = nodeService.getIngestService().getPipelineStore(); this.executionService = new SimulateExecutionService(threadPool); } diff --git a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java index d560a7ecc11..18e704be69c 100644 --- a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java +++ b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -40,9 +39,8 @@ public class TransportMainAction extends HandledTransportAction searchAction, - IndexNameExpressionResolver resolver, int availableProcessors, LongSupplier relativeTimeProvider) { - super(Settings.EMPTY, MultiSearchAction.NAME, threadPool, transportService, actionFilters, resolver, MultiSearchRequest::new); + int availableProcessors, LongSupplier relativeTimeProvider) { + super(Settings.EMPTY, MultiSearchAction.NAME, threadPool, transportService, actionFilters, MultiSearchRequest::new); this.clusterService = clusterService; this.searchAction = searchAction; this.availableProcessors = availableProcessors; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index ad3b2efd42f..311ba02f523 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -74,19 +75,22 @@ public class TransportSearchAction extends HandledTransportAction) SearchRequest::new); this.searchPhaseController = searchPhaseController; this.searchTransportService = searchTransportService; this.remoteClusterService = searchTransportService.getRemoteClusterService(); SearchTransportService.registerRequestHandler(transportService, searchService); this.clusterService = clusterService; this.searchService = searchService; + 
this.indexNameExpressionResolver = indexNameExpressionResolver; } private Map buildPerIndexAliasFilter(SearchRequest request, ClusterState clusterState, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java index 6f230c9bd8b..77425ecd5db 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java @@ -22,9 +22,9 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -43,10 +43,9 @@ public class TransportSearchScrollAction extends HandledTransportAction) SearchScrollRequest::new); this.clusterService = clusterService; this.searchTransportService = searchTransportService; this.searchPhaseController = searchPhaseController; diff --git a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index c584db10699..d6febf82876 100644 --- a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -22,7 +22,6 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; @@ -39,29 +38,28 @@ import java.util.function.Supplier; public abstract class HandledTransportAction extends TransportAction { protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + ActionFilters actionFilters, Supplier request) { - this(settings, actionName, true, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); + this(settings, actionName, true, threadPool, transportService, actionFilters, request); } protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, Writeable.Reader requestReader, - IndexNameExpressionResolver indexNameExpressionResolver) { - this(settings, actionName, true, threadPool, transportService, actionFilters, requestReader, indexNameExpressionResolver); + ActionFilters actionFilters, Writeable.Reader requestReader) { + this(settings, actionName, true, threadPool, transportService, actionFilters, requestReader); } protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, ThreadPool threadPool, TransportService transportService, 
ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, Supplier request) { - super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager()); + Supplier request) { + super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); transportService.registerRequestHandler(actionName, request, ThreadPool.Names.SAME, false, canTripCircuitBreaker, new TransportHandler()); } protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, - Writeable.Reader requestReader, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager()); + Writeable.Reader requestReader) { + super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); transportService.registerRequestHandler(actionName, ThreadPool.Names.SAME, false, canTripCircuitBreaker, requestReader, new TransportHandler()); } diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java index 6207f333ced..c8d9849c2e5 100644 --- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; @@ -39,16 +38,14 @@ public abstract class TransportAction request, Supplier shardRequest, String shardExecutor) { - super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); + super(settings, actionName, threadPool, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.transportShardAction = actionName + "[s]"; transportService.registerRequestHandler(transportShardAction, shardRequest, shardExecutor, new ShardTransportHandler()); diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index ff4e73acc18..ca50e2acd14 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -81,6 +81,7 @@ public abstract class TransportBroadcastByNodeAction request, String executor, boolean canTripCircuitBreaker) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, indexNameExpressionResolver, + super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; + 
this.indexNameExpressionResolver = indexNameExpressionResolver; transportNodeBroadcastAction = actionName + "[n]"; diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 42d7da11846..1881db0f13e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -56,6 +56,7 @@ import java.util.function.Supplier; public abstract class TransportMasterNodeAction, Response extends ActionResponse> extends HandledTransportAction { protected final TransportService transportService; protected final ClusterService clusterService; + protected final IndexNameExpressionResolver indexNameExpressionResolver; final String executor; @@ -74,10 +75,11 @@ public abstract class TransportMasterNodeAction request) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, indexNameExpressionResolver, + super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, request); this.transportService = transportService; this.clusterService = clusterService; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); } @@ -85,10 +87,11 @@ public abstract class TransportMasterNodeAction request, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, request, - indexNameExpressionResolver); + super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, request + ); this.transportService = transportService; this.clusterService = clusterService; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); } diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 0b61c7ed712..d47e156680e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.NoSuchNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; @@ -63,11 +62,9 @@ public abstract class TransportNodesAction request, Supplier nodeRequest, - String nodeExecutor, + Supplier request, Supplier nodeRequest, String nodeExecutor, Class nodeResponseClass) { - super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); + super(settings, actionName, threadPool, transportService, actionFilters, request); this.clusterService = Objects.requireNonNull(clusterService); this.transportService = Objects.requireNonNull(transportService); this.nodeResponseClass = Objects.requireNonNull(nodeResponseClass); diff --git 
a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index 4cad1c21170..d3d54880f50 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -56,13 +56,15 @@ public abstract class TransportBroadcastReplicationAction request, Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportReplicationAction replicatedBroadcastShardAction) { - super(settings, name, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); + super(settings, name, threadPool, transportService, actionFilters, request); this.replicatedBroadcastShardAction = replicatedBroadcastShardAction; this.clusterService = clusterService; + this.indexNameExpressionResolver = indexNameExpressionResolver; } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index e06c771481f..97f98580616 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -104,6 +104,7 @@ public abstract class TransportReplicationAction< protected final ClusterService clusterService; protected final ShardStateAction shardStateAction; protected final IndicesService indicesService; + protected final IndexNameExpressionResolver indexNameExpressionResolver; protected final TransportRequestOptions transportOptions; protected final String executor; @@ -131,11 +132,12 @@ public abstract class TransportReplicationAction< IndexNameExpressionResolver indexNameExpressionResolver, Supplier request, Supplier replicaRequest, String executor, boolean syncGlobalCheckpointAfterOperation) { - super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager()); + super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); this.transportService = transportService; this.clusterService = clusterService; this.indicesService = indicesService; this.shardStateAction = shardStateAction; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor; this.transportPrimaryAction = actionName + "[p]"; diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index b7582832703..c907c12ac51 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -52,6 +52,7 @@ public abstract class TransportInstanceSingleOperationAction { protected final ClusterService clusterService; protected final TransportService transportService; + protected final IndexNameExpressionResolver indexNameExpressionResolver; 
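This is the recurring shape of the refactoring: the shared TransportAction/HandledTransportAction constructors no longer accept an IndexNameExpressionResolver, so each action that still resolves index names declares its own final field and assigns it in its constructor. A minimal sketch of a concrete action wired against the new base signatures might look as follows; TransportExampleAction, ExampleRequest and ExampleResponse are hypothetical names used only for illustration (the request/response types are assumed to extend ActionRequest/ActionResponse and to have no-arg constructors).

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

public class TransportExampleAction extends HandledTransportAction<ExampleRequest, ExampleResponse> {

    // The resolver now lives in the concrete action rather than in the shared base classes.
    private final IndexNameExpressionResolver indexNameExpressionResolver;

    @Inject
    public TransportExampleAction(Settings settings, ThreadPool threadPool, TransportService transportService,
                                  ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
        // New base signature: no IndexNameExpressionResolver parameter. The action name is hypothetical.
        super(settings, "indices:data/read/example", threadPool, transportService, actionFilters, ExampleRequest::new);
        this.indexNameExpressionResolver = indexNameExpressionResolver;
    }

    @Override
    protected void doExecute(ExampleRequest request, ActionListener<ExampleResponse> listener) {
        // Resolve concrete index names here if and when the action actually needs them.
        listener.onResponse(new ExampleResponse());
    }
}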
final String executor; final String shardActionName; @@ -59,9 +60,10 @@ public abstract class TransportInstanceSingleOperationAction request) { - super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); + super(settings, actionName, threadPool, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); this.shardActionName = actionName + "[s]"; transportService.registerRequestHandler(shardActionName, request, executor, new ShardTransportHandler()); diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index 1a3a3c761c8..6c5d55c8c44 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -61,8 +61,8 @@ import static org.elasticsearch.action.support.TransportActions.isShardNotAvaila public abstract class TransportSingleShardAction, Response extends ActionResponse> extends TransportAction { protected final ClusterService clusterService; - protected final TransportService transportService; + protected final IndexNameExpressionResolver indexNameExpressionResolver; final String transportShardAction; final String executor; @@ -70,9 +70,10 @@ public abstract class TransportSingleShardAction request, String executor) { - super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager()); + super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); this.clusterService = clusterService; this.transportService = transportService; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.transportShardAction = actionName + "[s]"; this.executor = executor; diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index aad7d20073c..f852b5efb1a 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; @@ -78,12 +77,10 @@ public abstract class TransportTasksAction< protected final String transportNodeAction; - protected TransportTasksAction(Settings settings, String actionName, ThreadPool threadPool, - ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, Supplier requestSupplier, - Supplier responseSupplier, - String nodeExecutor) { - super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, requestSupplier); + protected 
TransportTasksAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters, Supplier requestSupplier, + Supplier responseSupplier, String nodeExecutor) { + super(settings, actionName, threadPool, transportService, actionFilters, requestSupplier); this.clusterService = clusterService; this.transportService = transportService; this.transportNodeAction = actionName + "[n]"; diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java index 1d164087ed0..9a3fc7b84c2 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -41,16 +41,17 @@ import java.util.concurrent.atomic.AtomicInteger; public class TransportMultiTermVectorsAction extends HandledTransportAction { private final ClusterService clusterService; - private final TransportShardMultiTermsVectorAction shardAction; + private final IndexNameExpressionResolver indexNameExpressionResolver; @Inject public TransportMultiTermVectorsAction(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterService clusterService, TransportShardMultiTermsVectorAction shardAction, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, MultiTermVectorsAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, MultiTermVectorsRequest::new); + super(settings, MultiTermVectorsAction.NAME, threadPool, transportService, actionFilters, MultiTermVectorsRequest::new); this.clusterService = clusterService; this.shardAction = shardAction; + this.indexNameExpressionResolver = indexNameExpressionResolver; } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 2543be4811c..8927adfd434 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -730,7 +730,7 @@ public class MasterService extends AbstractLifecycleComponent { return; } final ThreadContext threadContext = threadPool.getThreadContext(); - final Supplier supplier = threadContext.newRestorableContext(false); + final Supplier supplier = threadContext.newRestorableContext(true); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { threadContext.markAsSystemContext(); diff --git a/server/src/main/java/org/elasticsearch/common/network/CloseableChannel.java b/server/src/main/java/org/elasticsearch/common/network/CloseableChannel.java new file mode 100644 index 00000000000..6b89a90aa2c --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/network/CloseableChannel.java @@ -0,0 +1,118 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.network; + +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.core.internal.io.IOUtils; + +import java.io.Closeable; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.ExecutionException; + +public interface CloseableChannel extends Closeable { + + /** + * Closes the channel. For most implementations, this will be be an asynchronous process. For this + * reason, this method does not throw {@link java.io.IOException} There is no guarantee that the channel + * will be closed when this method returns. Use the {@link #addCloseListener(ActionListener)} method + * to implement logic that depends on knowing when the channel is closed. + */ + @Override + void close(); + + /** + * Adds a listener that will be executed when the channel is closed. If the channel is still open when + * this listener is added, the listener will be executed by the thread that eventually closes the + * channel. If the channel is already closed when the listener is added the listener will immediately be + * executed by the thread that is attempting to add the listener. + * + * @param listener to be executed + */ + void addCloseListener(ActionListener listener); + + /** + * Indicates whether a channel is currently open + * + * @return boolean indicating if channel is open + */ + boolean isOpen(); + + /** + * Closes the channel without blocking. + * + * @param channel to close + */ + static void closeChannel(C channel) { + closeChannel(channel, false); + } + + /** + * Closes the channel. + * + * @param channel to close + * @param blocking indicates if we should block on channel close + */ + static void closeChannel(C channel, boolean blocking) { + closeChannels(Collections.singletonList(channel), blocking); + } + + /** + * Closes the channels. + * + * @param channels to close + * @param blocking indicates if we should block on channel close + */ + static void closeChannels(List channels, boolean blocking) { + try { + IOUtils.close(channels); + } catch (IOException e) { + // The CloseableChannel#close method does not throw IOException, so this should not occur. + throw new UncheckedIOException(e); + } + if (blocking) { + ArrayList> futures = new ArrayList<>(channels.size()); + for (final C channel : channels) { + PlainActionFuture closeFuture = PlainActionFuture.newFuture(); + channel.addCloseListener(closeFuture); + futures.add(closeFuture); + } + blockOnFutures(futures); + } + } + + static void blockOnFutures(List> futures) { + for (ActionFuture future : futures) { + try { + future.get(); + } catch (ExecutionException e) { + // Ignore as we are only interested in waiting for the close process to complete. Logging + // close exceptions happens elsewhere. 
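+ // An interrupt is handled below by restoring the thread's interrupt flag so callers can
+ // still observe that the wait was interrupted.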
+ } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 0fa66ca8297..e10c79e504c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -153,7 +153,6 @@ public final class IndexScopedSettings extends AbstractScopedSettings { IndexModule.INDEX_STORE_TYPE_SETTING, IndexModule.INDEX_STORE_PRE_LOAD_SETTING, IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING, - IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING, FsDirectoryService.INDEX_LOCK_FACTOR_SETTING, EngineConfig.INDEX_CODEC_SETTING, EngineConfig.INDEX_OPTIMIZE_AUTO_GENERATED_IDS, diff --git a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java index 4e498d393e2..0a01c7cdabb 100644 --- a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java @@ -30,7 +30,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -56,12 +55,10 @@ public class TransportNodesListGatewayMetaState extends TransportNodesAction httpChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); protected volatile BoundTransportAddress boundAddress; protected AbstractHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, @@ -166,6 +177,49 @@ public abstract class AbstractHttpServerTransport extends AbstractLifecycleCompo return publishPort; } + protected void onException(HttpChannel channel, Exception e) { + if (lifecycle.started() == false) { + // just close and ignore - we are already stopped and just need to make sure we release all resources + CloseableChannel.closeChannel(channel); + return; + } + if (NetworkExceptionHelper.isCloseConnectionException(e)) { + logger.trace(() -> new ParameterizedMessage( + "close connection exception caught while handling client http traffic, closing connection {}", channel), e); + CloseableChannel.closeChannel(channel); + } else if (NetworkExceptionHelper.isConnectException(e)) { + logger.trace(() -> new ParameterizedMessage( + "connect exception caught while handling client http traffic, closing connection {}", channel), e); + CloseableChannel.closeChannel(channel); + } else if (e instanceof CancelledKeyException) { + logger.trace(() -> new ParameterizedMessage( + "cancelled key exception caught while handling client http traffic, closing connection {}", channel), e); + CloseableChannel.closeChannel(channel); + } else { + logger.warn(() -> new ParameterizedMessage( + "caught exception while handling client http traffic, closing connection {}", channel), e); + CloseableChannel.closeChannel(channel); + } + } + + /** + * Exception handler for exceptions that are not associated 
with a specific channel. + * + * @param exception the exception + */ + protected void onNonChannelException(Exception exception) { + logger.warn(new ParameterizedMessage("exception caught on transport layer [thread={}]", Thread.currentThread().getName()), + exception); + } + + protected void serverAcceptedChannel(HttpChannel httpChannel) { + boolean addedOnThisCall = httpChannels.add(httpChannel); + assert addedOnThisCall : "Channel should only be added to http channel set once"; + totalChannelsAccepted.incrementAndGet(); + httpChannel.addCloseListener(ActionListener.wrap(() -> httpChannels.remove(httpChannel))); + logger.trace(() -> new ParameterizedMessage("Http channel accepted: {}", httpChannel)); + } + /** * This method handles an incoming http request. * @@ -181,7 +235,7 @@ public abstract class AbstractHttpServerTransport extends AbstractLifecycleCompo * * @param httpRequest that is incoming * @param httpChannel that received the http request - * @param exception that was encountered + * @param exception that was encountered */ public void incomingRequestError(final HttpRequest httpRequest, final HttpChannel httpChannel, final Exception exception) { handleIncomingRequest(httpRequest, httpChannel, exception); @@ -219,7 +273,7 @@ public abstract class AbstractHttpServerTransport extends AbstractLifecycleCompo innerRestRequest = requestWithoutContentTypeHeader(httpRequest, httpChannel, badRequestCause); } catch (final RestRequest.BadParameterException e) { badRequestCause = ExceptionsHelper.useOrSuppress(badRequestCause, e); - innerRestRequest = RestRequest.requestWithoutParameters(xContentRegistry, httpRequest, httpChannel); + innerRestRequest = RestRequest.requestWithoutParameters(xContentRegistry, httpRequest, httpChannel); } restRequest = innerRestRequest; } diff --git a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java index f5924bb239e..38bf1e751ef 100644 --- a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.rest.AbstractRestChannel; @@ -114,7 +115,7 @@ public class DefaultRestChannel extends AbstractRestChannel implements RestChann } if (isCloseConnection()) { - toClose.add(httpChannel); + toClose.add(() -> CloseableChannel.closeChannel(httpChannel)); } ActionListener listener = ActionListener.wrap(() -> Releasables.close(toClose)); diff --git a/server/src/main/java/org/elasticsearch/http/HttpChannel.java b/server/src/main/java/org/elasticsearch/http/HttpChannel.java index baea3e0c3b3..ea8d3c276b1 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpChannel.java +++ b/server/src/main/java/org/elasticsearch/http/HttpChannel.java @@ -20,11 +20,11 @@ package org.elasticsearch.http; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.network.CloseableChannel; import java.net.InetSocketAddress; -public interface HttpChannel extends Releasable { +public interface HttpChannel extends 
CloseableChannel { /** * Sends a http response to the channel. The listener will be executed once the send process has been @@ -49,10 +49,4 @@ public interface HttpChannel extends Releasable { */ InetSocketAddress getRemoteAddress(); - /** - * Closes the channel. This might be an asynchronous process. There is no guarantee that the channel - * will be closed when this method returns. - */ - void close(); - } diff --git a/server/src/main/java/org/elasticsearch/http/HttpStats.java b/server/src/main/java/org/elasticsearch/http/HttpStats.java index ac7f0d69485..4809315ce18 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpStats.java +++ b/server/src/main/java/org/elasticsearch/http/HttpStats.java @@ -32,9 +32,9 @@ public class HttpStats implements Writeable, ToXContentFragment { private final long serverOpen; private final long totalOpen; - public HttpStats(long serverOpen, long totalOpen) { + public HttpStats(long serverOpen, long totalOpened) { this.serverOpen = serverOpen; - this.totalOpen = totalOpen; + this.totalOpen = totalOpened; } public HttpStats(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 61b5cb91712..c61a7cf0706 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -166,7 +166,18 @@ public final class AnalysisRegistry implements Closeable { */ tokenFilters.put("synonym", requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings))); tokenFilters.put("synonym_graph", requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings))); - return buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.preConfiguredTokenFilters); + + Map mappings + = buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.preConfiguredTokenFilters); + + // ReferringTokenFilters require references to other tokenfilters, so we pass these in + // after all factories have been registered + for (TokenFilterFactory tff : mappings.values()) { + if (tff instanceof ReferringFilterFactory) { + ((ReferringFilterFactory)tff).setReferences(mappings); + } + } + return mappings; } public Map buildTokenizerFactories(IndexSettings indexSettings) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ReferringFilterFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/ReferringFilterFactory.java new file mode 100644 index 00000000000..9eb9bc2dbd6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/analysis/ReferringFilterFactory.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
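For context on how the new AnalysisRegistry hook above is meant to be used: the ReferringFilterFactory interface defined in this new file lets a token filter factory look up other, already-registered factories once registration is complete. A purely illustrative implementation is a delegating filter that resolves a referenced filter by name; ExampleReferringFilterFactory and its "filter" setting are hypothetical, and the Map<String, TokenFilterFactory> signature is assumed from the registry code above.

import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
import org.elasticsearch.index.analysis.ReferringFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;

import java.util.Map;

public class ExampleReferringFilterFactory extends AbstractTokenFilterFactory implements ReferringFilterFactory {

    private final String referencedFilterName;
    private TokenFilterFactory referenced;

    public ExampleReferringFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        // Name of another configured token filter that this filter delegates to.
        this.referencedFilterName = settings.get("filter");
    }

    @Override
    public void setReferences(Map<String, TokenFilterFactory> factories) {
        // Called by AnalysisRegistry after every factory has been registered.
        referenced = factories.get(referencedFilterName);
        if (referenced == null) {
            throw new IllegalArgumentException("Unknown token filter [" + referencedFilterName + "]");
        }
    }

    @Override
    public TokenStream create(TokenStream tokenStream) {
        return referenced.create(tokenStream);
    }
}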
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +import java.util.Map; + +/** + * Marks a {@link TokenFilterFactory} that refers to other filter factories. + * + * The analysis registry will call {@link #setReferences(Map)} with a map of all + * available TokenFilterFactories after all factories have been registered + */ +public interface ReferringFilterFactory { + + /** + * Called with a map of all registered filter factories + */ + void setReferences(Map factories); + +} diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index e8a3a14d461..ec42d65eac5 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -29,6 +29,7 @@ import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.Term; +import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.Sort; @@ -301,7 +302,16 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis if (IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.get(settings)) { - cachingPolicy = QueryCachingPolicy.ALWAYS_CACHE; + cachingPolicy = new QueryCachingPolicy() { + @Override + public void onUse(Query query) { + + } + @Override + public boolean shouldCache(Query query) { + return true; + } + }; } else { cachingPolicy = new UsageTrackingQueryCachingPolicy(); } diff --git a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 404a19b0ab3..3dee58febbd 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -30,7 +30,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -74,9 +73,8 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction implements MultiBucketAggregationBuilder { public static final String NAME = "date_histogram"; + private static DateMathParser EPOCH_MILLIS_PARSER = new DateMathParser(Joda.forPattern("epoch_millis", Locale.ROOT)); public static final Map DATE_FIELD_UNITS; @@ -380,7 +383,7 @@ public class 
DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil Long anyInstant = null; final IndexNumericFieldData fieldData = context.getForField(ft); for (LeafReaderContext ctx : reader.leaves()) { - AtomicNumericFieldData leafFD = ((IndexNumericFieldData) fieldData).load(ctx); + AtomicNumericFieldData leafFD = fieldData.load(ctx); SortedNumericDocValues values = leafFD.getLongValues(); if (values.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { anyInstant = values.nextValue(); @@ -406,11 +409,8 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil // rounding rounds down, so 'nextTransition' is a good upper bound final long high = nextTransition; - final DocValueFormat format = ft.docValueFormat(null, null); - final Object formattedLow = format.format(low); - final Object formattedHigh = format.format(high); - if (ft.isFieldWithinQuery(reader, formattedLow, formattedHigh, - true, false, tz, null, context) == Relation.WITHIN) { + if (ft.isFieldWithinQuery(reader, low, high, true, false, DateTimeZone.UTC, EPOCH_MILLIS_PARSER, + context) == Relation.WITHIN) { // All values in this reader have the same offset despite daylight saving times. // This is very common for location-based timezones such as Europe/Paris in // combination with time-based indices. diff --git a/server/src/main/java/org/elasticsearch/transport/TcpChannel.java b/server/src/main/java/org/elasticsearch/transport/TcpChannel.java index 1a022ee9f48..bc5cc2c92f2 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpChannel.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpChannel.java @@ -21,17 +21,13 @@ package org.elasticsearch.transport; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; import java.net.InetSocketAddress; -import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -43,30 +39,13 @@ import java.util.concurrent.TimeoutException; * abstraction used by the {@link TcpTransport} and {@link TransportService}. All tcp transport * implementations must return channels that adhere to the required method contracts. */ -public interface TcpChannel extends Releasable { - - /** - * Closes the channel. This might be an asynchronous process. There is no guarantee that the channel - * will be closed when this method returns. Use the {@link #addCloseListener(ActionListener)} method - * to implement logic that depends on knowing when the channel is closed. - */ - void close(); +public interface TcpChannel extends CloseableChannel { /** * This returns the profile for this channel. */ String getProfile(); - /** - * Adds a listener that will be executed when the channel is closed. If the channel is still open when - * this listener is added, the listener will be executed by the thread that eventually closes the - * channel. If the channel is already closed when the listener is added the listener will immediately be - * executed by the thread that is attempting to add the listener. 
- * - * @param listener to be executed - */ - void addCloseListener(ActionListener listener); - /** * This sets the low level socket option {@link java.net.StandardSocketOptions} SO_LINGER on a channel. @@ -77,13 +56,6 @@ public interface TcpChannel extends Releasable { void setSoLinger(int value) throws IOException; - /** - * Indicates whether a channel is currently open - * - * @return boolean indicating if channel is open - */ - boolean isOpen(); - /** * Returns the local address for this channel. * @@ -107,48 +79,6 @@ public interface TcpChannel extends Releasable { */ void sendMessage(BytesReference reference, ActionListener listener); - /** - * Closes the channel without blocking. - * - * @param channel to close - */ - static void closeChannel(C channel) { - closeChannel(channel, false); - } - - /** - * Closes the channel. - * - * @param channel to close - * @param blocking indicates if we should block on channel close - */ - static void closeChannel(C channel, boolean blocking) { - closeChannels(Collections.singletonList(channel), blocking); - } - - /** - * Closes the channels. - * - * @param channels to close - * @param blocking indicates if we should block on channel close - */ - static void closeChannels(List channels, boolean blocking) { - if (blocking) { - ArrayList> futures = new ArrayList<>(channels.size()); - for (final C channel : channels) { - if (channel.isOpen()) { - PlainActionFuture closeFuture = PlainActionFuture.newFuture(); - channel.addCloseListener(closeFuture); - channel.close(); - futures.add(closeFuture); - } - } - blockOnFutures(futures); - } else { - Releasables.close(channels); - } - } - /** * Awaits for all of the pending connections to complete. Will throw an exception if at least one of the * connections fails. @@ -188,17 +118,4 @@ public interface TcpChannel extends Releasable { } } - static void blockOnFutures(List> futures) { - for (ActionFuture future : futures) { - try { - future.get(); - } catch (ExecutionException e) { - // Ignore as we are only interested in waiting for the close process to complete. Logging - // close exceptions happens elsewhere. 
- } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new IllegalStateException("Future got interrupted", e); - } - } - } } diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index c577fae4867..bd862c19e9c 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -22,6 +22,7 @@ import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.IntSet; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; @@ -479,7 +480,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements } boolean block = lifecycle.stopped() && Transports.isTransportThread(Thread.currentThread()) == false; - TcpChannel.closeChannels(channels, block); + CloseableChannel.closeChannels(channels, block); } finally { transportService.onConnectionClosed(this); } @@ -623,7 +624,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements channels.add(channel); } catch (Exception e) { // If there was an exception when attempting to instantiate the raw channels, we close all of the channels - TcpChannel.closeChannels(channels, false); + CloseableChannel.closeChannels(channels, false); throw e; } } @@ -632,7 +633,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements try { TcpChannel.awaitConnected(node, connectionFutures, connectionProfile.getConnectTimeout()); } catch (Exception ex) { - TcpChannel.closeChannels(channels, false); + CloseableChannel.closeChannels(channels, false); throw ex; } @@ -643,7 +644,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements try { version = executeHandshake(node, handshakeChannel, connectionProfile.getHandshakeTimeout()); } catch (Exception ex) { - TcpChannel.closeChannels(channels, false); + CloseableChannel.closeChannels(channels, false); throw ex; } @@ -962,12 +963,12 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements ActionListener closeFailLogger = ActionListener.wrap(c -> {}, e -> logger.warn(() -> new ParameterizedMessage("Error closing serverChannel for profile [{}]", profile), e)); channels.forEach(c -> c.addCloseListener(closeFailLogger)); - TcpChannel.closeChannels(channels, true); + CloseableChannel.closeChannels(channels, true); } serverChannels.clear(); // close all of the incoming channels. The closeChannels method takes a list so we must convert the set. 
- TcpChannel.closeChannels(new ArrayList<>(acceptedChannels), true); + CloseableChannel.closeChannels(new ArrayList<>(acceptedChannels), true); acceptedChannels.clear(); @@ -1001,7 +1002,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements protected void onException(TcpChannel channel, Exception e) { if (!lifecycle.started()) { // just close and ignore - we are already stopped and just need to make sure we release all resources - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); return; } @@ -1009,20 +1010,20 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements logger.trace(() -> new ParameterizedMessage( "close connection exception caught on transport layer [{}], disconnecting from relevant node", channel), e); // close the channel, which will cause a node to be disconnected if relevant - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (isConnectException(e)) { logger.trace(() -> new ParameterizedMessage("connect exception caught on transport layer [{}]", channel), e); // close the channel as safe measure, which will cause a node to be disconnected if relevant - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (e instanceof BindException) { logger.trace(() -> new ParameterizedMessage("bind exception caught on transport layer [{}]", channel), e); // close the channel as safe measure, which will cause a node to be disconnected if relevant - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (e instanceof CancelledKeyException) { logger.trace(() -> new ParameterizedMessage( "cancelled key exception caught on transport layer [{}], disconnecting from relevant node", channel), e); // close the channel as safe measure, which will cause a node to be disconnected if relevant - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (e instanceof TcpTransport.HttpOnTransportException) { // in case we are able to return data, serialize the exception content and sent it back to the client if (channel.isOpen()) { @@ -1030,13 +1031,13 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements final SendMetricListener closeChannel = new SendMetricListener(message.length()) { @Override protected void innerInnerOnResponse(Void v) { - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } @Override protected void innerOnFailure(Exception e) { logger.debug("failed to send message to httpOnTransport channel", e); - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } }; internalSendMessage(channel, message, closeChannel); @@ -1044,7 +1045,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements } else { logger.warn(() -> new ParameterizedMessage("exception caught on transport layer [{}], closing connection", channel), e); // close the channel, which will cause a node to be disconnected if relevant - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } } @@ -1060,7 +1061,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements protected void serverAcceptedChannel(TcpChannel channel) { boolean addedOnThisCall = acceptedChannels.add(channel); - assert addedOnThisCall : "Channel should only be added to accept channel set once"; + assert addedOnThisCall : "Channel should only be added to accepted channel set once"; 
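// The close listener below removes the channel from the accepted set once it is closed, so the
// set only ever tracks channels that are currently open.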
channel.addCloseListener(ActionListener.wrap(() -> acceptedChannels.remove(channel))); logger.trace(() -> new ParameterizedMessage("Tcp transport channel accepted: {}", channel)); } diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index f79cb02d83a..2bcbd5bd15a 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -81,7 +81,7 @@ public class ActionModuleTests extends ESTestCase { class FakeTransportAction extends TransportAction { protected FakeTransportAction(Settings settings, String actionName, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TaskManager taskManager) { - super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, taskManager); + super(settings, actionName, threadPool, actionFilters, taskManager); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 4baf184e22b..4cb9cd27e7f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -32,7 +32,6 @@ import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -148,8 +147,8 @@ public abstract class TaskManagerTestCase extends ESTestCase { ClusterService clusterService, TransportService transportService, Supplier request, Supplier nodeRequest) { super(settings, actionName, threadPool, clusterService, transportService, - new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), - request, nodeRequest, ThreadPool.Names.GENERIC, NodeResponse.class); + new ActionFilters(new HashSet<>()), + request, nodeRequest, ThreadPool.Names.GENERIC, NodeResponse.class); } @Override @@ -192,12 +191,10 @@ public abstract class TaskManagerTestCase extends ESTestCase { transportService.start(); clusterService = createClusterService(threadPool, discoveryNode.get()); clusterService.addStateApplier(transportService.getTaskManager()); - IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(settings); ActionFilters actionFilters = new ActionFilters(emptySet()); - transportListTasksAction = new TransportListTasksAction(settings, threadPool, clusterService, transportService, - actionFilters, indexNameExpressionResolver); + transportListTasksAction = new TransportListTasksAction(settings, threadPool, clusterService, transportService, actionFilters); transportCancelTasksAction = new TransportCancelTasksAction(settings, threadPool, clusterService, - transportService, actionFilters, indexNameExpressionResolver); + transportService, actionFilters); transportService.acceptIncomingRequests(); } diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index bac8f4a8730..0cfe532b8a0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -37,7 +37,6 @@ import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -269,8 +268,8 @@ public class TestTaskPlugin extends Plugin implements ActionPlugin { public TransportTestTaskAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService) { super(settings, TestTaskAction.NAME, threadPool, clusterService, transportService, - new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), - NodesRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, NodeResponse.class); + new ActionFilters(new HashSet<>()), + NodesRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, NodeResponse.class); } @Override @@ -429,7 +428,7 @@ public class TestTaskPlugin extends Plugin implements ActionPlugin { clusterService, TransportService transportService) { super(settings, UnblockTestTasksAction.NAME, threadPool, clusterService, transportService, new ActionFilters(new - HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), + HashSet<>()), UnblockTestTasksRequest::new, UnblockTestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index fd6f68d4200..33b815e4fbf 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.action.support.tasks.TransportTasksAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -258,7 +257,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { protected TestTasksAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService) { super(settings, actionName, threadPool, clusterService, transportService, new ActionFilters(new HashSet<>()), - new IndexNameExpressionResolver(Settings.EMPTY), TestTasksRequest::new, TestTasksResponse::new, + TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponseTests.java new file mode 100644 index 00000000000..d77dadfb81e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponseTests.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.snapshots.delete; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +public class DeleteSnapshotResponseTests extends AbstractStreamableXContentTestCase { + + @Override + protected DeleteSnapshotResponse doParseInstance(XContentParser parser) { + return DeleteSnapshotResponse.fromXContent(parser); + } + + @Override + protected DeleteSnapshotResponse createBlankInstance() { + return new DeleteSnapshotResponse(); + } + + @Override + protected DeleteSnapshotResponse createTestInstance() { + return new DeleteSnapshotResponse(randomBoolean()); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java index 035cc0be4a5..1c1c0f9476d 100644 --- a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; @@ -70,7 +69,7 @@ public class MainActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportMainAction action = new TransportMainAction(settings, mock(ThreadPool.class), transportService, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), clusterService); + clusterService); AtomicReference responseRef = new AtomicReference<>(); action.doExecute(new MainRequest(), new ActionListener() { @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index cccc0219f22..39e9ec805e0 100644 --- 
a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -149,7 +149,7 @@ public class MultiSearchActionTookTests extends ESTestCase { final Set requests = Collections.newSetFromMap(Collections.synchronizedMap(new IdentityHashMap<>())); TransportAction searchAction = new TransportAction(Settings.EMPTY, - "action", threadPool, actionFilters, resolver, taskManager) { + "action", threadPool, actionFilters, taskManager) { @Override protected void doExecute(SearchRequest request, ActionListener listener) { requests.add(request); @@ -161,7 +161,7 @@ public class MultiSearchActionTookTests extends ESTestCase { }; if (controlledClock) { - return new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, resolver, + return new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, availableProcessors, expected::get) { @Override void executeSearch(final Queue requests, final AtomicArray responses, @@ -171,7 +171,7 @@ public class MultiSearchActionTookTests extends ESTestCase { } }; } else { - return new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, resolver, + return new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, availableProcessors, System::nanoTime) { @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java index 2b83dd9aa63..26d5cf2cc14 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java @@ -108,7 +108,7 @@ public class TransportMultiSearchActionTests extends ESTestCase { final ExecutorService rarelyExecutor = threadPool.executor(threadPoolNames.get(1)); final Set requests = Collections.newSetFromMap(Collections.synchronizedMap(new IdentityHashMap<>())); TransportAction searchAction = new TransportAction - (Settings.EMPTY, "action", threadPool, actionFilters, resolver, taskManager) { + (Settings.EMPTY, "action", threadPool, actionFilters, taskManager) { @Override protected void doExecute(SearchRequest request, ActionListener listener) { requests.add(request); @@ -126,7 +126,7 @@ public class TransportMultiSearchActionTests extends ESTestCase { }; TransportMultiSearchAction action = - new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, resolver, 10, + new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, 10, System::nanoTime); // Execute the multi search api and fail if we find an error after executing: diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index d576d440c02..3a31422dcf8 100644 --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -80,7 +80,7 @@ public class TransportActionFilterChainTests extends ESTestCase { String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters 
actionFilters = new ActionFilters(filters); TransportAction transportAction = - new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, + new TransportAction(Settings.EMPTY, actionName, null, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override protected void doExecute(TestRequest request, ActionListener listener) { @@ -158,7 +158,7 @@ public class TransportActionFilterChainTests extends ESTestCase { String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = new TransportAction(Settings.EMPTY, - actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { + actionName, null, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); diff --git a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index 60a46876a71..8a79da044b9 100644 --- a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -250,7 +250,7 @@ public class TransportNodesActionTests extends ESTestCase { transportService, ActionFilters actionFilters, Supplier request, Supplier nodeRequest, String nodeExecutor) { super(settings, "indices:admin/test", threadPool, clusterService, transportService, actionFilters, - null, request, nodeRequest, nodeExecutor, TestNodeResponse.class); + request, nodeRequest, nodeExecutor, TestNodeResponse.class); } @Override diff --git a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index dc1f146b452..a289e9680b4 100644 --- a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -59,7 +59,7 @@ public class NodeClientHeadersTests extends AbstractClientHeadersTestCase { private static class InternalTransportAction extends TransportAction { private InternalTransportAction(Settings settings, String actionName, ThreadPool threadPool) { - super(settings, actionName, threadPool, EMPTY_FILTERS, null, new TaskManager(settings, threadPool, Collections.emptySet())); + super(settings, actionName, threadPool, EMPTY_FILTERS, new TaskManager(settings, threadPool, Collections.emptySet())); } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 20587d31f53..1ef548bd681 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -54,6 +54,7 @@ import org.junit.Before; import org.junit.BeforeClass; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -177,6 +178,8 @@ public class MasterServiceTests extends ESTestCase { try (ThreadContext.StoredContext ignored = threadPool.getThreadContext().stashContext()) { final Map expectedHeaders = 
Collections.singletonMap("test", "test"); + final Map> expectedResponseHeaders = Collections.singletonMap("testResponse", + Arrays.asList("testResponse")); threadPool.getThreadContext().putHeader(expectedHeaders); final TimeValue ackTimeout = randomBoolean() ? TimeValue.ZERO : TimeValue.timeValueMillis(randomInt(10000)); @@ -187,6 +190,8 @@ public class MasterServiceTests extends ESTestCase { public ClusterState execute(ClusterState currentState) { assertTrue(threadPool.getThreadContext().isSystemContext()); assertEquals(Collections.emptyMap(), threadPool.getThreadContext().getHeaders()); + threadPool.getThreadContext().addResponseHeader("testResponse", "testResponse"); + assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); if (randomBoolean()) { return ClusterState.builder(currentState).build(); @@ -201,6 +206,7 @@ public class MasterServiceTests extends ESTestCase { public void onFailure(String source, Exception e) { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); latch.countDown(); } @@ -208,6 +214,7 @@ public class MasterServiceTests extends ESTestCase { public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); latch.countDown(); } @@ -229,6 +236,7 @@ public class MasterServiceTests extends ESTestCase { public void onAllNodesAcked(@Nullable Exception e) { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); latch.countDown(); } @@ -236,6 +244,7 @@ public class MasterServiceTests extends ESTestCase { public void onAckTimeout() { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); latch.countDown(); } @@ -243,6 +252,7 @@ public class MasterServiceTests extends ESTestCase { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + assertEquals(Collections.emptyMap(), threadPool.getThreadContext().getResponseHeaders()); } latch.await(); diff --git a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java index 36da9761b97..26a5b87866c 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.apache.lucene.analysis.MockTokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.en.EnglishAnalyzer; diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index 
83bde66e3bd..e155639f143 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -89,6 +89,19 @@ public class IndicesQueryCacheTests extends ESTestCase { } + private static QueryCachingPolicy alwaysCachePolicy() { + return new QueryCachingPolicy() { + @Override + public void onUse(Query query) { + + } + @Override + public boolean shouldCache(Query query) { + return true; + } + }; + } + public void testBasics() throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()); @@ -98,7 +111,7 @@ public class IndicesQueryCacheTests extends ESTestCase { ShardId shard = new ShardId("index", "_na_", 0); r = ElasticsearchDirectoryReader.wrap(r, shard); IndexSearcher s = new IndexSearcher(r); - s.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + s.setQueryCachingPolicy(alwaysCachePolicy()); Settings settings = Settings.builder() .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10) @@ -169,7 +182,7 @@ public class IndicesQueryCacheTests extends ESTestCase { ShardId shard1 = new ShardId("index", "_na_", 0); r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); IndexSearcher s1 = new IndexSearcher(r1); - s1.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + s1.setQueryCachingPolicy(alwaysCachePolicy()); Directory dir2 = newDirectory(); IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig()); @@ -179,7 +192,7 @@ public class IndicesQueryCacheTests extends ESTestCase { ShardId shard2 = new ShardId("index", "_na_", 1); r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); IndexSearcher s2 = new IndexSearcher(r2); - s2.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + s2.setQueryCachingPolicy(alwaysCachePolicy()); Settings settings = Settings.builder() .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10) @@ -295,7 +308,7 @@ public class IndicesQueryCacheTests extends ESTestCase { ShardId shard1 = new ShardId("index", "_na_", 0); r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); IndexSearcher s1 = new IndexSearcher(r1); - s1.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + s1.setQueryCachingPolicy(alwaysCachePolicy()); Directory dir2 = newDirectory(); IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig()); @@ -305,7 +318,7 @@ public class IndicesQueryCacheTests extends ESTestCase { ShardId shard2 = new ShardId("index", "_na_", 1); r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); IndexSearcher s2 = new IndexSearcher(r2); - s2.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + s2.setQueryCachingPolicy(alwaysCachePolicy()); Settings settings = Settings.builder() .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10) diff --git a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java index ddcda105803..e54641bef2f 100644 --- a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java +++ b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java @@ -514,8 +514,8 @@ public class TestPersistentTasksPlugin extends Plugin implements ActionPlugin, P public TransportTestTaskAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, String nodeExecutor) { - 
super(settings, TestTaskAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); + super(settings, TestTaskAction.NAME, threadPool, clusterService, transportService, actionFilters, + TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index a4a561cfee3..26e6f4c0765 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; @@ -41,7 +42,6 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; import org.joda.time.DateTime; @@ -1341,6 +1341,38 @@ public class DateHistogramIT extends ESIntegTestCase { } } + /** + * https://github.com/elastic/elasticsearch/issues/31392 demonstrates an edge case where a date field mapping with + * "format" = "epoch_millis" can cause the date histogram aggregation to throw an error if a non-UTC time zone + * with daylight saving time is used. 
This test was added to verify that this now works. + * @throws ExecutionException + * @throws InterruptedException + */ + public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, ExecutionException { + String index = "test31392"; + assertAcked(client().admin().indices().prepareCreate(index).addMapping("type", "d", "type=date,format=epoch_millis").get()); + indexRandom(true, client().prepareIndex(index, "type").setSource("d", "1477954800000")); + ensureSearchable(index); + SearchResponse response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d") + .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin"))).execute().actionGet(); + assertSearchResponse(response); + Histogram histo = response.getAggregations().get("histo"); + assertThat(histo.getBuckets().size(), equalTo(1)); + assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000")); + assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L)); + + response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d") + .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin")).format("yyyy-MM-dd")) + .execute().actionGet(); + assertSearchResponse(response); + histo = response.getAggregations().get("histo"); + assertThat(histo.getBuckets().size(), equalTo(1)); + assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("2016-11-01")); + assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L)); + + internalCluster().wipeIndices(index); + } + /** * When DST ends, local time turns back one hour, so between 2am and 4am wall time we should have four buckets: * "2015-10-25T02:00:00.000+02:00", diff --git a/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index 45e374b8697..16a9d99b783 100644 --- a/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -31,8 +31,10 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; +import java.util.Arrays; import java.util.Base64; import java.util.Collection; import java.util.Collections; @@ -52,7 +54,7 @@ public class ScriptQuerySearchIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(CustomScriptPlugin.class); + return Arrays.asList(CustomScriptPlugin.class, InternalSettingsPlugin.class); } public static class CustomScriptPlugin extends MockScriptPlugin { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 979bfccdb64..c63a1c9c6e6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -434,10 +434,6 @@ public abstract class ESIntegTestCase extends ESTestCase { if (randomBoolean()) { randomSettingsBuilder.put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), randomBoolean()); } - - if (randomBoolean()) { - randomSettingsBuilder.put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), 
randomBoolean()); - } PutIndexTemplateRequestBuilder putTemplate = client().admin().indices() .preparePutTemplate("random_index_template") .setPatterns(Collections.singletonList("*")) diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java index e1c555b8110..be8c824f0f7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; @@ -51,6 +52,8 @@ public final class InternalSettingsPlugin extends Plugin { INDEX_CREATION_DATE_SETTING, PROVIDED_NAME_SETTING, TRANSLOG_RETENTION_CHECK_INTERVAL_SETTING, - IndexService.GLOBAL_CHECKPOINT_SYNC_INTERVAL_SETTING); + IndexService.GLOBAL_CHECKPOINT_SYNC_INTERVAL_SETTING, + IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING + ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java index 4d4743156c7..2aec495390b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java @@ -143,6 +143,16 @@ public class FakeRestRequest extends RestRequest { return remoteAddress; } + @Override + public void addCloseListener(ActionListener listener) { + + } + + @Override + public boolean isOpen() { + return true; + } + @Override public void close() { diff --git a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java index e9f5f86462f..84c82f4159d 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.transport; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -65,7 +66,7 @@ public class MockTcpTransportTests extends AbstractSimpleTransportTestCase { final MockTcpTransport t = (MockTcpTransport) transport; @SuppressWarnings("unchecked") final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; - TcpChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); + CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } } diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java index bd7fddf82b8..cf9eb5d7a8c 100644 --- 
a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.transport.nio; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -97,7 +98,7 @@ public class SimpleMockNioTransportTests extends AbstractSimpleTransportTestCase protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; - TcpChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); + CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } public void testConnectException() throws UnknownHostException { diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index ed70fcd44a7..9abca910c5d 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -9,13 +9,6 @@ apply plugin: 'elasticsearch.docs-test' * only remove entries from this list. When it is empty we'll remove it * entirely and have a party! There will be cake and everything.... */ buildRestTests.expectedUnconvertedCandidates = [ - 'en/ml/functions/count.asciidoc', - 'en/ml/functions/geo.asciidoc', - 'en/ml/functions/info.asciidoc', - 'en/ml/functions/metric.asciidoc', - 'en/ml/functions/rare.asciidoc', - 'en/ml/functions/sum.asciidoc', - 'en/ml/functions/time.asciidoc', 'en/rest-api/watcher/put-watch.asciidoc', 'en/security/authentication/user-cache.asciidoc', 'en/security/authorization/field-and-document-access-control.asciidoc', @@ -54,10 +47,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/watcher/trigger/schedule/weekly.asciidoc', 'en/watcher/trigger/schedule/yearly.asciidoc', 'en/watcher/troubleshooting.asciidoc', - 'en/rest-api/license/delete-license.asciidoc', - 'en/rest-api/license/update-license.asciidoc', - 'en/ml/api-quickref.asciidoc', - 'en/rest-api/ml/delete-calendar-event.asciidoc', 'en/rest-api/ml/delete-snapshot.asciidoc', 'en/rest-api/ml/forecast.asciidoc', 'en/rest-api/ml/get-bucket.asciidoc', @@ -71,8 +60,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/ml/preview-datafeed.asciidoc', 'en/rest-api/ml/revert-snapshot.asciidoc', 'en/rest-api/ml/update-snapshot.asciidoc', - 'en/rest-api/ml/validate-detector.asciidoc', - 'en/rest-api/ml/validate-job.asciidoc', 'en/rest-api/watcher/stats.asciidoc', 'en/watcher/example-watches/watching-time-series-data.asciidoc', ] diff --git a/x-pack/docs/en/ml/aggregations.asciidoc b/x-pack/docs/en/ml/aggregations.asciidoc index f3b8e6b3e34..5ff54b76f01 100644 --- a/x-pack/docs/en/ml/aggregations.asciidoc +++ b/x-pack/docs/en/ml/aggregations.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-configuring-aggregation]] -=== Aggregating Data For Faster Performance +=== Aggregating data for faster performance By default, {dfeeds} fetch data from {es} using search and scroll requests. 
It can be significantly more efficient, however, to aggregate data in {es} diff --git a/x-pack/docs/en/ml/api-quickref.asciidoc b/x-pack/docs/en/ml/api-quickref.asciidoc index 9602379c374..dc87a6ba209 100644 --- a/x-pack/docs/en/ml/api-quickref.asciidoc +++ b/x-pack/docs/en/ml/api-quickref.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-api-quickref]] -== API Quick Reference +== API quick reference All {ml} endpoints have the following base: @@ -7,6 +8,7 @@ All {ml} endpoints have the following base: ---- /_xpack/ml/ ---- +// NOTCONSOLE The main {ml} resources can be accessed with a variety of endpoints: diff --git a/x-pack/docs/en/ml/categories.asciidoc b/x-pack/docs/en/ml/categories.asciidoc index bb217e2e186..21f71b871cb 100644 --- a/x-pack/docs/en/ml/categories.asciidoc +++ b/x-pack/docs/en/ml/categories.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[ml-configuring-categories]] === Categorizing log messages @@ -77,7 +78,7 @@ NOTE: To add the `categorization_examples_limit` property, you must use the [float] [[ml-configuring-analyzer]] -==== Customizing the Categorization Analyzer +==== Customizing the categorization analyzer Categorization uses English dictionary words to identify log message categories. By default, it also uses English tokenization rules. For this reason, if you use @@ -213,7 +214,7 @@ API examples above. [float] [[ml-viewing-categories]] -==== Viewing Categorization Results +==== Viewing categorization results After you open the job and start the {dfeed} or supply data to the job, you can view the categorization results in {kib}. For example: diff --git a/x-pack/docs/en/ml/configuring.asciidoc b/x-pack/docs/en/ml/configuring.asciidoc index ba965a08b04..c2c6e69a711 100644 --- a/x-pack/docs/en/ml/configuring.asciidoc +++ b/x-pack/docs/en/ml/configuring.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-configuring]] -== Configuring Machine Learning +== Configuring machine learning If you want to use {xpackml} features, there must be at least one {ml} node in your cluster and all master-eligible nodes must have {ml} enabled. By default, diff --git a/x-pack/docs/en/ml/customurl.asciidoc b/x-pack/docs/en/ml/customurl.asciidoc index 7c773c4b9bf..7c197084c0e 100644 --- a/x-pack/docs/en/ml/customurl.asciidoc +++ b/x-pack/docs/en/ml/customurl.asciidoc @@ -48,7 +48,7 @@ using the {ml} APIs. [float] [[ml-configuring-url-strings]] -==== String Substitution in Custom URLs +==== String substitution in custom URLs You can use dollar sign ($) delimited tokens in a custom URL. These tokens are substituted for the values of the corresponding fields in the anomaly records. diff --git a/x-pack/docs/en/ml/functions.asciidoc b/x-pack/docs/en/ml/functions.asciidoc index ae5f768e056..e32470c6827 100644 --- a/x-pack/docs/en/ml/functions.asciidoc +++ b/x-pack/docs/en/ml/functions.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-functions]] -== Function Reference +== Function reference The {xpackml} features include analysis functions that provide a wide variety of flexible ways to analyze data for anomalies. diff --git a/x-pack/docs/en/ml/functions/count.asciidoc b/x-pack/docs/en/ml/functions/count.asciidoc index 4b70f80933d..a2dc5645b61 100644 --- a/x-pack/docs/en/ml/functions/count.asciidoc +++ b/x-pack/docs/en/ml/functions/count.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-count-functions]] -=== Count Functions +=== Count functions Count functions detect anomalies when the number of events in a bucket is anomalous. 
@@ -21,7 +22,7 @@ The {xpackml} features include the following count functions: [float] [[ml-count]] -===== Count, High_count, Low_count +===== Count, high_count, low_count The `count` function detects anomalies when the number of events in a bucket is anomalous. @@ -44,8 +45,20 @@ see {ref}/ml-job-resource.html#ml-detectorconfig[Detector Configuration Objects] .Example 1: Analyzing events with the count function [source,js] -------------------------------------------------- -{ "function" : "count" } +PUT _xpack/ml/anomaly_detectors/example1 +{ + "analysis_config": { + "detectors": [{ + "function" : "count" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } +} -------------------------------------------------- +// CONSOLE This example is probably the simplest possible analysis. It identifies time buckets during which the overall count of events is higher or lower than @@ -57,12 +70,22 @@ and detects when the event rate is unusual compared to its past behavior. .Example 2: Analyzing errors with the high_count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example2 { - "function" : "high_count", - "by_field_name" : "error_code", - "over_field_name": "user" + "analysis_config": { + "detectors": [{ + "function" : "high_count", + "by_field_name" : "error_code", + "over_field_name": "user" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE If you use this `high_count` function in a detector in your job, it models the event rate for each error code. It detects users that generate an @@ -72,11 +95,21 @@ unusually high count of error codes compared to other users. .Example 3: Analyzing status codes with the low_count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example3 { - "function" : "low_count", - "by_field_name" : "status_code" + "analysis_config": { + "detectors": [{ + "function" : "low_count", + "by_field_name" : "status_code" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE In this example, the function detects when the count of events for a status code is lower than usual. @@ -88,22 +121,30 @@ compared to its past behavior. .Example 4: Analyzing aggregated data with the count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example4 { - "summary_count_field_name" : "events_per_min", - "detectors" [ - { "function" : "count" } - ] -} + "analysis_config": { + "summary_count_field_name" : "events_per_min", + "detectors": [{ + "function" : "count" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } +} -------------------------------------------------- +// CONSOLE If you are analyzing an aggregated `events_per_min` field, do not use a sum function (for example, `sum(events_per_min)`). Instead, use the count function -and the `summary_count_field_name` property. -//TO-DO: For more information, see <>. +and the `summary_count_field_name` property. For more information, see +<>. 
[float] [[ml-nonzero-count]] -===== Non_zero_count, High_non_zero_count, Low_non_zero_count +===== Non_zero_count, high_non_zero_count, low_non_zero_count The `non_zero_count` function detects anomalies when the number of events in a bucket is anomalous, but it ignores cases where the bucket count is zero. Use @@ -144,11 +185,21 @@ The `non_zero_count` function models only the following data: .Example 5: Analyzing signatures with the high_non_zero_count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example5 { - "function" : "high_non_zero_count", - "by_field_name" : "signaturename" + "analysis_config": { + "detectors": [{ + "function" : "high_non_zero_count", + "by_field_name" : "signaturename" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE If you use this `high_non_zero_count` function in a detector in your job, it models the count of events for the `signaturename` field. It ignores any buckets @@ -163,7 +214,7 @@ data is sparse, use the `count` functions, which are optimized for that scenario [float] [[ml-distinct-count]] -===== Distinct_count, High_distinct_count, Low_distinct_count +===== Distinct_count, high_distinct_count, low_distinct_count The `distinct_count` function detects anomalies where the number of distinct values in one field is unusual. @@ -187,11 +238,21 @@ see {ref}/ml-job-resource.html#ml-detectorconfig[Detector Configuration Objects] .Example 6: Analyzing users with the distinct_count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example6 { - "function" : "distinct_count", - "field_name" : "user" + "analysis_config": { + "detectors": [{ + "function" : "distinct_count", + "field_name" : "user" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE This `distinct_count` function detects when a system has an unusual number of logged in users. When you use this function in a detector in your job, it @@ -201,12 +262,22 @@ users is unusual compared to the past. .Example 7: Analyzing ports with the high_distinct_count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example7 { - "function" : "high_distinct_count", - "field_name" : "dst_port", - "over_field_name": "src_ip" + "analysis_config": { + "detectors": [{ + "function" : "high_distinct_count", + "field_name" : "dst_port", + "over_field_name": "src_ip" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE This example detects instances of port scanning. When you use this function in a detector in your job, it models the distinct count of ports. It also detects the diff --git a/x-pack/docs/en/ml/functions/geo.asciidoc b/x-pack/docs/en/ml/functions/geo.asciidoc index cc98e95bf20..e9685b46e16 100644 --- a/x-pack/docs/en/ml/functions/geo.asciidoc +++ b/x-pack/docs/en/ml/functions/geo.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-geo-functions]] -=== Geographic Functions +=== Geographic functions The geographic functions detect anomalies in the geographic location of the input data. 
@@ -28,12 +29,22 @@ see {ref}/ml-job-resource.html#ml-detectorconfig[Detector Configuration Objects] .Example 1: Analyzing transactions with the lat_long function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example1 { - "function" : "lat_long", - "field_name" : "transactionCoordinates", - "by_field_name" : "creditCardNumber" + "analysis_config": { + "detectors": [{ + "function" : "lat_long", + "field_name" : "transactionCoordinates", + "by_field_name" : "creditCardNumber" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE If you use this `lat_long` function in a detector in your job, it detects anomalies where the geographic location of a credit card transaction is @@ -54,6 +65,7 @@ For example, JSON data might contain the following transaction coordinates: "creditCardNumber": "1234123412341234" } -------------------------------------------------- +// NOTCONSOLE In {es}, location data is likely to be stored in `geo_point` fields. For more information, see {ref}/geo-point.html[Geo-point datatype]. This data type is not @@ -64,7 +76,15 @@ format. For example, the following Painless script transforms [source,js] -------------------------------------------------- +PUT _xpack/ml/datafeeds/datafeed-test2 { + "job_id": "farequote", + "indices": ["farequote"], + "query": { + "match_all": { + "boost": 1 + } + }, "script_fields": { "lat-lon": { "script": { @@ -75,5 +95,7 @@ format. For example, the following Painless script transforms } } -------------------------------------------------- +// CONSOLE +// TEST[setup:farequote_job] For more information, see <>. diff --git a/x-pack/docs/en/ml/functions/info.asciidoc b/x-pack/docs/en/ml/functions/info.asciidoc index f964d4eb3ec..2c3117e0e56 100644 --- a/x-pack/docs/en/ml/functions/info.asciidoc +++ b/x-pack/docs/en/ml/functions/info.asciidoc @@ -40,6 +40,7 @@ For more information about those properties, see "over_field_name" : "highest_registered_domain" } -------------------------------------------------- +// NOTCONSOLE If you use this `info_content` function in a detector in your job, it models information that is present in the `subdomain` string. It detects anomalies @@ -60,6 +61,7 @@ choice. "over_field_name" : "src_ip" } -------------------------------------------------- +// NOTCONSOLE If you use this `high_info_content` function in a detector in your job, it models information content that is held in the DNS query string. It detects @@ -77,6 +79,7 @@ information content is higher than expected. "by_field_name" : "logfilename" } -------------------------------------------------- +// NOTCONSOLE If you use this `low_info_content` function in a detector in your job, it models information content that is present in the message string for each diff --git a/x-pack/docs/en/ml/functions/metric.asciidoc b/x-pack/docs/en/ml/functions/metric.asciidoc index 495fc6f3335..3ee51797027 100644 --- a/x-pack/docs/en/ml/functions/metric.asciidoc +++ b/x-pack/docs/en/ml/functions/metric.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-metric-functions]] -=== Metric Functions +=== Metric functions The metric functions include functions such as mean, min and max. These values are calculated for each bucket. 
Field values that cannot be converted to @@ -42,6 +43,7 @@ For more information about those properties, see "by_field_name" : "product" } -------------------------------------------------- +// NOTCONSOLE If you use this `min` function in a detector in your job, it detects where the smallest transaction is lower than previously observed. You can use this @@ -76,6 +78,7 @@ For more information about those properties, see "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `max` function in a detector in your job, it detects where the longest `responsetime` is longer than previously observed. You can use this @@ -98,6 +101,7 @@ to previous applications. "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE The analysis in the previous example can be performed alongside `high_mean` functions by application. By combining detectors and using the same influencer @@ -106,7 +110,7 @@ response times for each bucket. [float] [[ml-metric-median]] -==== Median, High_median, Low_median +==== Median, high_median, low_median The `median` function detects anomalies in the statistical median of a value. The median value is calculated for each bucket. @@ -136,6 +140,7 @@ For more information about those properties, see "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `median` function in a detector in your job, it models the median `responsetime` for each application over time. It detects when the median @@ -143,7 +148,7 @@ median `responsetime` for each application over time. It detects when the median [float] [[ml-metric-mean]] -==== Mean, High_mean, Low_mean +==== Mean, high_mean, low_mean The `mean` function detects anomalies in the arithmetic mean of a value. The mean value is calculated for each bucket. @@ -173,6 +178,7 @@ For more information about those properties, see "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `mean` function in a detector in your job, it models the mean `responsetime` for each application over time. It detects when the mean @@ -187,6 +193,7 @@ If you use this `mean` function in a detector in your job, it models the mean "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `high_mean` function in a detector in your job, it models the mean `responsetime` for each application over time. It detects when the mean @@ -201,6 +208,7 @@ mean `responsetime` for each application over time. It detects when the mean "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `low_mean` function in a detector in your job, it models the mean `responsetime` for each application over time. It detects when the mean @@ -237,6 +245,7 @@ For more information about those properties, see "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `metric` function in a detector in your job, it models the mean, min, and max `responsetime` for each application over time. It detects @@ -245,7 +254,7 @@ when the mean, min, or max `responsetime` is unusual compared to previous [float] [[ml-metric-varp]] -==== Varp, High_varp, Low_varp +==== Varp, high_varp, low_varp The `varp` function detects anomalies in the variance of a value which is a measure of the variability and spread in the data. 
@@ -273,6 +282,7 @@ For more information about those properties, see "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `varp` function in a detector in your job, it models the variance in values of `responsetime` for each application over time. It detects @@ -288,6 +298,7 @@ behavior. "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `high_varp` function in a detector in your job, it models the variance in values of `responsetime` for each application over time. It detects @@ -303,6 +314,7 @@ behavior. "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `low_varp` function in a detector in your job, it models the variance in values of `responsetime` for each application over time. It detects diff --git a/x-pack/docs/en/ml/functions/rare.asciidoc b/x-pack/docs/en/ml/functions/rare.asciidoc index 2485605557c..fc30918b508 100644 --- a/x-pack/docs/en/ml/functions/rare.asciidoc +++ b/x-pack/docs/en/ml/functions/rare.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-rare-functions]] -=== Rare Functions +=== Rare functions The rare functions detect values that occur rarely in time or rarely for a population. @@ -54,6 +55,7 @@ For more information about those properties, see "by_field_name" : "status" } -------------------------------------------------- +// NOTCONSOLE If you use this `rare` function in a detector in your job, it detects values that are rare in time. It models status codes that occur over time and detects @@ -69,6 +71,7 @@ status codes in a web access log that have never (or rarely) occurred before. "over_field_name" : "clientip" } -------------------------------------------------- +// NOTCONSOLE If you use this `rare` function in a detector in your job, it detects values that are rare in a population. It models status code and client IP interactions @@ -111,6 +114,7 @@ For more information about those properties, see "over_field_name" : "clientip" } -------------------------------------------------- +// NOTCONSOLE If you use this `freq_rare` function in a detector in your job, it detects values that are frequently rare in a population. It models URI paths and diff --git a/x-pack/docs/en/ml/functions/sum.asciidoc b/x-pack/docs/en/ml/functions/sum.asciidoc index 3a0f0b264e9..7a95ad63fcc 100644 --- a/x-pack/docs/en/ml/functions/sum.asciidoc +++ b/x-pack/docs/en/ml/functions/sum.asciidoc @@ -1,6 +1,6 @@ - +[role="xpack"] [[ml-sum-functions]] -=== Sum Functions +=== Sum functions The sum functions detect anomalies when the sum of a field in a bucket is anomalous. @@ -16,16 +16,9 @@ The {xpackml} features include the following sum functions: * xref:ml-sum[`sum`, `high_sum`, `low_sum`] * xref:ml-nonnull-sum[`non_null_sum`, `high_non_null_sum`, `low_non_null_sum`] -//// -TBD: Incorporate from prelert docs?: -Input data may contain pre-calculated fields giving the total count of some value e.g. transactions per minute. -Ensure you are familiar with our advice on Summarization of Input Data, as this is likely to provide -a more appropriate method to using the sum function. -//// - [float] [[ml-sum]] -==== Sum, High_sum, Low_sum +==== Sum, high_sum, low_sum The `sum` function detects anomalies where the sum of a field in a bucket is anomalous. 
@@ -54,6 +47,7 @@ For more information about those properties, see "over_field_name" : "employee" } -------------------------------------------------- +// NOTCONSOLE If you use this `sum` function in a detector in your job, it models total expenses per employees for each cost center. For each time bucket, @@ -69,6 +63,7 @@ to other employees. "over_field_name" : "cs_host" } -------------------------------------------------- +// NOTCONSOLE If you use this `high_sum` function in a detector in your job, it models total `cs_bytes`. It detects `cs_hosts` that transfer unusually high @@ -79,7 +74,7 @@ to find users that are abusing internet privileges. [float] [[ml-nonnull-sum]] -==== Non_null_sum, High_non_null_sum, Low_non_null_sum +==== Non_null_sum, high_non_null_sum, low_non_null_sum The `non_null_sum` function is useful if your data is sparse. Buckets without values are ignored and buckets with a zero value are analyzed. @@ -110,6 +105,7 @@ is not applicable for this function. "byFieldName" : "employee" } -------------------------------------------------- +// NOTCONSOLE If you use this `high_non_null_sum` function in a detector in your job, it models the total `amount_approved` for each employee. It ignores any buckets diff --git a/x-pack/docs/en/ml/functions/time.asciidoc b/x-pack/docs/en/ml/functions/time.asciidoc index a8067e2ca13..ac8199307f1 100644 --- a/x-pack/docs/en/ml/functions/time.asciidoc +++ b/x-pack/docs/en/ml/functions/time.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-time-functions]] -=== Time Functions +=== Time functions The time functions detect events that happen at unusual times, either of the day or of the week. These functions can be used to find unusual patterns of behavior, @@ -60,6 +61,7 @@ For more information about those properties, see "by_field_name" : "process" } -------------------------------------------------- +// NOTCONSOLE If you use this `time_of_day` function in a detector in your job, it models when events occur throughout a day for each process. It detects when an @@ -91,6 +93,7 @@ For more information about those properties, see "over_field_name" : "workstation" } -------------------------------------------------- +// NOTCONSOLE If you use this `time_of_week` function in a detector in your job, it models when events occur throughout the week for each `eventcode`. 
It detects diff --git a/x-pack/docs/en/ml/populations.asciidoc b/x-pack/docs/en/ml/populations.asciidoc index 53e10ce8d41..bf0dd2ad7d7 100644 --- a/x-pack/docs/en/ml/populations.asciidoc +++ b/x-pack/docs/en/ml/populations.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-configuring-pop]] -=== Performing Population Analysis +=== Performing population analysis Entities or events in your data can be considered anomalous when: diff --git a/x-pack/docs/en/ml/stopping-ml.asciidoc b/x-pack/docs/en/ml/stopping-ml.asciidoc index 862fe5cf050..c0be2d947cd 100644 --- a/x-pack/docs/en/ml/stopping-ml.asciidoc +++ b/x-pack/docs/en/ml/stopping-ml.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[stopping-ml]] -== Stopping Machine Learning +== Stopping machine learning An orderly shutdown of {ml} ensures that: @@ -24,10 +25,10 @@ request stops the `feed1` {dfeed}: [source,js] -------------------------------------------------- -POST _xpack/ml/datafeeds/feed1/_stop +POST _xpack/ml/datafeeds/datafeed-total-requests/_stop -------------------------------------------------- // CONSOLE -// TEST[skip:todo] +// TEST[setup:server_metrics_startdf] NOTE: You must have `manage_ml`, or `manage` cluster privileges to stop {dfeeds}. For more information, see <>. @@ -63,10 +64,10 @@ example, the following request closes the `job1` job: [source,js] -------------------------------------------------- -POST _xpack/ml/anomaly_detectors/job1/_close +POST _xpack/ml/anomaly_detectors/total-requests/_close -------------------------------------------------- // CONSOLE -// TEST[skip:todo] +// TEST[setup:server_metrics_openjob] NOTE: You must have `manage_ml`, or `manage` cluster privileges to stop {dfeeds}. For more information, see <>. diff --git a/x-pack/docs/en/ml/transforms.asciidoc b/x-pack/docs/en/ml/transforms.asciidoc index 9789518081b..c4b4d560297 100644 --- a/x-pack/docs/en/ml/transforms.asciidoc +++ b/x-pack/docs/en/ml/transforms.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-configuring-transform]] -=== Transforming Data With Script Fields +=== Transforming data with script fields If you use {dfeeds}, you can add scripts to transform your data before it is analyzed. 
{dfeeds-cap} contain an optional `script_fields` property, where @@ -602,10 +603,3 @@ The preview {dfeed} API returns the following results, which show that ] ---------------------------------- // TESTRESPONSE - -//// -==== Configuring Script Fields in {dfeeds-cap} - -//TO-DO: Add Kibana steps from -//https://github.com/elastic/prelert-legacy/wiki/Transforming-data-with-script_fields#transforming-geo_point-data-to-a-workable-string-format -//// diff --git a/x-pack/docs/en/rest-api/licensing.asciidoc b/x-pack/docs/en/rest-api/licensing.asciidoc deleted file mode 100644 index b30590630f7..00000000000 --- a/x-pack/docs/en/rest-api/licensing.asciidoc +++ /dev/null @@ -1,22 +0,0 @@ -[role="xpack"] -[[licensing-apis]] -== Licensing APIs - -You can use the following APIs to manage your licenses: - -* <> -* <> -* <> -* <> -* <> -* <> -* <> - - -include::license/delete-license.asciidoc[] -include::license/get-license.asciidoc[] -include::license/get-trial-status.asciidoc[] -include::license/start-trial.asciidoc[] -include::license/get-basic-status.asciidoc[] -include::license/start-basic.asciidoc[] -include::license/update-license.asciidoc[] diff --git a/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc b/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc index 73458f31791..ef8dad39dba 100644 --- a/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc +++ b/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc @@ -44,7 +44,7 @@ calendar: DELETE _xpack/ml/calendars/planned-outages/events/LS8LJGEBMTCMA-qz49st -------------------------------------------------- // CONSOLE -// TEST[skip:automatically-generated ID] +// TEST[catch:missing] When the event is removed, you receive the following results: [source,js] @@ -53,3 +53,4 @@ When the event is removed, you receive the following results: "acknowledged": true } ---- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc b/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc index f688ef91cfe..ab8a0de442c 100644 --- a/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc +++ b/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc @@ -28,7 +28,6 @@ see <>. You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. -//<>. ==== Examples @@ -45,7 +44,6 @@ POST _xpack/ml/anomaly_detectors/_validate/detector } -------------------------------------------------- // CONSOLE -// TEST[skip:todo] When the validation completes, you receive the following results: [source,js] @@ -54,3 +52,4 @@ When the validation completes, you receive the following results: "acknowledged": true } ---- +// TESTRESPONSE \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/ml/validate-job.asciidoc b/x-pack/docs/en/rest-api/ml/validate-job.asciidoc index 61d0c70514e..0ccc5bc04e1 100644 --- a/x-pack/docs/en/rest-api/ml/validate-job.asciidoc +++ b/x-pack/docs/en/rest-api/ml/validate-job.asciidoc @@ -28,7 +28,6 @@ see <>. You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. -//<>. 
==== Examples @@ -56,7 +55,6 @@ POST _xpack/ml/anomaly_detectors/_validate } -------------------------------------------------- // CONSOLE -// TEST[skip:todo] When the validation is complete, you receive the following results: [source,js] @@ -65,3 +63,4 @@ When the validation is complete, you receive the following results: "acknowledged": true } ---- +// TESTRESPONSE \ No newline at end of file diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 2e4caff1a72..e58c5eda063 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -254,7 +254,11 @@ public class XPackLicenseState { public XPackLicenseState(Settings settings) { this.isSecurityEnabled = XPackSettings.SECURITY_ENABLED.get(settings); - this.isSecurityExplicitlyEnabled = settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) && isSecurityEnabled; + // 6.0+ requires TLS for production licenses, so if TLS is enabled and security is enabled + // we can interpret this as an explicit enabling of security if the security enabled + // setting is not explicitly set + this.isSecurityExplicitlyEnabled = isSecurityEnabled && + (settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) || XPackSettings.TRANSPORT_SSL_ENABLED.get(settings)); } /** Updates the current state of the license, which will change what features are available. */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java index 415e601a40a..7acbfa49368 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackInfoResponse; @@ -31,10 +30,9 @@ public class TransportXPackInfoAction extends HandledTransportAction featureSets) { - super(settings, XPackInfoAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, - XPackInfoRequest::new); + ActionFilters actionFilters, LicenseService licenseService, Set featureSets) { + super(settings, XPackInfoAction.NAME, threadPool, transportService, actionFilters, + XPackInfoRequest::new); this.licenseService = licenseService; this.featureSets = featureSets; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java index d897d55e5fd..ce06712722c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java @@ -13,6 +13,7 @@ import 
io.netty.channel.ChannelPromise; import io.netty.handler.ssl.SslHandler; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; @@ -111,7 +112,7 @@ public class SecurityNetty4Transport extends Netty4Transport { protected void onException(TcpChannel channel, Exception e) { if (!lifecycle.started()) { // just close and ignore - we are already stopped and just need to make sure we release all resources - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (SSLExceptionHelper.isNotSslRecordException(e)) { if (logger.isTraceEnabled()) { logger.trace( @@ -119,21 +120,21 @@ public class SecurityNetty4Transport extends Netty4Transport { } else { logger.warn("received plaintext traffic on an encrypted channel, closing connection {}", channel); } - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (SSLExceptionHelper.isCloseDuringHandshakeException(e)) { if (logger.isTraceEnabled()) { logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", channel), e); } else { logger.warn("connection {} closed during handshake", channel); } - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (SSLExceptionHelper.isReceivedCertificateUnknownException(e)) { if (logger.isTraceEnabled()) { logger.trace(new ParameterizedMessage("client did not trust server's certificate, closing connection {}", channel), e); } else { logger.warn("client did not trust this server's certificate, closing connection {}", channel); } - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else { super.onException(channel, e); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java index 3670efeeeee..16e2a74dac8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.ssl.action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -28,10 +27,9 @@ public class TransportGetCertificateInfoAction extends HandledTransportAction null, null, Collections.emptySet()); TransportXPackInfoAction action = new TransportXPackInfoAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), licenseService, featureSets); + mock(ActionFilters.class), licenseService, featureSets); License license = mock(License.class); long expiryDate = randomLong(); diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java 
b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 994f5c48578..07035967d2a 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -58,6 +57,7 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Supplier; /** * Performs a series of elasticsearch queries and aggregations to explore @@ -83,10 +83,10 @@ public class TransportGraphExploreAction extends HandledTransportAction)GraphExploreRequest::new); this.searchAction = transportSearchAction; this.licenseState = licenseState; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java index 083d4ce5b15..bc1d50c7cd9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -64,13 +63,12 @@ public class TransportCloseJobAction extends TransportTasksAction) DeleteCalendarAction.Request::new); this.client = client; this.jobManager = jobManager; this.jobProvider = jobProvider; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java index 91ae2c118c8..2e4b688fa26 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; @@ -44,10 +43,9 @@ public class TransportDeleteCalendarEventAction extends HandledTransportAction) DeleteFilterAction.Request::new); this.clusterService = clusterService; this.client = client; } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java index 36abf5f95d4..23ca3693df6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -39,10 +38,10 @@ public class TransportDeleteModelSnapshotAction extends HandledTransportAction) GetBucketsAction.Request::new); this.jobProvider = jobProvider; this.jobManager = jobManager; this.client = client; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java index c81bb264223..da2d2d7970f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -27,6 +26,7 @@ import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import java.util.Collections; import java.util.List; +import java.util.function.Supplier; public class TransportGetCalendarEventsAction extends HandledTransportAction { @@ -35,12 +35,10 @@ public class TransportGetCalendarEventsAction extends HandledTransportAction) GetCalendarEventsAction.Request::new); this.jobProvider = jobProvider; this.clusterService = clusterService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java index 46cfac5519d..5645d1e1f2d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -27,12 +26,10 @@ public class TransportGetCalendarsAction extends HandledTransportAction) 
GetCategoriesAction.Request::new); this.jobProvider = jobProvider; this.client = client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java index 4264fa2fc2f..28d75956df0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; @@ -50,12 +49,10 @@ public class TransportGetFiltersAction extends HandledTransportAction) GetInfluencersAction.Request::new); this.jobProvider = jobProvider; this.client = client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index 78bfe2c7bc6..c0b383b55ce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Tuple; @@ -56,10 +55,9 @@ public class TransportGetJobsStatsAction extends TransportTasksAction) GetOverallBucketsAction.Request::new); this.clusterService = clusterService; this.client = client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java index 879b8c5bba0..7c5fee97d56 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java @@ -5,11 +5,12 @@ */ package org.elasticsearch.xpack.ml.action; +import java.util.function.Supplier; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -27,10 +28,9 @@ public class TransportGetRecordsAction extends HandledTransportAction) GetRecordsAction.Request::new); this.jobProvider = jobProvider; this.jobManager = jobManager; this.client = client; 
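
NOTE (reading aid, not part of the patch): the ML transport actions above all receive the same mechanical change. The sketch below, built around a hypothetical ExampleAction that does not exist in this change, shows the pattern: the IndexNameExpressionResolver constructor parameter is dropped, and the request constructor reference is passed with an explicit (Supplier<Request>) cast so the Supplier-based HandledTransportAction overload is selected rather than the Writeable.Reader one (the same disambiguation seen in TransportNoopSearchAction).

[source,java]
--------------------------------------------------
// Hypothetical sketch only; ExampleAction and its Request/Response are illustrative names.
import java.util.function.Supplier;

import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

public class TransportExampleAction extends HandledTransportAction<ExampleAction.Request, ExampleAction.Response> {

    @Inject
    public TransportExampleAction(Settings settings, ThreadPool threadPool, TransportService transportService,
                                  ActionFilters actionFilters) {
        // No IndexNameExpressionResolver argument any more; the cast picks the
        // Supplier-based super-constructor overload instead of the Writeable.Reader one.
        super(settings, ExampleAction.NAME, threadPool, transportService, actionFilters,
                (Supplier<ExampleAction.Request>) ExampleAction.Request::new);
    }

    // doExecute and the rest of the class are untouched by this refactor and omitted here.
}
--------------------------------------------------
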
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java index 8916f6ba084..0d3b8dfa38d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -35,10 +34,9 @@ public class TransportIsolateDatafeedAction extends TransportTasksAction requestSupplier, + Supplier requestSupplier, Supplier responseSupplier, String nodeExecutor, AutodetectProcessManager processManager) { - super(settings, actionName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - requestSupplier, responseSupplier, nodeExecutor); + super(settings, actionName, threadPool, clusterService, transportService, actionFilters, + requestSupplier, responseSupplier, nodeExecutor); this.processManager = processManager; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java index 9d2eae10737..cc2f70eadea 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; @@ -37,10 +36,9 @@ public class TransportKillProcessAction extends TransportJobTaskAction { @@ -31,10 +31,9 @@ public class TransportMlInfoAction extends HandledTransportAction) MlInfoAction.Request::new); this.clusterService = clusterService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java index 113a8da7be3..1fbbb7a3681 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; @@ 
-31,11 +30,9 @@ public class TransportPersistJobAction extends TransportJobTaskAction { @@ -41,10 +41,9 @@ public class TransportPreviewDatafeedAction extends HandledTransportAction) PreviewDatafeedAction.Request::new); this.client = client; this.clusterService = clusterService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java index 1393d663fb2..c135ab8322b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -30,6 +29,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.Collections; +import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -39,12 +39,10 @@ public class TransportPutCalendarAction extends HandledTransportAction) PutCalendarAction.Request::new); this.client = client; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java index fc14ef085dd..a8cd2cc8134 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -30,6 +29,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.Collections; +import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -42,10 +42,9 @@ public class TransportPutFilterAction extends HandledTransportAction) PutFilterAction.Request::new); this.client = client; this.jobManager = jobManager; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java index faf6aa80b7a..e7455053d52 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java @@ -14,7 +14,6 @@ import 
org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -53,11 +52,10 @@ public class TransportStopDatafeedAction extends TransportTasksAction) ValidateDetectorAction.Request::new); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java index 9cd1e5e6aca..990c673a8c1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java @@ -5,10 +5,11 @@ */ package org.elasticsearch.xpack.ml.action; +import java.util.function.Supplier; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -20,9 +21,9 @@ public class TransportValidateJobConfigAction extends HandledTransportAction) ValidateJobConfigAction.Request::new); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java index 0e7ad29c54d..27d3a68d005 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; @@ -265,7 +264,7 @@ public class TransportCloseJobActionTests extends ESTestCase { when(clusterService.state()).thenReturn(clusterState); TransportCloseJobAction transportAction = new TransportCloseJobAction(Settings.EMPTY, - mock(TransportService.class), mock(ThreadPool.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), + mock(TransportService.class), mock(ThreadPool.class), mock(ActionFilters.class), clusterService, mock(Client.class), mock(Auditor.class), mock(PersistentTasksService.class)); AtomicBoolean gotResponse = new AtomicBoolean(false); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java index 0f7ad755c58..b30d8b357c1 100644 --- 
a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; @@ -41,11 +40,9 @@ public class TransportMonitoringBulkAction extends HandledTransportAction { private final ClusterService clusterService; @Inject - public TransportGetRollupCapsAction(Settings settings, - TransportService transportService, - ClusterService clusterService, - ThreadPool threadPool, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { + public TransportGetRollupCapsAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters) { super(settings, GetRollupCapsAction.NAME, threadPool, transportService, actionFilters, - indexNameExpressionResolver, GetRollupCapsAction.Request::new); + (Supplier) GetRollupCapsAction.Request::new); this.clusterService = clusterService; } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java index 283f3a37423..b0adf6f12b4 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; @@ -40,12 +39,9 @@ public class TransportGetRollupJobAction extends TransportTasksAction requestInterceptors; if (XPackSettings.DLS_FLS_ENABLED.get(settings)) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java index 1f7a307396a..022f2e71672 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction; @@ -32,10 +31,9 @@ public class TransportClearRealmCacheAction extends TransportNodesAction) AuthenticateRequest::new); this.securityContext = securityContext; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index 047b47dfa25..8f0256b7e77 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.security.action.user; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -27,10 +26,8 @@ public class TransportChangePasswordAction extends HandledTransportAction) DeleteUserRequest::new); this.usersStore = usersStore; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java index 4a57a918c1a..f40db20a339 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -38,10 +37,8 @@ public class TransportGetUsersAction extends HandledTransportAction isStateNotRecovered = e.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)); } @Override @@ -98,7 +104,13 @@ public class SecurityServerTransportInterceptor extends AbstractComponent implem @Override public void sendRequest(Transport.Connection connection, String action, TransportRequest request, TransportRequestOptions options, TransportResponseHandler handler) { - if (licenseState.isSecurityEnabled() && licenseState.isAuthAllowed()) { + // make a local copy of isStateNotRecovered as this is a volatile variable and it + // is used multiple times in the method. 
The copy to a local variable allows us to + // guarantee we use the same value wherever we would check the value for the state + // being recovered + final boolean stateNotRecovered = isStateNotRecovered; + final boolean sendWithAuth = (licenseState.isSecurityEnabled() && licenseState.isAuthAllowed()) || stateNotRecovered; + if (sendWithAuth) { // the transport in core normally does this check, BUT since we are serializing to a string header we need to do it // ourselves otherwise we wind up using a version newer than what we can actually send final Version minVersion = Version.min(connection.getVersion(), Version.CURRENT); @@ -108,20 +120,20 @@ public class SecurityServerTransportInterceptor extends AbstractComponent implem if (AuthorizationUtils.shouldReplaceUserWithSystem(threadPool.getThreadContext(), action)) { securityContext.executeAsUser(SystemUser.INSTANCE, (original) -> sendWithUser(connection, action, request, options, new ContextRestoreResponseHandler<>(threadPool.getThreadContext().wrapRestorable(original) - , handler), sender), minVersion); + , handler), sender, stateNotRecovered), minVersion); } else if (AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadPool.getThreadContext())) { AuthorizationUtils.switchUserBasedOnActionOriginAndExecute(threadPool.getThreadContext(), securityContext, (original) -> sendWithUser(connection, action, request, options, new ContextRestoreResponseHandler<>(threadPool.getThreadContext().wrapRestorable(original) - , handler), sender)); + , handler), sender, stateNotRecovered)); } else if (securityContext.getAuthentication() != null && securityContext.getAuthentication().getVersion().equals(minVersion) == false) { // re-write the authentication since we want the authentication version to match the version of the connection securityContext.executeAfterRewritingAuthentication(original -> sendWithUser(connection, action, request, options, - new ContextRestoreResponseHandler<>(threadPool.getThreadContext().wrapRestorable(original), handler), sender), - minVersion); + new ContextRestoreResponseHandler<>(threadPool.getThreadContext().wrapRestorable(original), handler), sender, + stateNotRecovered), minVersion); } else { - sendWithUser(connection, action, request, options, handler, sender); + sendWithUser(connection, action, request, options, handler, sender, stateNotRecovered); } } else { sender.sendRequest(connection, action, request, options, handler); @@ -132,9 +144,10 @@ public class SecurityServerTransportInterceptor extends AbstractComponent implem private void sendWithUser(Transport.Connection connection, String action, TransportRequest request, TransportRequestOptions options, TransportResponseHandler handler, - AsyncSender sender) { - // There cannot be a request outgoing from this node that is not associated with a user. 
- if (securityContext.getAuthentication() == null) { + AsyncSender sender, final boolean stateNotRecovered) { + // There cannot be a request outgoing from this node that is not associated with a user + // unless we do not know the actual license of the cluster + if (securityContext.getAuthentication() == null && stateNotRecovered == false) { // we use an assertion here to ensure we catch this in our testing infrastructure, but leave the ISE for cases we do not catch // in tests and may be hit by a user assertNoAuthentication(action); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java index ac586c49457..9667ca675b4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java @@ -7,16 +7,16 @@ package org.elasticsearch.xpack.security.transport.netty4; import io.netty.channel.Channel; import io.netty.channel.ChannelHandler; -import io.netty.channel.ChannelHandlerContext; import io.netty.handler.ssl.SslHandler; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.netty4.Netty4HttpServerTransport; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.netty4.Netty4Utils; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.transport.filter.IPFilter; @@ -57,37 +57,36 @@ public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport } @Override - protected void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { - Netty4Utils.maybeDie(cause); + protected void onException(HttpChannel channel, Exception e) { if (!lifecycle.started()) { return; } - if (isNotSslRecordException(cause)) { + if (isNotSslRecordException(e)) { if (logger.isTraceEnabled()) { logger.trace(new ParameterizedMessage("received plaintext http traffic on a https channel, closing connection {}", - ctx.channel()), cause); + channel), e); } else { - logger.warn("received plaintext http traffic on a https channel, closing connection {}", ctx.channel()); + logger.warn("received plaintext http traffic on a https channel, closing connection {}", channel); } - ctx.channel().close(); - } else if (isCloseDuringHandshakeException(cause)) { + CloseableChannel.closeChannel(channel); + } else if (isCloseDuringHandshakeException(e)) { if (logger.isTraceEnabled()) { - logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", ctx.channel()), cause); + logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", channel), e); } else { - logger.warn("connection {} closed during ssl handshake", ctx.channel()); + logger.warn("connection {} closed during ssl handshake", channel); } - ctx.channel().close(); - } else if (isReceivedCertificateUnknownException(cause)) { 
+ CloseableChannel.closeChannel(channel); + } else if (isReceivedCertificateUnknownException(e)) { if (logger.isTraceEnabled()) { logger.trace(new ParameterizedMessage("http client did not trust server's certificate, closing connection {}", - ctx.channel()), cause); + channel), e); } else { - logger.warn("http client did not trust this server's certificate, closing connection {}", ctx.channel()); + logger.warn("http client did not trust this server's certificate, closing connection {}", channel); } - ctx.channel().close(); + CloseableChannel.closeChannel(channel); } else { - super.exceptionCaught(ctx, cause); + super.onException(channel, e); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java index c143978468d..4080574713c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java @@ -365,6 +365,7 @@ public class SSLDriver implements AutoCloseable { @Override public void read(InboundChannelBuffer buffer) throws SSLException { + ensureApplicationBufferSize(buffer); boolean continueUnwrap = true; while (continueUnwrap && networkReadBuffer.position() > 0) { networkReadBuffer.flip(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java index 39ce1a0150c..5315a944f77 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java @@ -14,14 +14,14 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.nio.InboundChannelBuffer; +import org.elasticsearch.nio.NioSelector; import org.elasticsearch.nio.NioSocketChannel; import org.elasticsearch.nio.ServerChannelContext; -import org.elasticsearch.nio.NioSelector; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.nio.NioTcpChannel; +import org.elasticsearch.transport.nio.NioTcpServerChannel; import org.elasticsearch.transport.nio.NioTransport; -import org.elasticsearch.transport.nio.TcpNioServerSocketChannel; -import org.elasticsearch.transport.nio.TcpNioSocketChannel; import org.elasticsearch.transport.nio.TcpReadWriteHandler; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.transport.netty4.SecurityNetty4Transport; @@ -95,11 +95,6 @@ public class SecurityNioTransport extends NioTransport { super.acceptChannel(channel); } - @Override - protected void exceptionCaught(NioSocketChannel channel, Exception exception) { - super.exceptionCaught(channel, exception); - } - private class SecurityTcpChannelFactory extends TcpChannelFactory { private final String profileName; @@ -116,11 +111,11 @@ public class SecurityNioTransport extends NioTransport { } @Override - public TcpNioSocketChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { + public NioTcpChannel createChannel(NioSelector selector, 
SocketChannel channel) throws IOException { SSLConfiguration defaultConfig = profileConfiguration.get(TcpTransport.DEFAULT_PROFILE); SSLEngine sslEngine = sslService.createSSLEngine(profileConfiguration.getOrDefault(profileName, defaultConfig), null, -1); SSLDriver sslDriver = new SSLDriver(sslEngine, isClient); - TcpNioSocketChannel nioChannel = new TcpNioSocketChannel(profileName, channel); + NioTcpChannel nioChannel = new NioTcpChannel(profileName, channel); Supplier pageSupplier = () -> { Recycler.V bytes = pageCacheRecycler.bytePage(false); return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close); @@ -128,15 +123,15 @@ public class SecurityNioTransport extends NioTransport { TcpReadWriteHandler readWriteHandler = new TcpReadWriteHandler(nioChannel, SecurityNioTransport.this); InboundChannelBuffer buffer = new InboundChannelBuffer(pageSupplier); - Consumer exceptionHandler = (e) -> exceptionCaught(nioChannel, e); + Consumer exceptionHandler = (e) -> onException(nioChannel, e); SSLChannelContext context = new SSLChannelContext(nioChannel, selector, exceptionHandler, sslDriver, readWriteHandler, buffer); nioChannel.setContext(context); return nioChannel; } @Override - public TcpNioServerSocketChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { - TcpNioServerSocketChannel nioChannel = new TcpNioServerSocketChannel(profileName, channel); + public NioTcpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { + NioTcpServerChannel nioChannel = new NioTcpServerChannel(profileName, channel); Consumer exceptionHandler = (e) -> logger.error(() -> new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); Consumer acceptor = SecurityNioTransport.this::acceptChannel; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java index 7fe510f3b4e..2297a5353b6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java @@ -23,11 +23,16 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.node.MockNode; +import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.MockHttpTransport; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.transport.Netty4Plugin; import org.elasticsearch.transport.Transport; @@ -41,7 +46,10 @@ import org.elasticsearch.xpack.security.LocalStateSecurity; import org.junit.After; import org.junit.Before; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -115,6 +123,18 @@ public class LicensingTests extends 
SecurityIntegTestCase { return plugins; } + @Override + protected int maxNumberOfNodes() { + return super.maxNumberOfNodes() + 1; + } + + @Override + public Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false) + .build(); + } + @Before public void resetLicensing() { enableLicensing(); @@ -250,6 +270,34 @@ public class LicensingTests extends SecurityIntegTestCase { } } + public void testNodeJoinWithoutSecurityExplicitlyEnabled() throws Exception { + License.OperationMode mode = randomFrom(License.OperationMode.GOLD, License.OperationMode.PLATINUM, License.OperationMode.STANDARD); + enableLicensing(mode); + ensureGreen(); + + Path home = createTempDir(); + Path conf = home.resolve("config"); + Files.createDirectories(conf); + Settings nodeSettings = Settings.builder() + .put(nodeSettings(maxNumberOfNodes() - 1).filter(s -> "xpack.security.enabled".equals(s) == false)) + .put("node.name", "my-test-node") + .put("network.host", "localhost") + .put("cluster.name", internalCluster().getClusterName()) + .put("discovery.zen.minimum_master_nodes", + internalCluster().getInstance(Settings.class).get("discovery.zen.minimum_master_nodes")) + .put("path.home", home) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false) + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "test-zen") + .put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "test-zen") + .build(); + Collection> mockPlugins = Arrays.asList(LocalStateSecurity.class, TestZenDiscovery.TestPlugin.class, + MockHttpTransport.TestPlugin.class); + try (Node node = new MockNode(nodeSettings, mockPlugins)) { + node.start(); + ensureStableCluster(cluster().size() + 1); + } + } + private static void assertElasticsearchSecurityException(ThrowingRunnable runnable) { ElasticsearchSecurityException ee = expectThrows(ElasticsearchSecurityException.class, runnable); assertThat(ee.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackField.SECURITY)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index 1690ab652c0..8ad1c61029a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -169,7 +169,8 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas @Override public Collection> nodePlugins() { - return Arrays.asList(LocalStateSecurity.class, Netty4Plugin.class, ReindexPlugin.class, CommonAnalysisPlugin.class); + return Arrays.asList(LocalStateSecurity.class, Netty4Plugin.class, ReindexPlugin.class, CommonAnalysisPlugin.class, + InternalSettingsPlugin.class); } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java index 0f901830bf1..e7eb1fcc8d7 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.security.action.role; import 
org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -46,7 +45,7 @@ public class TransportDeleteRoleActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); request.name(roleName); @@ -77,7 +76,7 @@ public class TransportDeleteRoleActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); request.name(roleName); @@ -121,7 +120,7 @@ public class TransportDeleteRoleActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); request.name(roleName); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java index 431d6cc613c..9c19bf2097d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.security.action.role; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; @@ -46,7 +45,7 @@ public class TransportGetRolesActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); + rolesStore, 
transportService, new ReservedRolesStore()); final int size = randomIntBetween(1, ReservedRolesStore.names().size()); final List names = randomSubsetOf(size, ReservedRolesStore.names()); @@ -92,7 +91,7 @@ public class TransportGetRolesActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); + rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); request.names(storeRoleDescriptors.stream().map(RoleDescriptor::getName).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY)); @@ -144,7 +143,7 @@ public class TransportGetRolesActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); + rolesStore, transportService, new ReservedRolesStore()); final List expectedNames = new ArrayList<>(); if (all) { @@ -208,7 +207,7 @@ public class TransportGetRolesActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); + rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); request.names(storeRoleDescriptors.stream().map(RoleDescriptor::getName).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java index 0ae2477ba03..94a69cc0442 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.security.action.role; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -47,7 +46,7 @@ public class TransportPutRoleActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), 
mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); @@ -78,7 +77,7 @@ public class TransportPutRoleActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); final boolean created = randomBoolean(); PutRoleRequest request = new PutRoleRequest(); @@ -122,7 +121,7 @@ public class TransportPutRoleActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java index be3c86d6a6a..cc67a4facb0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java @@ -14,7 +14,6 @@ import java.util.concurrent.atomic.AtomicReference; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; @@ -47,9 +46,8 @@ public class TransportGetRoleMappingsActionTests extends ESTestCase { store = mock(NativeRoleMappingStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - action = new TransportGetRoleMappingsAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), - transportService, store); + action = new TransportGetRoleMappingsAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + transportService, store); namesRef = new AtomicReference<>(null); result = Collections.emptyList(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index da9eca7a9b6..3ba584440bb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -13,7 +13,6 @@ import java.util.concurrent.atomic.AtomicReference; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -44,8 +43,7 @@ public class TransportPutRoleMappingActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); action = new TransportPutRoleMappingAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), - transportService, store); + mock(ActionFilters.class), transportService, store); requestRef = new AtomicReference<>(null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 76d888d2c2e..85d1d4a161d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; @@ -173,7 +172,7 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase { TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); final Realms realms = mock(Realms.class); action = new TransportSamlInvalidateSessionAction(settings, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), tokenService, realms); + mock(ActionFilters.class),tokenService, realms); final Path metadata = PathUtils.get(SamlRealm.class.getResource("idp1.xml").toURI()); final Environment env = TestEnvironment.newEnvironment(settings); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index eca52831d9a..79d4978cfd2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; -import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; @@ -184,8 +183,7 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { final TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); final Realms realms = mock(Realms.class); - action = new TransportSamlLogoutAction(settings, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), realms, tokenService); + action = new TransportSamlLogoutAction(settings, threadPool, transportService, mock(ActionFilters.class), realms, tokenService); final Path metadata = PathUtils.get(SamlRealm.class.getResource("idp1.xml").toURI()); final Environment env = TestEnvironment.newEnvironment(settings); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java index 56e714d7a70..20af681f477 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.security.action.user; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -41,7 +40,7 @@ public class TransportAuthenticateActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), securityContext); + mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -67,7 +66,7 @@ public class TransportAuthenticateActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), securityContext); + mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -95,7 +94,7 @@ public class TransportAuthenticateActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportAuthenticateAction action = new 
TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), securityContext); + mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java index 78f6fd26e93..bc1c42f66a5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.security.action.user; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; @@ -53,7 +52,7 @@ public class TransportChangePasswordActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportChangePasswordAction action = new TransportChangePasswordAction(settings, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); request.username(anonymousUser.principal()); @@ -84,7 +83,7 @@ public class TransportChangePasswordActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -126,7 +125,7 @@ public class TransportChangePasswordActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -167,7 +166,7 @@ public class TransportChangePasswordActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); 
TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java index a60a82e87d7..dab63fcc313 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.security.action.user; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -50,7 +49,7 @@ public class TransportDeleteUserActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportDeleteUserAction action = new TransportDeleteUserAction(settings, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(new AnonymousUser(settings).principal()); @@ -79,7 +78,7 @@ public class TransportDeleteUserActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -109,7 +108,7 @@ public class TransportDeleteUserActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(reserved.principal()); @@ -139,7 +138,7 @@ public class TransportDeleteUserActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - 
mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); final boolean found = randomBoolean(); final DeleteUserRequest request = new DeleteUserRequest(user.principal()); @@ -180,7 +179,7 @@ public class TransportDeleteUserActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); final DeleteUserRequest request = new DeleteUserRequest(user.principal()); doAnswer(new Answer() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index 2ad46723682..fdb37b2f5bd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; @@ -93,7 +92,7 @@ public class TransportGetUsersActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, reservedRealm); + usersStore, transportService, reservedRealm); GetUsersRequest request = new GetUsersRequest(); request.usernames(anonymousUser.principal()); @@ -128,7 +127,7 @@ public class TransportGetUsersActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, mock(ReservedRealm.class)); + usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); request.usernames(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -171,7 +170,7 @@ public class TransportGetUsersActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, reservedRealm); + usersStore, 
transportService, reservedRealm); logger.error("names {}", names); GetUsersRequest request = new GetUsersRequest(); @@ -211,7 +210,7 @@ public class TransportGetUsersActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, reservedRealm); + usersStore, transportService, reservedRealm); GetUsersRequest request = new GetUsersRequest(); doAnswer(new Answer() { @@ -258,7 +257,7 @@ public class TransportGetUsersActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, mock(ReservedRealm.class)); + usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); request.usernames(storeUsernames); @@ -306,7 +305,7 @@ public class TransportGetUsersActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, mock(ReservedRealm.class)); + usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); request.usernames(storeUsernames); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java index d4a256b8a0c..cb0f643fd89 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.Settings; @@ -75,8 +74,7 @@ public class TransportHasPrivilegesActionTests extends ESTestCase { return null; }).when(authorizationService).roles(eq(user), any(ActionListener.class)); - action = new TransportHasPrivilegesAction(settings, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), authorizationService); + action = new TransportHasPrivilegesAction(settings, threadPool, transportService, mock(ActionFilters.class), authorizationService); } /** diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java index d059911a680..140508b51a1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -60,7 +59,7 @@ public class TransportPutUserActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutUserAction action = new TransportPutUserAction(settings, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(anonymousUser.principal()); @@ -90,7 +89,7 @@ public class TransportPutUserActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -131,7 +130,7 @@ public class TransportPutUserActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(reserved.principal()); @@ -161,7 +160,7 @@ public class TransportPutUserActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); final boolean isCreate = randomBoolean(); final PutUserRequest request = new PutUserRequest(); @@ -207,7 +206,7 @@ public class TransportPutUserActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, 
mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); final PutUserRequest request = new PutUserRequest(); request.username(user.principal()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java index 09fd9043752..a8076c21cdb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; @@ -65,7 +64,7 @@ public class TransportSetEnabledActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportSetEnabledAction action = new TransportSetEnabledAction(settings, threadPool, transportService, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore); + usersStore); SetEnabledRequest request = new SetEnabledRequest(); request.username(new AnonymousUser(settings).principal()); @@ -103,7 +102,7 @@ public class TransportSetEnabledActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); SetEnabledRequest request = new SetEnabledRequest(); request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -157,7 +156,7 @@ public class TransportSetEnabledActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -209,7 +208,7 @@ public class TransportSetEnabledActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + 
mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -249,7 +248,7 @@ public class TransportSetEnabledActionTests extends ESTestCase { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java index 0bc7c527df3..dd7dda48ae8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java @@ -7,11 +7,17 @@ package org.elasticsearch.xpack.security.transport; import org.elasticsearch.Version; import org.elasticsearch.action.support.DestructiveOperations; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.Transport.Connection; @@ -31,6 +37,7 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; +import org.junit.After; import java.util.Collections; import java.util.concurrent.atomic.AtomicBoolean; @@ -54,25 +61,33 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase { private ThreadContext threadContext; private XPackLicenseState xPackLicenseState; private SecurityContext securityContext; + private ClusterService clusterService; @Override public void setUp() throws Exception { super.setUp(); settings = Settings.builder().put("path.home", createTempDir()).build(); - threadPool = mock(ThreadPool.class); - threadContext = new ThreadContext(settings); - when(threadPool.getThreadContext()).thenReturn(threadContext); + threadPool = new TestThreadPool(getTestName()); + clusterService = ClusterServiceUtils.createClusterService(threadPool); + threadContext = threadPool.getThreadContext(); securityContext = spy(new SecurityContext(settings, threadPool.getThreadContext())); xPackLicenseState = mock(XPackLicenseState.class); when(xPackLicenseState.isAuthAllowed()).thenReturn(true); 
         when(xPackLicenseState.isSecurityEnabled()).thenReturn(true);
     }
 
+    @After
+    public void stopThreadPool() throws Exception {
+        clusterService.close();
+        terminate(threadPool);
+    }
+
     public void testSendAsyncUnlicensed() {
         SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool,
                 mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class),
                 securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY,
-                        Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))));
+                        Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService);
+        ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener
         when(xPackLicenseState.isAuthAllowed()).thenReturn(false);
         AtomicBoolean calledWrappedSender = new AtomicBoolean(false);
         AsyncSender sender = interceptor.interceptSender(new AsyncSender() {
@@ -92,6 +107,46 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase {
         verifyZeroInteractions(securityContext);
     }
 
+    public void testSendAsyncWithStateNotRecovered() {
+        SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool,
+                mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class),
+                securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY,
+                        Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService);
+        final boolean securityEnabled = randomBoolean();
+        final boolean authAllowed = securityEnabled && randomBoolean();
+        when(xPackLicenseState.isAuthAllowed()).thenReturn(authAllowed);
+        when(xPackLicenseState.isSecurityEnabled()).thenReturn(securityEnabled);
+        ClusterState notRecovered = ClusterState.builder(clusterService.state())
+                .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK).build())
+                .build();
+        ClusterServiceUtils.setState(clusterService, notRecovered);
+        assertTrue(clusterService.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK));
+
+        AtomicBoolean calledWrappedSender = new AtomicBoolean(false);
+        AtomicReference<User> sendingUser = new AtomicReference<>();
+        AsyncSender sender = interceptor.interceptSender(new AsyncSender() {
+            @Override
+            public <T extends TransportResponse> void sendRequest(Transport.Connection connection, String action, TransportRequest request,
+                                                                   TransportRequestOptions options, TransportResponseHandler<T> handler) {
+                if (calledWrappedSender.compareAndSet(false, true) == false) {
+                    fail("sender called more than once!");
+                }
+                sendingUser.set(securityContext.getUser());
+            }
+        });
+        Connection connection = mock(Connection.class);
+        when(connection.getVersion()).thenReturn(Version.CURRENT);
+        sender.sendRequest(connection, "internal:foo", null, null, null);
+        assertTrue(calledWrappedSender.get());
+        assertEquals(SystemUser.INSTANCE, sendingUser.get());
+        verify(xPackLicenseState).isSecurityEnabled();
+        if (securityEnabled) {
+            verify(xPackLicenseState).isAuthAllowed();
+        }
+        verify(securityContext).executeAsUser(any(User.class), any(Consumer.class), eq(Version.CURRENT));
+        verifyNoMoreInteractions(xPackLicenseState);
+    }
+
     public void testSendAsync() throws Exception {
         final User authUser = randomBoolean() ?
new User("authenticator") : null; final User user = new User("test", randomRoles(), authUser); @@ -100,7 +155,8 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase { SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool, mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService); + ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener AtomicBoolean calledWrappedSender = new AtomicBoolean(false); AtomicReference sendingUser = new AtomicReference<>(); @@ -136,7 +192,8 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase { SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool, mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService); + ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener AtomicBoolean calledWrappedSender = new AtomicBoolean(false); AtomicReference sendingUser = new AtomicReference<>(); @@ -167,11 +224,12 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase { SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool, mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))) { + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService) { @Override void assertNoAuthentication(String action) { } }; + ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener assertNull(securityContext.getUser()); AsyncSender sender = interceptor.interceptSender(new AsyncSender() { @@ -203,7 +261,8 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase { SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool, mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService); + ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener AtomicBoolean calledWrappedSender = new AtomicBoolean(false); AtomicReference sendingUser = new AtomicReference<>(); @@ -243,7 +302,8 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase { SecurityServerTransportInterceptor interceptor = new 
SecurityServerTransportInterceptor(settings, threadPool, mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService); + ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener AtomicBoolean calledWrappedSender = new AtomicBoolean(false); AtomicReference sendingUser = new AtomicReference<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java index 3ef298f3f23..ec925f43abe 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.http.NullDispatcher; -import org.elasticsearch.http.netty4.Netty4HttpMockUtil; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackSettings; @@ -26,7 +25,6 @@ import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.junit.Before; import javax.net.ssl.SSLEngine; - import java.nio.file.Path; import java.util.Collections; import java.util.Locale; @@ -65,7 +63,6 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase { SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -82,7 +79,6 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase { SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -99,7 +95,6 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase { SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); 
ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(true)); @@ -116,7 +111,6 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase { SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -131,7 +125,6 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase { SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); ChannelHandler handler = transport.configureServerChannelHandler(); EmbeddedChannel ch = new EmbeddedChannel(handler); SSLEngine defaultEngine = ch.pipeline().get(SslHandler.class).engine(); @@ -144,7 +137,6 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase { sslService = new SSLService(settings, TestEnvironment.newEnvironment(settings)); transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); handler = transport.configureServerChannelHandler(); ch = new EmbeddedChannel(handler); SSLEngine customEngine = ch.pipeline().get(SslHandler.class).engine(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index 0a7ee13b9e2..c5a6a525d4e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.MockSecureSettings; @@ -118,7 +119,7 @@ public class SimpleSecurityNioTransportTests extends AbstractSimpleTransportTest protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; - TcpChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), 
true); + CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } public void testConnectException() throws UnknownHostException { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java index 68752928166..3cbb2f8a1bc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -25,13 +25,10 @@ public class TransportSqlClearCursorAction extends HandledTransportAction) SqlClearCursorRequest::new); this.planExecutor = planExecutor; this.sqlLicenseChecker = sqlLicenseChecker; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 46429e2d508..044683a29ad 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -9,9 +9,9 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -34,13 +34,10 @@ public class TransportSqlQueryAction extends HandledTransportAction) SqlQueryRequest::new); this.planExecutor = planExecutor; this.sqlLicenseChecker = sqlLicenseChecker; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java index 8f494231727..0df3b2ad1bb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -26,11 +26,10 @@ public class TransportSqlTranslateAction extends HandledTransportAction) SqlTranslateRequest::new); this.planExecutor = planExecutor; this.sqlLicenseChecker = sqlLicenseChecker; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java index 363857f2766..cdb1479eec5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.LicenseUtils; @@ -26,9 +25,8 @@ public abstract class WatcherTransportAction request) { - super(settings, actionName, threadPool, transportService, actionFilters, request, indexNameExpressionResolver); + ActionFilters actionFilters, XPackLicenseState licenseState, Writeable.Reader request) { + super(settings, actionName, threadPool, transportService, actionFilters, request); this.licenseState = licenseState; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java index 52c63cab69c..6a31b0e5cb0 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.Preference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -56,10 +55,9 @@ public class TransportAckWatchAction extends WatcherTransportAction) DeleteWatchRequest::new); this.client = client; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java index 2a199c2b3eb..6ccc7518d8b 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import 
org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.Preference; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; @@ -66,11 +65,10 @@ public class TransportExecuteWatchAction extends WatcherTransportAction