diff --git a/.gitignore b/.gitignore index 41a151f160c..8b2a7335ade 100644 --- a/.gitignore +++ b/.gitignore @@ -20,10 +20,8 @@ nbactions.xml .gradle/ build/ -# maven stuff (to be removed when trunk becomes 4.x) -*-execution-hints.log -target/ -dependency-reduced-pom.xml +# vscode stuff +.vscode/ # testing stuff **/.local* @@ -43,4 +41,3 @@ html_docs # random old stuff that we should look at the necessity of... /tmp/ eclipse-build - diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 69e90473a7f..03b2674a4cc 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -107,6 +107,8 @@ We support development in the Eclipse and IntelliJ IDEs. For Eclipse, the minimum version that we support is [Eclipse Oxygen][eclipse] (version 4.7). For IntelliJ, the minimum version that we support is [IntelliJ 2017.2][intellij]. +### Configuring IDEs And Running Tests + Eclipse users can automatically configure their IDE: `./gradlew eclipse` then `File: Import: Existing Projects into Workspace`. Select the option `Search for nested projects`. Additionally you will want to @@ -144,6 +146,9 @@ For IntelliJ, go to For Eclipse, go to `Preferences->Java->Installed JREs` and add `-ea` to `VM Arguments`. + +### Java Language Formatting Guidelines + Please follow these formatting guidelines: * Java indent is 4 spaces @@ -155,6 +160,33 @@ Please follow these formatting guidelines: * IntelliJ: `Preferences/Settings->Editor->Code Style->Java->Imports`. There are two configuration options: `Class count to use import with '*'` and `Names count to use static import with '*'`. Set their values to 99999 or some other absurdly high value. * Don't worry too much about import order. Try not to change it but don't worry about fighting your IDE to stop it from doing so. +### License Headers + +We require license headers on all Java files. You will notice that all the Java files in +the top-level `x-pack` directory contain a separate license from the rest of the repository. 
This +directory contains commercial code that is associated with a separate license. It can be helpful +to have the IDE automatically insert the appropriate license header depending which part of the project +contributions are made to. + +#### IntelliJ: Copyright & Scope Profiles + +To have IntelliJ insert the correct license, it is necessary to create to copyright profiles. +These may potentially be called `apache2` and `commercial`. These can be created in +`Preferences/Settings->Editor->Copyright->Copyright Profiles`. To associate these profiles to +their respective directories, two "Scopes" will need to be created. These can be created in +`Preferences/Settings->Appearances & Behavior->Scopes`. When creating scopes, be sure to choose +the `shared` scope type. Create a scope, `apache2`, with +the associated pattern of `!file[group:x-pack]:*/`. This pattern will exclude all the files contained in +the `x-pack` directory. The other scope, `commercial`, will have the inverse pattern of `file[group:x-pack]:*/`. +The two scopes, together, should account for all the files in the project. To associate the scopes +with their copyright-profiles, go into `Preferences/Settings->Editor>Copyright` and use the `+` to add +the associations `apache2/apache2` and `commercial/commercial`. + +Configuring these options in IntelliJ can be quite buggy, so do not be alarmed if you have to open/close +the settings window and/or restart IntelliJ to see your changes take effect. 
+ +### Creating A Distribution + To create a distribution from the source, simply run: ```sh @@ -169,6 +201,8 @@ The archive distributions (tar and zip) can be found under: `./distribution/archives/(tar|zip)/build/distributions/` +### Running The Full Test Suite + Before submitting your changes, run the test suite to make sure that nothing is broken, with: ```sh diff --git a/build.gradle b/build.gradle index 0c61eecd85c..c538c0cb898 100644 --- a/build.gradle +++ b/build.gradle @@ -20,6 +20,7 @@ import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.BuildPlugin +import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionCollection import org.elasticsearch.gradle.VersionProperties @@ -30,6 +31,7 @@ import org.gradle.api.tasks.wrapper.Wrapper.DistributionType import org.gradle.util.GradleVersion import org.gradle.util.DistributionLocator +import java.nio.file.Files import java.nio.file.Path import java.security.MessageDigest @@ -459,6 +461,59 @@ gradle.projectsEvaluated { } +static void assertLinesInFile(final Path path, final List expectedLines) { + final List actualLines = Files.readAllLines(path) + int line = 0 + for (final String expectedLine : expectedLines) { + final String actualLine = actualLines.get(line) + if (expectedLine != actualLine) { + throw new GradleException("expected line [${line + 1}] in [${path}] to be [${expectedLine}] but was [${actualLine}]") + } + line++ + } +} + +/* + * Check that all generated JARs have our NOTICE.txt and an appropriate + * LICENSE.txt in them. We configurate this in gradle but we'd like to + * be extra paranoid. 
+ */ +subprojects { project -> + project.tasks.withType(Jar).whenTaskAdded { jarTask -> + final Task extract = project.task("extract${jarTask.name.capitalize()}", type: LoggedExec) { + dependsOn jarTask + ext.destination = project.buildDir.toPath().resolve("jar-extracted/${jarTask.name}") + commandLine "${->new File(rootProject.compilerJavaHome, 'bin/jar')}", + 'xf', "${-> jarTask.outputs.files.singleFile}", 'META-INF/LICENSE.txt', 'META-INF/NOTICE.txt' + workingDir destination + doFirst { + project.delete(destination) + Files.createDirectories(destination) + } + } + + final Task checkNotice = project.task("verify${jarTask.name.capitalize()}Notice") { + dependsOn extract + doLast { + final List noticeLines = Files.readAllLines(project.noticeFile.toPath()) + final Path noticePath = extract.destination.resolve('META-INF/NOTICE.txt') + assertLinesInFile(noticePath, noticeLines) + } + } + project.check.dependsOn checkNotice + + final Task checkLicense = project.task("verify${jarTask.name.capitalize()}License") { + dependsOn extract + doLast { + final List licenseLines = Files.readAllLines(project.licenseFile.toPath()) + final Path licensePath = extract.destination.resolve('META-INF/LICENSE.txt') + assertLinesInFile(licensePath, licenseLines) + } + } + project.check.dependsOn checkLicense + } +} + /* Remove assemble on all qa projects because we don't need to publish * artifacts for them. 
*/ gradle.projectsEvaluated { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 59f0eb86d85..ed066ddc96b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -563,16 +563,17 @@ class ClusterFormationTasks { /** Adds a task to execute a command to help setup the cluster */ static Task configureExecTask(String name, Project project, Task setup, NodeInfo node, Object[] execArgs) { - return project.tasks.create(name: name, type: LoggedExec, dependsOn: setup) { - workingDir node.cwd + return project.tasks.create(name: name, type: LoggedExec, dependsOn: setup) { Exec exec -> + exec.workingDir node.cwd + exec.environment 'JAVA_HOME', node.getJavaHome() if (Os.isFamily(Os.FAMILY_WINDOWS)) { - executable 'cmd' - args '/C', 'call' + exec.executable 'cmd' + exec.args '/C', 'call' // On Windows the comma character is considered a parameter separator: // argument are wrapped in an ExecArgWrapper that escapes commas - args execArgs.collect { a -> new EscapeCommaWrapper(arg: a) } + exec.args execArgs.collect { a -> new EscapeCommaWrapper(arg: a) } } else { - commandLine execArgs + exec.commandLine execArgs } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index 4c1decdc3dd..7a0b9f96781 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -37,8 +37,15 @@ class VagrantTestPlugin implements Plugin { 'ubuntu-1404', ] - /** All onboarded archives by default, available for Bats tests even if not used **/ - static List DISTRIBUTION_ARCHIVES = 
['tar', 'rpm', 'deb', 'oss-rpm', 'oss-deb'] + /** All distributions to bring into test VM, whether or not they are used **/ + static List DISTRIBUTIONS = [ + 'archives:tar', + 'archives:oss-tar', + 'packages:rpm', + 'packages:oss-rpm', + 'packages:deb', + 'packages:oss-deb' + ] /** Packages onboarded for upgrade tests **/ static List UPGRADE_FROM_ARCHIVES = ['rpm', 'deb'] @@ -117,13 +124,8 @@ class VagrantTestPlugin implements Plugin { upgradeFromVersion = Version.fromString(upgradeFromVersionRaw) } - DISTRIBUTION_ARCHIVES.each { + DISTRIBUTIONS.each { // Adds a dependency for the current version - if (it == 'tar') { - it = 'archives:tar' - } else { - it = "packages:${it}" - } project.dependencies.add(PACKAGING_CONFIGURATION, project.dependencies.project(path: ":distribution:${it}", configuration: 'default')) } diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 2aa72f0fa7a..609a7cf2ea6 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -535,7 +535,6 @@ - diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java index 500130ed397..d68d3b309af 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java @@ -48,6 +48,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.index.IndexRequest; @@ -75,6 +76,7 @@ 
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.rankeval.RankEvalRequest; +import org.elasticsearch.rest.action.RestFieldCapabilitiesAction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; @@ -536,6 +538,16 @@ public final class Request { return new Request(HttpHead.METHOD_NAME, endpoint, params.getParams(), null); } + static Request fieldCaps(FieldCapabilitiesRequest fieldCapabilitiesRequest) { + Params params = Params.builder(); + params.withFields(fieldCapabilitiesRequest.fields()); + params.withIndicesOptions(fieldCapabilitiesRequest.indicesOptions()); + + String[] indices = fieldCapabilitiesRequest.indices(); + String endpoint = endpoint(indices, "_field_caps"); + return new Request(HttpGet.METHOD_NAME, endpoint, params.getParams(), null); + } + static Request rankEval(RankEvalRequest rankEvalRequest) throws IOException { String endpoint = endpoint(rankEvalRequest.indices(), Strings.EMPTY_ARRAY, "_rank_eval"); Params params = Params.builder(); @@ -712,6 +724,13 @@ public final class Request { return this; } + Params withFields(String[] fields) { + if (fields != null && fields.length > 0) { + return putParam("fields", String.join(",", fields)); + } + return this; + } + Params withMasterTimeout(TimeValue masterTimeout) { return putParam("master_timeout", masterTimeout); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index bf80aa77207..c6d5e947f2c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -30,6 +30,8 @@ import org.elasticsearch.action.bulk.BulkRequest; 
import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetRequest; @@ -501,6 +503,31 @@ public class RestHighLevelClient implements Closeable { headers); } + /** + * Executes a request using the Field Capabilities API. + * + * See Field Capabilities API + * on elastic.co. + */ + public final FieldCapabilitiesResponse fieldCaps(FieldCapabilitiesRequest fieldCapabilitiesRequest, + Header... headers) throws IOException { + return performRequestAndParseEntity(fieldCapabilitiesRequest, Request::fieldCaps, + FieldCapabilitiesResponse::fromXContent, emptySet(), headers); + } + + /** + * Asynchronously executes a request using the Field Capabilities API. + * + * See Field Capabilities API + * on elastic.co. + */ + public final void fieldCapsAsync(FieldCapabilitiesRequest fieldCapabilitiesRequest, + ActionListener listener, + Header... 
headers) { + performRequestAsyncAndParseEntity(fieldCapabilitiesRequest, Request::fieldCaps, + FieldCapabilitiesResponse::fromXContent, listener, emptySet(), headers); + } + protected final Resp performRequestAndParseEntity(Req request, CheckedFunction requestConverter, CheckedFunction entityParser, diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java index f691c60daa5..0fdeb7555a0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.index.IndexRequest; @@ -89,6 +90,7 @@ import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.index.rankeval.RankEvalSpec; import org.elasticsearch.index.rankeval.RatedRequest; import org.elasticsearch.index.rankeval.RestRankEvalAction; +import org.elasticsearch.rest.action.RestFieldCapabilitiesAction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; @@ -108,11 +110,14 @@ import java.io.InputStream; import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Set; 
import java.util.StringJoiner; import java.util.function.Consumer; import java.util.function.Function; @@ -128,6 +133,8 @@ import static org.elasticsearch.index.alias.RandomAliasActionsGenerator.randomAl import static org.elasticsearch.search.RandomSearchRequestGenerator.randomSearchRequest; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.nullValue; public class RequestTests extends ESTestCase { @@ -1213,6 +1220,47 @@ public class RequestTests extends ESTestCase { } } + public void testFieldCaps() { + // Create a random request. + String[] indices = randomIndicesNames(0, 5); + String[] fields = generateRandomStringArray(5, 10, false, false); + + FieldCapabilitiesRequest fieldCapabilitiesRequest = new FieldCapabilitiesRequest() + .indices(indices) + .fields(fields); + + Map indicesOptionsParams = new HashMap<>(); + setRandomIndicesOptions(fieldCapabilitiesRequest::indicesOptions, + fieldCapabilitiesRequest::indicesOptions, + indicesOptionsParams); + + Request request = Request.fieldCaps(fieldCapabilitiesRequest); + + // Verify that the resulting REST request looks as expected. + StringJoiner endpoint = new StringJoiner("/", "/", ""); + String joinedIndices = String.join(",", indices); + if (!joinedIndices.isEmpty()) { + endpoint.add(joinedIndices); + } + endpoint.add("_field_caps"); + + assertEquals(endpoint.toString(), request.getEndpoint()); + assertEquals(4, request.getParameters().size()); + + // Note that we don't check the field param value explicitly, as field names are passed through + // a hash set before being added to the request, and can appear in a non-deterministic order. 
+ assertThat(request.getParameters(), hasKey("fields")); + String[] requestFields = Strings.splitStringByCommaToArray(request.getParameters().get("fields")); + assertEquals(new HashSet<>(Arrays.asList(fields)), + new HashSet<>(Arrays.asList(requestFields))); + + for (Map.Entry param : indicesOptionsParams.entrySet()) { + assertThat(request.getParameters(), hasEntry(param.getKey(), param.getValue())); + } + + assertNull(request.getEntity()); + } + public void testRankEval() throws Exception { RankEvalSpec spec = new RankEvalSpec( Collections.singletonList(new RatedRequest("queryId", Collections.emptyList(), new SearchSourceBuilder())), @@ -1233,7 +1281,6 @@ public class RequestTests extends ESTestCase { assertEquals(3, request.getParameters().size()); assertEquals(expectedParams, request.getParameters()); assertToXContentBody(spec, request.getEntity()); - } public void testSplit() throws IOException { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index 01ef0598100..9828041332b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -27,6 +27,9 @@ import org.apache.http.entity.StringEntity; import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.MultiSearchRequest; @@ -96,14 +99,31 @@ public class SearchIT extends ESRestHighLevelClientTestCase { 
client().performRequest(HttpPut.METHOD_NAME, "/index/type/5", Collections.emptyMap(), doc5); client().performRequest(HttpPost.METHOD_NAME, "/index/_refresh"); - StringEntity doc = new StringEntity("{\"field\":\"value1\"}", ContentType.APPLICATION_JSON); + + StringEntity doc = new StringEntity("{\"field\":\"value1\", \"rating\": 7}", ContentType.APPLICATION_JSON); client().performRequest(HttpPut.METHOD_NAME, "/index1/doc/1", Collections.emptyMap(), doc); doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON); client().performRequest(HttpPut.METHOD_NAME, "/index1/doc/2", Collections.emptyMap(), doc); - doc = new StringEntity("{\"field\":\"value1\"}", ContentType.APPLICATION_JSON); + + StringEntity mappings = new StringEntity( + "{" + + " \"mappings\": {" + + " \"doc\": {" + + " \"properties\": {" + + " \"rating\": {" + + " \"type\": \"keyword\"" + + " }" + + " }" + + " }" + + " }" + + "}}", + ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index2", Collections.emptyMap(), mappings); + doc = new StringEntity("{\"field\":\"value1\", \"rating\": \"good\"}", ContentType.APPLICATION_JSON); client().performRequest(HttpPut.METHOD_NAME, "/index2/doc/3", Collections.emptyMap(), doc); doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON); client().performRequest(HttpPut.METHOD_NAME, "/index2/doc/4", Collections.emptyMap(), doc); + doc = new StringEntity("{\"field\":\"value1\"}", ContentType.APPLICATION_JSON); client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/5", Collections.emptyMap(), doc); doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON); @@ -713,6 +733,57 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertThat(multiSearchResponse.getResponses()[1].getResponse(), nullValue()); } + public void testFieldCaps() throws IOException { + FieldCapabilitiesRequest request = new FieldCapabilitiesRequest() + .indices("index1", "index2") + .fields("rating", 
"field"); + + FieldCapabilitiesResponse response = execute(request, + highLevelClient()::fieldCaps, highLevelClient()::fieldCapsAsync); + + // Check the capabilities for the 'rating' field. + assertTrue(response.get().containsKey("rating")); + Map ratingResponse = response.getField("rating"); + assertEquals(2, ratingResponse.size()); + + FieldCapabilities expectedKeywordCapabilities = new FieldCapabilities( + "rating", "keyword", true, true, new String[]{"index2"}, null, null); + assertEquals(expectedKeywordCapabilities, ratingResponse.get("keyword")); + + FieldCapabilities expectedLongCapabilities = new FieldCapabilities( + "rating", "long", true, true, new String[]{"index1"}, null, null); + assertEquals(expectedLongCapabilities, ratingResponse.get("long")); + + // Check the capabilities for the 'field' field. + assertTrue(response.get().containsKey("field")); + Map fieldResponse = response.getField("field"); + assertEquals(1, fieldResponse.size()); + + FieldCapabilities expectedTextCapabilities = new FieldCapabilities( + "field", "text", true, false); + assertEquals(expectedTextCapabilities, fieldResponse.get("text")); + } + + public void testFieldCapsWithNonExistentFields() throws IOException { + FieldCapabilitiesRequest request = new FieldCapabilitiesRequest() + .indices("index2") + .fields("nonexistent"); + + FieldCapabilitiesResponse response = execute(request, + highLevelClient()::fieldCaps, highLevelClient()::fieldCapsAsync); + assertTrue(response.get().isEmpty()); + } + + public void testFieldCapsWithNonExistentIndices() { + FieldCapabilitiesRequest request = new FieldCapabilitiesRequest() + .indices("non-existent") + .fields("rating"); + + ElasticsearchException exception = expectThrows(ElasticsearchException.class, + () -> execute(request, highLevelClient()::fieldCaps, highLevelClient()::fieldCapsAsync)); + assertEquals(RestStatus.NOT_FOUND, exception.status()); + } + private static void assertSearchHeader(SearchResponse searchResponse) { 
assertThat(searchResponse.getTook().nanos(), greaterThanOrEqualTo(0L)); assertEquals(0, searchResponse.getFailedShards()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index 52f6984e651..4400d05a9f8 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -21,8 +21,13 @@ package org.elasticsearch.client.documentation; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.ClearScrollRequest; @@ -93,6 +98,8 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -157,6 +164,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { // tag::search-source-setter SearchRequest searchRequest = new SearchRequest(); + 
searchRequest.indices("posts"); searchRequest.source(sourceBuilder); // end::search-source-setter @@ -699,6 +707,65 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { } } + public void testFieldCaps() throws Exception { + indexSearchTestData(); + RestHighLevelClient client = highLevelClient(); + // tag::field-caps-request + FieldCapabilitiesRequest request = new FieldCapabilitiesRequest() + .fields("user") + .indices("posts", "authors", "contributors"); + // end::field-caps-request + + // tag::field-caps-request-indicesOptions + request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1> + // end::field-caps-request-indicesOptions + + // tag::field-caps-execute + FieldCapabilitiesResponse response = client.fieldCaps(request); + // end::field-caps-execute + + // tag::field-caps-response + assertThat(response.get().keySet(), contains("user")); + Map userResponse = response.getField("user"); + + assertThat(userResponse.keySet(), containsInAnyOrder("keyword", "text")); // <1> + FieldCapabilities textCapabilities = userResponse.get("keyword"); + + assertTrue(textCapabilities.isSearchable()); + assertFalse(textCapabilities.isAggregatable()); + + assertArrayEquals(textCapabilities.indices(), // <2> + new String[]{"authors", "contributors"}); + assertNull(textCapabilities.nonSearchableIndices()); // <3> + assertArrayEquals(textCapabilities.nonAggregatableIndices(), // <4> + new String[]{"authors"}); + // end::field-caps-response + + // tag::field-caps-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(FieldCapabilitiesResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::field-caps-execute-listener + + // Replace the empty listener by a blocking listener for tests. 
+ CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::field-caps-execute-async + client.fieldCapsAsync(request, listener); // <1> + // end::field-caps-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + public void testRankEval() throws Exception { indexSearchTestData(); RestHighLevelClient client = highLevelClient(); @@ -794,7 +861,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { MultiSearchResponse.Item firstResponse = response.getResponses()[0]; // <1> assertNull(firstResponse.getFailure()); // <2> SearchResponse searchResponse = firstResponse.getResponse(); // <3> - assertEquals(3, searchResponse.getHits().getTotalHits()); + assertEquals(4, searchResponse.getHits().getTotalHits()); MultiSearchResponse.Item secondResponse = response.getResponses()[1]; // <4> assertNull(secondResponse.getFailure()); searchResponse = secondResponse.getResponse(); @@ -840,18 +907,35 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { } private void indexSearchTestData() throws IOException { - BulkRequest request = new BulkRequest(); - request.add(new IndexRequest("posts", "doc", "1") + CreateIndexRequest authorsRequest = new CreateIndexRequest("authors") + .mapping("doc", "user", "type=keyword,doc_values=false"); + CreateIndexResponse authorsResponse = highLevelClient().indices().create(authorsRequest); + assertTrue(authorsResponse.isAcknowledged()); + + CreateIndexRequest reviewersRequest = new CreateIndexRequest("contributors") + .mapping("doc", "user", "type=keyword"); + CreateIndexResponse reviewersResponse = highLevelClient().indices().create(reviewersRequest); + assertTrue(reviewersResponse.isAcknowledged()); + + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest("posts", "doc", "1") .source(XContentType.JSON, "title", "In which order are my Elasticsearch queries executed?", "user", Arrays.asList("kimchy", 
"luca"), "innerObject", Collections.singletonMap("key", "value"))); - request.add(new IndexRequest("posts", "doc", "2") + bulkRequest.add(new IndexRequest("posts", "doc", "2") .source(XContentType.JSON, "title", "Current status and upcoming changes in Elasticsearch", "user", Arrays.asList("kimchy", "christoph"), "innerObject", Collections.singletonMap("key", "value"))); - request.add(new IndexRequest("posts", "doc", "3") + bulkRequest.add(new IndexRequest("posts", "doc", "3") .source(XContentType.JSON, "title", "The Future of Federated Search in Elasticsearch", "user", Arrays.asList("kimchy", "tanguy"), "innerObject", Collections.singletonMap("key", "value"))); - request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - BulkResponse bulkResponse = highLevelClient().bulk(request); + + bulkRequest.add(new IndexRequest("authors", "doc", "1") + .source(XContentType.JSON, "user", "kimchy")); + bulkRequest.add(new IndexRequest("contributors", "doc", "1") + .source(XContentType.JSON, "user", "tanguy")); + + + bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest); assertSame(RestStatus.OK, bulkResponse.status()); assertFalse(bulkResponse.hasFailures()); } diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 8e0f179634a..bcb928495c5 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -20,7 +20,6 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks apply plugin: 'elasticsearch.build' -apply plugin: 'ru.vyarus.animalsniffer' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -52,8 +51,6 @@ dependencies { testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}" testCompile "org.elasticsearch:securemock:${versions.securemock}" testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}" - testCompile "org.codehaus.mojo:animal-sniffer-annotations:1.15" - signature 
"org.codehaus.mojo.signature:java17:1.0@signature" } forbiddenApisMain { diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java index 8142fea6d25..199b7542e62 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java @@ -24,7 +24,6 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpsConfigurator; import com.sun.net.httpserver.HttpsServer; import org.apache.http.HttpHost; -import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement; import org.elasticsearch.mocksocket.MockHttpServer; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -46,8 +45,6 @@ import static org.junit.Assert.fail; /** * Integration test to validate the builder builds a client with the correct configuration */ -//animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes -@IgnoreJRERequirement public class RestClientBuilderIntegTests extends RestClientTestCase { private static HttpsServer httpsServer; @@ -60,8 +57,6 @@ public class RestClientBuilderIntegTests extends RestClientTestCase { httpsServer.start(); } - //animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes - @IgnoreJRERequirement private static class ResponseHandler implements HttpHandler { @Override public void handle(HttpExchange httpExchange) throws IOException { diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java index da5a960c2e8..16c192b3977 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java @@ -23,7 +23,6 @@ import 
com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; import org.apache.http.HttpHost; -import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement; import org.elasticsearch.mocksocket.MockHttpServer; import org.junit.AfterClass; import org.junit.Before; @@ -48,8 +47,6 @@ import static org.junit.Assert.assertTrue; * Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}. * Works against real http servers, multiple hosts. Also tests failover by randomly shutting down hosts. */ -//animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes -@IgnoreJRERequirement public class RestClientMultipleHostsIntegTests extends RestClientTestCase { private static HttpServer[] httpServers; @@ -90,8 +87,6 @@ public class RestClientMultipleHostsIntegTests extends RestClientTestCase { return httpServer; } - //animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes - @IgnoreJRERequirement private static class ResponseHandler implements HttpHandler { private final int statusCode; diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java index 59aa2baab96..dd23dbe454f 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java @@ -33,7 +33,6 @@ import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; import org.apache.http.util.EntityUtils; -import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement; import org.elasticsearch.mocksocket.MockHttpServer; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -64,8 +63,6 @@ import static 
org.junit.Assert.fail; * Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}. * Works against a real http server, one single host. */ -//animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes -@IgnoreJRERequirement public class RestClientSingleHostIntegTests extends RestClientTestCase { private static HttpServer httpServer; @@ -91,8 +88,6 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase { return httpServer; } - //animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes - @IgnoreJRERequirement private static class ResponseHandler implements HttpHandler { private final int statusCode; diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index af9d5f36245..f2fc297a9e4 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -201,8 +201,7 @@ subprojects { } final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) final Path licensePath = archiveExtractionDir.toPath().resolve("elasticsearch-${VersionProperties.elasticsearch}/LICENSE.txt") - final List actualLines = Files.readAllLines(licensePath) - assertLinesInFile(licensePath, actualLines, licenseLines) + assertLinesInFile(licensePath, licenseLines) } } check.dependsOn checkLicense @@ -213,8 +212,7 @@ subprojects { doLast { final List noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2018 Elasticsearch") final Path noticePath = archiveExtractionDir.toPath().resolve("elasticsearch-${VersionProperties.elasticsearch}/NOTICE.txt") - final List actualLines = Files.readAllLines(noticePath) - assertLinesInFile(noticePath, actualLines, noticeLines) + assertLinesInFile(noticePath, noticeLines) } } check.dependsOn checkNotice @@ -304,4 +302,3 @@ configure(subprojects.findAll { it.name.contains('zip') }) { } } } - diff --git a/distribution/build.gradle b/distribution/build.gradle index 
23d1d7b66ad..c1ab5b76148 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -460,14 +460,3 @@ subprojects { return result } } - -static void assertLinesInFile(final Path path, final List actualLines, final List expectedLines) { - int line = 0 - for (final String expectedLine : expectedLines) { - final String actualLine = actualLines.get(line) - if (expectedLine != actualLine) { - throw new GradleException("expected line [${line + 1}] in [${path}] to be [${expectedLine}] but was [${actualLine}]") - } - line++ - } -} diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 863f6a3613d..33f98386a89 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -270,7 +270,7 @@ Closure commonDebConfig(boolean oss) { customFields['License'] = 'Elastic-License' } - version = project.version + version = project.version.replace('-', '~') packageGroup 'web' requires 'bash' requires 'libc6' @@ -415,8 +415,7 @@ subprojects { "License: " + expectedLicense) final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) final List expectedLines = header + licenseLines.collect { " " + it } - final List actualLines = Files.readAllLines(copyrightPath) - assertLinesInFile(copyrightPath, actualLines, expectedLines) + assertLinesInFile(copyrightPath, expectedLines) } } } else { @@ -432,8 +431,7 @@ subprojects { } final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) final Path licensePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/LICENSE.txt") - final List actualLines = Files.readAllLines(licensePath) - assertLinesInFile(licensePath, actualLines, licenseLines) + assertLinesInFile(licensePath, licenseLines) } } } @@ -444,8 +442,7 @@ subprojects { doLast { final List noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2018 Elasticsearch") final Path noticePath = 
packageExtractionDir.toPath().resolve("usr/share/elasticsearch/NOTICE.txt") - final List actualLines = Files.readAllLines(noticePath) - assertLinesInFile(noticePath, actualLines, noticeLines) + assertLinesInFile(noticePath, noticeLines) } } check.dependsOn checkNotice diff --git a/docs/java-rest/high-level/search/field-caps.asciidoc b/docs/java-rest/high-level/search/field-caps.asciidoc new file mode 100644 index 00000000000..fef30f629ca --- /dev/null +++ b/docs/java-rest/high-level/search/field-caps.asciidoc @@ -0,0 +1,82 @@ +[[java-rest-high-field-caps]] +=== Field Capabilities API + +The field capabilities API allows for retrieving the capabilities of fields across multiple indices. + +[[java-rest-high-field-caps-request]] +==== Field Capabilities Request + +A `FieldCapabilitiesRequest` contains a list of fields for which capabilities +should be returned, plus an optional list of target indices. If no indices +are provided, the request will be executed on all indices. + +Note that the fields parameter supports wildcard notation. For example, providing `text_*` +will cause all fields that match the expression to be returned. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[field-caps-request] +-------------------------------------------------- + +[[java-rest-high-field-caps-request-optional]] +===== Optional arguments + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[field-caps-request-indicesOptions] +-------------------------------------------------- +<1> Setting `IndicesOptions` controls how unavailable indices are resolved and +how wildcard expressions are expanded. 
+ +[[java-rest-high-field-caps-sync]] +==== Synchronous Execution + +The `fieldCaps` method executes the request synchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[field-caps-execute] +-------------------------------------------------- + +[[java-rest-high-field-caps-async]] +==== Asynchronous Execution + +The `fieldCapsAsync` method executes the request asynchronously, +calling the provided `ActionListener` when the response is ready: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[field-caps-execute-async] +-------------------------------------------------- +<1> The `FieldCapabilitiesRequest` to execute and the `ActionListener` to use when +the execution completes. + +The asynchronous method does not block and returns immediately. Once the request +completes, the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `FieldCapabilitiesResponse` is constructed as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[field-caps-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. +<2> Called when the whole `FieldCapabilitiesRequest` fails. + +[[java-rest-high-field-caps-response]] +==== FieldCapabilitiesResponse + +For each requested field, the returned `FieldCapabilitiesResponse` contains its type +and whether or not it can be searched or aggregated on. The response also gives +information about how each index contributes to the field's capabilities. 
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[field-caps-response] +-------------------------------------------------- +<1> The `user` field has two possible types, `keyword` and `text`. +<2> This field only has type `keyword` in the `authors` and `contributors` indices. +<3> Null, since the field is searchable in all indices for which it has the `keyword` type. +<4> The `user` field is not aggregatable in the `authors` index. \ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 1f3d7a37443..1c0e09c6c07 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -32,11 +32,13 @@ The Java High Level REST Client supports the following Search APIs: * <> * <> * <> +* <> * <> include::search/search.asciidoc[] include::search/scroll.asciidoc[] include::search/multi-search.asciidoc[] +include::search/field-caps.asciidoc[] include::search/rank-eval.asciidoc[] == Miscellaneous APIs diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index 30ea2832a70..c2d1614ad6e 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -27,11 +27,13 @@ POST /sales/_search?size=0 // CONSOLE // TEST[setup:sales] -Available expressions for interval: `year`, `quarter`, `month`, `week`, `day`, `hour`, `minute`, `second` +Available expressions for interval: `year` (`1y`), `quarter` (`1q`), `month` (`1M`), `week` (`1w`), +`day` (`1d`), `hour` (`1h`), `minute` (`1m`), `second` (`1s`) Time values can also be specified via abbreviations supported by <> parsing. 
Note that fractional time values are not supported, but you can address this by shifting to another -time unit (e.g., `1.5h` could instead be specified as `90m`). +time unit (e.g., `1.5h` could instead be specified as `90m`). Also note that time intervals larger +than days do not support arbitrary values but can only be one unit large (e.g. `1y` is valid, `2y` is not). [source,js] -------------------------------------------------- diff --git a/docs/reference/docs/delete.asciidoc b/docs/reference/docs/delete.asciidoc index 782a625586b..49f31eb2d75 100644 --- a/docs/reference/docs/delete.asciidoc +++ b/docs/reference/docs/delete.asciidoc @@ -39,11 +39,14 @@ The result of the above delete operation is: [[delete-versioning]] === Versioning -Each document indexed is versioned. When deleting a document, the -`version` can be specified to make sure the relevant document we are -trying to delete is actually being deleted and it has not changed in the -meantime. Every write operation executed on a document, deletes included, -causes its version to be incremented. +Each document indexed is versioned. When deleting a document, the `version` can +be specified to make sure the relevant document we are trying to delete is +actually being deleted and it has not changed in the meantime. Every write +operation executed on a document, deletes included, causes its version to be +incremented. The version number of a deleted document remains available for a +short time after deletion to allow for control of concurrent operations. The +length of time for which a deleted document's version remains available is +determined by the `index.gc_deletes` index setting and defaults to 60 seconds. 
[float] [[delete-routing]] diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 0ab742108b9..ed0077a629d 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -214,6 +214,27 @@ specific index module: The maximum length of regex that can be used in Regexp Query. Defaults to `1000`. + `index.routing.allocation.enable`:: + + Controls shard allocation for this index. It can be set to: + * `all` (default) - Allows shard allocation for all shards. + * `primaries` - Allows shard allocation only for primary shards. + * `new_primaries` - Allows shard allocation only for newly-created primary shards. + * `none` - No shard allocation is allowed. + + `index.routing.rebalance.enable`:: + + Enables shard rebalancing for this index. It can be set to: + * `all` (default) - Allows shard rebalancing for all shards. + * `primaries` - Allows shard rebalancing only for primary shards. + * `replicas` - Allows shard rebalancing only for replica shards. + * `none` - No shard rebalancing is allowed. + + `index.gc_deletes`:: + + The length of time that a <> remains available for <>. + Defaults to `60s`. + [float] === Settings in other index modules diff --git a/docs/reference/migration/migrate_6_4.asciidoc b/docs/reference/migration/migrate_6_4.asciidoc new file mode 100644 index 00000000000..a761509597f --- /dev/null +++ b/docs/reference/migration/migrate_6_4.asciidoc @@ -0,0 +1,12 @@ +[[breaking-changes-6.4]] +== Breaking changes in 6.4 + +[[breaking_64_api_changes]] +=== API changes + +==== Field capabilities request format + +In the past, `fields` could be provided either as a parameter, or as part of the request +body. Specifying `fields` in the request body is now deprecated, and instead they should +always be supplied through a request parameter. In 7.0.0, the field capabilities API will +not accept `fields` supplied in the request body. 
diff --git a/docs/reference/modules/transport.asciidoc b/docs/reference/modules/transport.asciidoc index 50c35a4a736..b7a65d98592 100644 --- a/docs/reference/modules/transport.asciidoc +++ b/docs/reference/modules/transport.asciidoc @@ -41,7 +41,7 @@ addressable from the outside. Defaults to the actual port assigned via |`transport.tcp.connect_timeout` |The socket connect timeout setting (in time setting format). Defaults to `30s`. -|`transport.tcp.compress` |Set to `true` to enable compression (LZF) +|`transport.tcp.compress` |Set to `true` to enable compression (`DEFLATE`) between all nodes. Defaults to `false`. |`transport.ping_schedule` | Schedule a regular ping message to ensure that connections are kept alive. Defaults to `5s` in the transport client and `-1` (disabled) elsewhere. diff --git a/docs/reference/search/field-caps.asciidoc b/docs/reference/search/field-caps.asciidoc index 8329d96131d..6cb483e7a25 100644 --- a/docs/reference/search/field-caps.asciidoc +++ b/docs/reference/search/field-caps.asciidoc @@ -20,7 +20,7 @@ GET twitter/_field_caps?fields=rating // CONSOLE // TEST[setup:twitter] -Alternatively the `fields` option can also be defined in the request body: +Alternatively the `fields` option can also be defined in the request body. deprecated[6.4.0, Please use a request parameter instead.] [source,js] -------------------------------------------------- @@ -30,6 +30,7 @@ POST _field_caps } -------------------------------------------------- // CONSOLE +// TEST[warning:Specifying a request body is deprecated -- the [fields] request parameter should be used instead.] This is equivalent to the previous request. diff --git a/docs/reference/search/request/scroll.asciidoc b/docs/reference/search/request/scroll.asciidoc index be725aaf362..0fd6979ef95 100644 --- a/docs/reference/search/request/scroll.asciidoc +++ b/docs/reference/search/request/scroll.asciidoc @@ -78,9 +78,9 @@ returned with each batch of results. 
Each call to the `scroll` API returns the next batch of results until there are no more results left to return, ie the `hits` array is empty. -IMPORTANT: The initial search request and each subsequent scroll request -returns a new `_scroll_id` -- only the most recent `_scroll_id` should be -used. +IMPORTANT: The initial search request and each subsequent scroll request each +return a `_scroll_id`, which may change with each request -- only the most +recent `_scroll_id` should be used. NOTE: If the request specifies aggregations, only the initial search response will contain the aggregations results. diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java index 758e5eb9972..bf35918ad6e 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java @@ -128,7 +128,7 @@ public class RenameProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, null); - String newFieldName = RandomDocumentPicks.randomFieldName(random()); + String newFieldName = randomValueOtherThanMany(ingestDocument::hasField, () -> RandomDocumentPicks.randomFieldName(random())); Processor processor = new RenameProcessor(randomAlphaOfLength(10), fieldName, newFieldName, false); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(fieldName), equalTo(false)); diff --git a/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats b/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats index 3536c2a207d..1a3704c3317 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats +++ 
b/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats @@ -55,7 +55,8 @@ setup() { } @test "[TAR] archive is available" { - count=$(find . -type f -name 'elasticsearch*.tar.gz' | wc -l) + local version=$(cat version) + count=$(find . -type f -name "${PACKAGE_NAME}-${version}.tar.gz" | wc -l) [ "$count" -eq 1 ] } diff --git a/qa/vagrant/src/test/resources/packaging/utils/tar.bash b/qa/vagrant/src/test/resources/packaging/utils/tar.bash index 9b4bc76d841..4ded1f73514 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/tar.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/tar.bash @@ -35,10 +35,12 @@ install_archive() { export ESHOME=${1:-/tmp/elasticsearch} + local version=$(cat version) + echo "Unpacking tarball to $ESHOME" rm -rf /tmp/untar mkdir -p /tmp/untar - tar -xzpf elasticsearch*.tar.gz -C /tmp/untar + tar -xzpf "${PACKAGE_NAME}-${version}.tar.gz" -C /tmp/untar find /tmp/untar -depth -type d -name 'elasticsearch*' -exec mv {} "$ESHOME" \; > /dev/null @@ -79,6 +81,8 @@ export_elasticsearch_paths() { export ESSCRIPTS="$ESCONFIG/scripts" export ESDATA="$ESHOME/data" export ESLOG="$ESHOME/logs" + + export PACKAGE_NAME=${PACKAGE_NAME:-"elasticsearch-oss"} } # Checks that all directories & files are correctly installed diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java index ec6d0902ac9..21bb452430e 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java @@ -19,11 +19,14 @@ package org.elasticsearch.action.fieldcaps; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import 
org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -36,6 +39,13 @@ import java.util.List; * Describes the capabilities of a field optionally merged across multiple indices. */ public class FieldCapabilities implements Writeable, ToXContentObject { + private static final ParseField TYPE_FIELD = new ParseField("type"); + private static final ParseField SEARCHABLE_FIELD = new ParseField("searchable"); + private static final ParseField AGGREGATABLE_FIELD = new ParseField("aggregatable"); + private static final ParseField INDICES_FIELD = new ParseField("indices"); + private static final ParseField NON_SEARCHABLE_INDICES_FIELD = new ParseField("non_searchable_indices"); + private static final ParseField NON_AGGREGATABLE_INDICES_FIELD = new ParseField("non_aggregatable_indices"); + private final String name; private final String type; private final boolean isSearchable; @@ -52,7 +62,7 @@ public class FieldCapabilities implements Writeable, ToXContentObject { * @param isSearchable Whether this field is indexed for search. * @param isAggregatable Whether this field can be aggregated on. */ - FieldCapabilities(String name, String type, boolean isSearchable, boolean isAggregatable) { + public FieldCapabilities(String name, String type, boolean isSearchable, boolean isAggregatable) { this(name, type, isSearchable, isAggregatable, null, null, null); } @@ -69,7 +79,7 @@ public class FieldCapabilities implements Writeable, ToXContentObject { * @param nonAggregatableIndices The list of indices where this field is not aggregatable, * or null if the field is aggregatable in all indices. 
*/ - FieldCapabilities(String name, String type, + public FieldCapabilities(String name, String type, boolean isSearchable, boolean isAggregatable, String[] indices, String[] nonSearchableIndices, @@ -83,7 +93,7 @@ public class FieldCapabilities implements Writeable, ToXContentObject { this.nonAggregatableIndices = nonAggregatableIndices; } - FieldCapabilities(StreamInput in) throws IOException { + public FieldCapabilities(StreamInput in) throws IOException { this.name = in.readString(); this.type = in.readString(); this.isSearchable = in.readBoolean(); @@ -107,22 +117,47 @@ public class FieldCapabilities implements Writeable, ToXContentObject { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("type", type); - builder.field("searchable", isSearchable); - builder.field("aggregatable", isAggregatable); + builder.field(TYPE_FIELD.getPreferredName(), type); + builder.field(SEARCHABLE_FIELD.getPreferredName(), isSearchable); + builder.field(AGGREGATABLE_FIELD.getPreferredName(), isAggregatable); if (indices != null) { - builder.field("indices", indices); + builder.field(INDICES_FIELD.getPreferredName(), indices); } if (nonSearchableIndices != null) { - builder.field("non_searchable_indices", nonSearchableIndices); + builder.field(NON_SEARCHABLE_INDICES_FIELD.getPreferredName(), nonSearchableIndices); } if (nonAggregatableIndices != null) { - builder.field("non_aggregatable_indices", nonAggregatableIndices); + builder.field(NON_AGGREGATABLE_INDICES_FIELD.getPreferredName(), nonAggregatableIndices); } builder.endObject(); return builder; } + public static FieldCapabilities fromXContent(String name, XContentParser parser) throws IOException { + return PARSER.parse(parser, name); + } + + @SuppressWarnings("unchecked") + private static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "field_capabilities", + true, + (a, name) -> new FieldCapabilities(name, + 
(String) a[0], + (boolean) a[1], + (boolean) a[2], + a[3] != null ? ((List) a[3]).toArray(new String[0]) : null, + a[4] != null ? ((List) a[4]).toArray(new String[0]) : null, + a[5] != null ? ((List) a[5]).toArray(new String[0]) : null)); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), TYPE_FIELD); + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), SEARCHABLE_FIELD); + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), AGGREGATABLE_FIELD); + PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), INDICES_FIELD); + PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_SEARCHABLE_INDICES_FIELD); + PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_AGGREGATABLE_INDICES_FIELD); + } + /** * The name of the field. */ diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java index b04f8820763..264fa21cf91 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java @@ -61,14 +61,18 @@ public final class FieldCapabilitiesRequest extends ActionRequest implements Ind /** * Returns true iff the results should be merged. + * + * Note that when using the high-level REST client, results are always merged (this flag is always considered 'true'). */ boolean isMergeResults() { return mergeResults; } /** - * if set to true the response will contain only a merged view of the per index field capabilities. Otherwise only - * unmerged per index field capabilities are returned. + * If set to true the response will contain only a merged view of the per index field capabilities. + * Otherwise only unmerged per index field capabilities are returned. 
+ * + * Note that when using the high-level REST client, results are always merged (this flag is always considered 'true'). */ void setMergeResults(boolean mergeResults) { this.mergeResults = mergeResults; diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java index 4b1bcf57589..5e2202ac073 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java @@ -21,20 +21,29 @@ package org.elasticsearch.action.fieldcaps; import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParserUtils; import java.io.IOException; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; /** * Response for {@link FieldCapabilitiesRequest} requests. 
*/ public class FieldCapabilitiesResponse extends ActionResponse implements ToXContentFragment { + private static final ParseField FIELDS_FIELD = new ParseField("fields"); + private Map> responseMap; private List indexResponses; @@ -114,10 +123,42 @@ public class FieldCapabilitiesResponse extends ActionResponse implements ToXCont @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("fields", responseMap); + builder.field(FIELDS_FIELD.getPreferredName(), responseMap); return builder; } + public static FieldCapabilitiesResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("field_capabilities_response", true, + a -> new FieldCapabilitiesResponse( + ((List>>) a[0]).stream() + .collect(Collectors.toMap(Tuple::v1, Tuple::v2)))); + + static { + PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(), (p, c, n) -> { + Map typeToCapabilities = parseTypeToCapabilities(p, n); + return new Tuple<>(n, typeToCapabilities); + }, FIELDS_FIELD); + } + + private static Map parseTypeToCapabilities(XContentParser parser, String name) throws IOException { + Map typeToCapabilities = new HashMap<>(); + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + String type = parser.currentName(); + FieldCapabilities capabilities = FieldCapabilities.fromXContent(name, parser); + typeToCapabilities.put(type, capabilities); + } + return typeToCapabilities; + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git 
a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index aad2638bd9d..91aec1171dc 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -37,8 +37,10 @@ import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.transport.Transport; +import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -62,6 +64,7 @@ abstract class AbstractSearchAsyncAction exten private final long clusterStateVersion; private final Map aliasFilter; private final Map concreteIndexBoosts; + private final Map> indexRoutings; private final SetOnce> shardFailures = new SetOnce<>(); private final Object shardFailuresMutex = new Object(); private final AtomicInteger successfulOps = new AtomicInteger(); @@ -72,6 +75,7 @@ abstract class AbstractSearchAsyncAction exten protected AbstractSearchAsyncAction(String name, Logger logger, SearchTransportService searchTransportService, BiFunction nodeIdToConnection, Map aliasFilter, Map concreteIndexBoosts, + Map> indexRoutings, Executor executor, SearchRequest request, ActionListener listener, GroupShardsIterator shardsIts, TransportSearchAction.SearchTimeProvider timeProvider, long clusterStateVersion, @@ -89,6 +93,7 @@ abstract class AbstractSearchAsyncAction exten this.clusterStateVersion = clusterStateVersion; this.concreteIndexBoosts = concreteIndexBoosts; this.aliasFilter = aliasFilter; + this.indexRoutings = indexRoutings; this.results = resultConsumer; this.clusters = clusters; } @@ -128,17 +133,17 @@ abstract class 
AbstractSearchAsyncAction exten onPhaseFailure(currentPhase, "all shards failed", cause); } else { Boolean allowPartialResults = request.allowPartialSearchResults(); - assert allowPartialResults != null : "SearchRequest missing setting for allowPartialSearchResults"; + assert allowPartialResults != null : "SearchRequest missing setting for allowPartialSearchResults"; if (allowPartialResults == false && shardFailures.get() != null ){ if (logger.isDebugEnabled()) { final ShardOperationFailedException[] shardSearchFailures = ExceptionsHelper.groupBy(buildShardFailures()); Throwable cause = shardSearchFailures.length == 0 ? null : ElasticsearchException.guessRootCauses(shardSearchFailures[0].getCause())[0]; - logger.debug(() -> new ParameterizedMessage("{} shards failed for phase: [{}]", + logger.debug(() -> new ParameterizedMessage("{} shards failed for phase: [{}]", shardSearchFailures.length, getName()), cause); } - onPhaseFailure(currentPhase, "Partial shards failure", null); - } else { + onPhaseFailure(currentPhase, "Partial shards failure", null); + } else { if (logger.isTraceEnabled()) { final String resultsFrom = results.getSuccessfulResults() .map(r -> r.getSearchShardTarget().toString()).collect(Collectors.joining(",")); @@ -271,14 +276,14 @@ abstract class AbstractSearchAsyncAction exten @Override public final SearchResponse buildSearchResponse(InternalSearchResponse internalSearchResponse, String scrollId) { - + ShardSearchFailure[] failures = buildShardFailures(); Boolean allowPartialResults = request.allowPartialSearchResults(); assert allowPartialResults != null : "SearchRequest missing setting for allowPartialSearchResults"; if (allowPartialResults == false && failures.length > 0){ - raisePhaseFailure(new SearchPhaseExecutionException("", "Shard failures", null, failures)); - } - + raisePhaseFailure(new SearchPhaseExecutionException("", "Shard failures", null, failures)); + } + return new SearchResponse(internalSearchResponse, scrollId, getNumShards(), 
successfulOps.get(), skippedOps.get(), buildTookInMillis(), failures, clusters); } @@ -318,8 +323,11 @@ abstract class AbstractSearchAsyncAction exten AliasFilter filter = aliasFilter.get(shardIt.shardId().getIndex().getUUID()); assert filter != null; float indexBoost = concreteIndexBoosts.getOrDefault(shardIt.shardId().getIndex().getUUID(), DEFAULT_INDEX_BOOST); + String indexName = shardIt.shardId().getIndex().getName(); + final String[] routings = indexRoutings.getOrDefault(indexName, Collections.emptySet()) + .toArray(new String[0]); return new ShardSearchTransportRequest(shardIt.getOriginalIndices(), request, shardIt.shardId(), getNumShards(), - filter, indexBoost, timeProvider.getAbsoluteStartMillis(), clusterAlias); + filter, indexBoost, timeProvider.getAbsoluteStartMillis(), clusterAlias, routings); } /** diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java index fe42d503936..0873ff40f75 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java @@ -27,6 +27,7 @@ import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.transport.Transport; import java.util.Map; +import java.util.Set; import java.util.concurrent.Executor; import java.util.function.BiFunction; import java.util.function.Function; @@ -47,6 +48,7 @@ final class CanMatchPreFilterSearchPhase extends AbstractSearchAsyncAction nodeIdToConnection, Map aliasFilter, Map concreteIndexBoosts, + Map> indexRoutings, Executor executor, SearchRequest request, ActionListener listener, GroupShardsIterator shardsIts, TransportSearchAction.SearchTimeProvider timeProvider, long clusterStateVersion, @@ -56,9 +58,9 @@ final class CanMatchPreFilterSearchPhase extends AbstractSearchAsyncAction extends if (shardsIts.size() > 0) { 
int maxConcurrentShardRequests = Math.min(this.maxConcurrentShardRequests, shardsIts.size()); final boolean success = shardExecutionIndex.compareAndSet(0, maxConcurrentShardRequests); - assert success; + assert success; assert request.allowPartialSearchResults() != null : "SearchRequest missing setting for allowPartialSearchResults"; if (request.allowPartialSearchResults() == false) { final StringBuilder missingShards = new StringBuilder(); @@ -140,7 +140,7 @@ abstract class InitialSearchPhase extends final SearchShardIterator shardRoutings = shardsIts.get(index); if (shardRoutings.size() == 0) { if(missingShards.length() >0 ){ - missingShards.append(", "); + missingShards.append(", "); } missingShards.append(shardRoutings.shardId()); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index 9bcbe1c8e67..0782fbb310b 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.transport.Transport; import java.util.Map; +import java.util.Set; import java.util.concurrent.Executor; import java.util.function.BiFunction; @@ -37,11 +38,13 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction SearchDfsQueryThenFetchAsyncAction(final Logger logger, final SearchTransportService searchTransportService, final BiFunction nodeIdToConnection, final Map aliasFilter, - final Map concreteIndexBoosts, final SearchPhaseController searchPhaseController, final Executor executor, + final Map concreteIndexBoosts, final Map> indexRoutings, + final SearchPhaseController searchPhaseController, final Executor executor, final SearchRequest request, final ActionListener listener, 
final GroupShardsIterator shardsIts, final TransportSearchAction.SearchTimeProvider timeProvider, final long clusterStateVersion, final SearchTask task, SearchResponse.Clusters clusters) { - super("dfs", logger, searchTransportService, nodeIdToConnection, aliasFilter, concreteIndexBoosts, executor, request, listener, + super("dfs", logger, searchTransportService, nodeIdToConnection, aliasFilter, concreteIndexBoosts, indexRoutings, + executor, request, listener, shardsIts, timeProvider, clusterStateVersion, task, new ArraySearchPhaseResults<>(shardsIts.size()), request.getMaxConcurrentShardRequests(), clusters); this.searchPhaseController = searchPhaseController; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index b7669312b00..bbd84011de0 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.transport.Transport; import java.util.Map; +import java.util.Set; import java.util.concurrent.Executor; import java.util.function.BiFunction; @@ -37,13 +38,14 @@ final class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction nodeIdToConnection, final Map aliasFilter, - final Map concreteIndexBoosts, final SearchPhaseController searchPhaseController, final Executor executor, + final Map concreteIndexBoosts, final Map> indexRoutings, + final SearchPhaseController searchPhaseController, final Executor executor, final SearchRequest request, final ActionListener listener, final GroupShardsIterator shardsIts, final TransportSearchAction.SearchTimeProvider timeProvider, long clusterStateVersion, SearchTask task, SearchResponse.Clusters clusters) { - super("query", logger, 
searchTransportService, nodeIdToConnection, aliasFilter, concreteIndexBoosts, executor, request, listener, - shardsIts, timeProvider, clusterStateVersion, task, searchPhaseController.newSearchPhaseResults(request, shardsIts.size()), - request.getMaxConcurrentShardRequests(), clusters); + super("query", logger, searchTransportService, nodeIdToConnection, aliasFilter, concreteIndexBoosts, indexRoutings, + executor, request, listener, shardsIts, timeProvider, clusterStateVersion, task, + searchPhaseController.newSearchPhaseResults(request, shardsIts.size()), request.getMaxConcurrentShardRequests(), clusters); this.searchPhaseController = searchPhaseController; } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index bd533ce7b09..6b39af478f4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -297,6 +297,7 @@ public class TransportSearchAction extends HandledTransportAction aliasFilter = buildPerIndexAliasFilter(searchRequest, clusterState, indices, remoteAliasMap); Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices()); + routingMap = routingMap == null ? 
Collections.emptyMap() : Collections.unmodifiableMap(routingMap); String[] concreteIndices = new String[indices.length]; for (int i = 0; i < indices.length; i++) { concreteIndices[i] = indices[i].getName(); @@ -350,7 +351,7 @@ public class TransportSearchAction extends HandledTransportAction shardIterators) { @@ -380,17 +381,20 @@ public class TransportSearchAction extends HandledTransportAction shardIterators, SearchTimeProvider timeProvider, BiFunction connectionLookup, - long clusterStateVersion, Map aliasFilter, + long clusterStateVersion, + Map aliasFilter, Map concreteIndexBoosts, - ActionListener listener, boolean preFilter, + Map> indexRoutings, + ActionListener listener, + boolean preFilter, SearchResponse.Clusters clusters) { Executor executor = threadPool.executor(ThreadPool.Names.SEARCH); if (preFilter) { return new CanMatchPreFilterSearchPhase(logger, searchTransportService, connectionLookup, - aliasFilter, concreteIndexBoosts, executor, searchRequest, listener, shardIterators, + aliasFilter, concreteIndexBoosts, indexRoutings, executor, searchRequest, listener, shardIterators, timeProvider, clusterStateVersion, task, (iter) -> { AbstractSearchAsyncAction action = searchAsyncAction(task, searchRequest, iter, timeProvider, connectionLookup, - clusterStateVersion, aliasFilter, concreteIndexBoosts, listener, false, clusters); + clusterStateVersion, aliasFilter, concreteIndexBoosts, indexRoutings, listener, false, clusters); return new SearchPhase(action.getName()) { @Override public void run() throws IOException { @@ -403,14 +407,14 @@ public class TransportSearchAction extends HandledTransportAction getValues() { return values; } @@ -116,9 +119,10 @@ public final class TermsSetQueryBuilder extends AbstractQueryBuilder 0)) { + GroupShardsIterator group = buildShardIterator(clusterService, request); + assert group.size() <= numShards : "index routing shards: " + group.size() + + " cannot be greater than total number of shards: " + numShards; + if 
(group.size() < numShards) { + /** + * The routing of this request targets a subset of the shards of this index so we need to we retrieve + * the original {@link GroupShardsIterator} and compute the request shard id and number of + * shards from it. + * This behavior has been added in {@link Version#V_6_4_0} so if there is another node in the cluster + * with an older version we use the original shard id and number of shards in order to ensure that all + * slices use the same numbers. + */ + numShards = group.size(); + int ord = 0; + shardId = -1; + // remap the original shard id with its index (position) in the sorted shard iterator. + for (ShardIterator it : group) { + assert it.shardId().getIndex().equals(request.shardId().getIndex()); + if (request.shardId().equals(it.shardId())) { + shardId = ord; + break; + } + ++ord; + } + assert shardId != -1 : "shard id: " + request.shardId().getId() + " not found in index shard routing"; + } + } + String field = this.field; boolean useTermQuery = false; if ("_uid".equals(field)) { @@ -273,6 +319,17 @@ public class SliceBuilder implements Writeable, ToXContentObject { return new MatchAllDocsQuery(); } + /** + * Returns the {@link GroupShardsIterator} for the provided request. + */ + private GroupShardsIterator buildShardIterator(ClusterService clusterService, ShardSearchRequest request) { + final ClusterState state = clusterService.state(); + String[] indices = new String[] { request.shardId().getIndex().getName() }; + Map> routingMap = request.indexRoutings().length > 0 ? 
+ Collections.singletonMap(indices[0], Sets.newHashSet(request.indexRoutings())) : null; + return clusterService.operationRouting().searchShards(state, indices, routingMap, request.preference()); + } + @Override public String toString() { return Strings.toString(this, true, true); diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java index 8543b35569a..9f893ada9c7 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java @@ -19,7 +19,9 @@ package org.elasticsearch.action.fieldcaps; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; @@ -80,7 +82,7 @@ public class FieldCapabilitiesRequestTests extends ESTestCase { } - public void testFieldCapsRequestSerialization() throws IOException { + public void testSerialization() throws IOException { for (int i = 0; i < 20; i++) { FieldCapabilitiesRequest request = randomRequest(); BytesStreamOutput output = new BytesStreamOutput(); @@ -93,4 +95,11 @@ public class FieldCapabilitiesRequestTests extends ESTestCase { assertEquals(deserialized.hashCode(), request.hashCode()); } } + + public void testValidation() { + FieldCapabilitiesRequest request = new FieldCapabilitiesRequest() + .indices("index2"); + ActionRequestValidationException exception = request.validate(); + assertNotNull(exception); + } } diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java 
b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java index 2eaf1d4832f..c8bd5d5188b 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java @@ -19,42 +19,152 @@ package org.elasticsearch.action.fieldcaps; +import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.function.Predicate; -public class FieldCapabilitiesResponseTests extends ESTestCase { - private FieldCapabilitiesResponse randomResponse() { - Map > fieldMap = new HashMap<> (); - int numFields = randomInt(10); - for (int i = 0; i < numFields; i++) { - String fieldName = randomAlphaOfLengthBetween(5, 10); - int numIndices = randomIntBetween(1, 5); - Map indexFieldMap = new HashMap<> (); - for (int j = 0; j < numIndices; j++) { - String index = randomAlphaOfLengthBetween(10, 20); - indexFieldMap.put(index, FieldCapabilitiesTests.randomFieldCaps()); - } - fieldMap.put(fieldName, indexFieldMap); - } - return new FieldCapabilitiesResponse(fieldMap); +import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; + +public class FieldCapabilitiesResponseTests extends 
AbstractStreamableXContentTestCase { + + @Override + protected FieldCapabilitiesResponse doParseInstance(XContentParser parser) throws IOException { + return FieldCapabilitiesResponse.fromXContent(parser); } - public void testSerialization() throws IOException { - for (int i = 0; i < 20; i++) { - FieldCapabilitiesResponse response = randomResponse(); - BytesStreamOutput output = new BytesStreamOutput(); - response.writeTo(output); - output.flush(); - StreamInput input = output.bytes().streamInput(); - FieldCapabilitiesResponse deserialized = new FieldCapabilitiesResponse(); - deserialized.readFrom(input); - assertEquals(deserialized, response); - assertEquals(deserialized.hashCode(), response.hashCode()); + @Override + protected FieldCapabilitiesResponse createBlankInstance() { + return new FieldCapabilitiesResponse(); + } + + @Override + protected FieldCapabilitiesResponse createTestInstance() { + Map> responses = new HashMap<>(); + + String[] fields = generateRandomStringArray(5, 10, false, true); + assertNotNull(fields); + + for (String field : fields) { + Map typesToCapabilities = new HashMap<>(); + String[] types = generateRandomStringArray(5, 10, false, false); + assertNotNull(types); + + for (String type : types) { + typesToCapabilities.put(type, FieldCapabilitiesTests.randomFieldCaps(field)); + } + responses.put(field, typesToCapabilities); } + return new FieldCapabilitiesResponse(responses); + } + + @Override + protected FieldCapabilitiesResponse mutateInstance(FieldCapabilitiesResponse response) { + Map> mutatedResponses = new HashMap<>(response.get()); + + int mutation = response.get().isEmpty() ? 
0 : randomIntBetween(0, 2); + + switch (mutation) { + case 0: + String toAdd = randomAlphaOfLength(10); + mutatedResponses.put(toAdd, Collections.singletonMap( + randomAlphaOfLength(10), + FieldCapabilitiesTests.randomFieldCaps(toAdd))); + break; + case 1: + String toRemove = randomFrom(mutatedResponses.keySet()); + mutatedResponses.remove(toRemove); + break; + case 2: + String toReplace = randomFrom(mutatedResponses.keySet()); + mutatedResponses.put(toReplace, Collections.singletonMap( + randomAlphaOfLength(10), + FieldCapabilitiesTests.randomFieldCaps(toReplace))); + break; + } + return new FieldCapabilitiesResponse(mutatedResponses); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // Disallow random fields from being inserted under the 'fields' key, as this + // map only contains field names, and also under 'fields.FIELD_NAME', as these + // maps only contain type names. + return field -> field.matches("fields(\\.\\w+)?"); + } + + public void testToXContent() throws IOException { + FieldCapabilitiesResponse response = createSimpleResponse(); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON) + .startObject(); + response.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + + String generatedResponse = BytesReference.bytes(builder).utf8ToString(); + assertEquals(( + "{" + + " \"fields\": {" + + " \"rating\": { " + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"searchable\": false," + + " \"aggregatable\": true," + + " \"indices\": [\"index3\", \"index4\"]," + + " \"non_searchable_indices\": [\"index4\"] " + + " }," + + " \"long\": {" + + " \"type\": \"long\"," + + " \"searchable\": true," + + " \"aggregatable\": false," + + " \"indices\": [\"index1\", \"index2\"]," + + " \"non_aggregatable_indices\": [\"index1\"] " + + " }" + + " }," + + " \"title\": { " + + " \"text\": {" + + " \"type\": \"text\"," + + " \"searchable\": true," + + " \"aggregatable\": false" + + " }" + + " }" + + 
" }" + + "}").replaceAll("\\s+", ""), generatedResponse); + } + + private static FieldCapabilitiesResponse createSimpleResponse() { + Map titleCapabilities = new HashMap<>(); + titleCapabilities.put("text", new FieldCapabilities("title", "text", true, false)); + + Map ratingCapabilities = new HashMap<>(); + ratingCapabilities.put("long", new FieldCapabilities("rating", "long", + true, false, + new String[]{"index1", "index2"}, + null, + new String[]{"index1"})); + ratingCapabilities.put("keyword", new FieldCapabilities("rating", "keyword", + false, true, + new String[]{"index3", "index4"}, + new String[]{"index4"}, + null)); + + Map> responses = new HashMap<>(); + responses.put("title", titleCapabilities); + responses.put("rating", ratingCapabilities); + return new FieldCapabilitiesResponse(responses); } } diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java index 53c27645bf2..0237ace962a 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java @@ -20,16 +20,26 @@ package org.elasticsearch.action.fieldcaps; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import java.io.IOException; import java.util.Arrays; import static org.hamcrest.Matchers.equalTo; -public class FieldCapabilitiesTests extends AbstractWireSerializingTestCase { +public class FieldCapabilitiesTests extends AbstractSerializingTestCase { + private static final String FIELD_NAME = "field"; + + @Override + protected FieldCapabilities doParseInstance(XContentParser parser) throws IOException { + return FieldCapabilities.fromXContent(FIELD_NAME, parser); + } + 
@Override protected FieldCapabilities createTestInstance() { - return randomFieldCaps(); + return randomFieldCaps(FIELD_NAME); } @Override @@ -82,7 +92,7 @@ public class FieldCapabilitiesTests extends AbstractWireSerializingTestCase("test", null, null, null, - Collections.singletonMap("foo", new AliasFilter(new MatchAllQueryBuilder())), Collections.singletonMap("foo", 2.0f), null, - request, null, new GroupShardsIterator<>(Collections.singletonList( - new SearchShardIterator(null, null, Collections.emptyList(), null))), timeProvider, 0, null, + Collections.singletonMap("foo", new AliasFilter(new MatchAllQueryBuilder())), Collections.singletonMap("foo", 2.0f), + Collections.singletonMap("name", Sets.newHashSet("bar", "baz")),null, request, null, + new GroupShardsIterator<>( + Collections.singletonList( + new SearchShardIterator(null, null, Collections.emptyList(), null) + ) + ), timeProvider, 0, null, new InitialSearchPhase.ArraySearchPhaseResults<>(10), request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY) { @Override @@ -117,5 +123,8 @@ public class AbstractSearchAsyncActionTests extends ESTestCase { assertArrayEquals(new String[] {"name", "name1"}, shardSearchTransportRequest.indices()); assertEquals(new MatchAllQueryBuilder(), shardSearchTransportRequest.getAliasFilter().getQueryBuilder()); assertEquals(2.0f, shardSearchTransportRequest.indexBoost(), 0.0f); + assertArrayEquals(new String[] {"name", "name1"}, shardSearchTransportRequest.indices()); + assertArrayEquals(new String[] {"bar", "baz"}, shardSearchTransportRequest.indexRoutings()); + assertEquals("_shards:1,3", shardSearchTransportRequest.preference()); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java index d60f29a5d53..8b174196773 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java 
+++ b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -78,12 +78,12 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { 2, randomBoolean(), primaryNode, replicaNode); final SearchRequest searchRequest = new SearchRequest(); searchRequest.allowPartialSearchResults(true); - + CanMatchPreFilterSearchPhase canMatchPhase = new CanMatchPreFilterSearchPhase(logger, searchTransportService, (clusterAlias, node) -> lookup.get(node), Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY)), - Collections.emptyMap(), EsExecutors.newDirectExecutorService(), + Collections.emptyMap(), Collections.emptyMap(), EsExecutors.newDirectExecutorService(), searchRequest, null, shardsIter, timeProvider, 0, null, (iter) -> new SearchPhase("test") { @Override @@ -159,12 +159,12 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { final SearchRequest searchRequest = new SearchRequest(); searchRequest.allowPartialSearchResults(true); - + CanMatchPreFilterSearchPhase canMatchPhase = new CanMatchPreFilterSearchPhase(logger, searchTransportService, (clusterAlias, node) -> lookup.get(node), Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY)), - Collections.emptyMap(), EsExecutors.newDirectExecutorService(), + Collections.emptyMap(), Collections.emptyMap(), EsExecutors.newDirectExecutorService(), searchRequest, null, shardsIter, timeProvider, 0, null, (iter) -> new SearchPhase("test") { @Override @@ -222,6 +222,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { (clusterAlias, node) -> lookup.get(node), Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY)), Collections.emptyMap(), + Collections.emptyMap(), EsExecutors.newDirectExecutorService(), searchRequest, null, diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java 
b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index c731d1aaabe..82e0fcaf5d6 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -106,6 +106,7 @@ public class SearchAsyncActionTests extends ESTestCase { return lookup.get(node); }, aliasFilters, Collections.emptyMap(), + Collections.emptyMap(), null, request, responseListener, @@ -198,6 +199,7 @@ public class SearchAsyncActionTests extends ESTestCase { return lookup.get(node); }, aliasFilters, Collections.emptyMap(), + Collections.emptyMap(), null, request, responseListener, @@ -303,6 +305,7 @@ public class SearchAsyncActionTests extends ESTestCase { return lookup.get(node); }, aliasFilters, Collections.emptyMap(), + Collections.emptyMap(), executor, request, responseListener, diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java index 65526896864..8a9b00a8d4f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java @@ -83,7 +83,7 @@ public class PrimaryTermsTests extends ESAllocationTestCase { } /** - * puts primary shard routings into initializing state + * puts primary shard indexRoutings into initializing state */ private void initPrimaries() { logger.info("adding {} nodes and performing rerouting", this.numberOfReplicas + 1); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java index 055adbaebbc..349997d7793 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java @@ -83,7 +83,7 @@ 
public class RoutingTableTests extends ESAllocationTestCase { } /** - * puts primary shard routings into initializing state + * puts primary shard routings into initializing state */ private void initPrimaries() { logger.info("adding {} nodes and performing rerouting", this.numberOfReplicas + 1); diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java index 4dc6a859a5c..28fa440d96a 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TopDocs; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -131,16 +130,16 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertTrue(indexService.getRefreshTask().mustReschedule()); // now disable - IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1)).build(); - indexService.updateMetaData(metaData); + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1)).get(); assertNotSame(refreshTask, indexService.getRefreshTask()); assertTrue(refreshTask.isClosed()); assertFalse(refreshTask.isScheduled()); assertFalse(indexService.getRefreshTask().mustReschedule()); // set it to 100ms - metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), 
"100ms")).build(); - indexService.updateMetaData(metaData); + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "100ms")).get(); assertNotSame(refreshTask, indexService.getRefreshTask()); assertTrue(refreshTask.isClosed()); @@ -150,8 +149,8 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertEquals(100, refreshTask.getInterval().millis()); // set it to 200ms - metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "200ms")).build(); - indexService.updateMetaData(metaData); + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "200ms")).get(); assertNotSame(refreshTask, indexService.getRefreshTask()); assertTrue(refreshTask.isClosed()); @@ -161,8 +160,8 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertEquals(200, refreshTask.getInterval().millis()); // set it to 200ms again - metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "200ms")).build(); - indexService.updateMetaData(metaData); + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "200ms")).get(); assertSame(refreshTask, indexService.getRefreshTask()); assertTrue(indexService.getRefreshTask().mustReschedule()); assertTrue(refreshTask.isScheduled()); @@ -174,7 +173,9 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testFsyncTaskIsRunning() throws IOException { - IndexService indexService = createIndex("test", 
Settings.builder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC).build()); + Settings settings = Settings.builder() + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC).build(); + IndexService indexService = createIndex("test", settings); IndexService.AsyncTranslogFSync fsyncTask = indexService.getFsyncTask(); assertNotNull(fsyncTask); assertEquals(5000, fsyncTask.getInterval().millis()); @@ -198,12 +199,10 @@ public class IndexServiceTests extends ESSingleNodeTestCase { IndexShard shard = indexService.getShard(0); client().prepareIndex("test", "test", "0").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); // now disable the refresh - IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()) - .settings(Settings.builder().put(indexService.getMetaData().getSettings()) - .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1)).build(); + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1)).get(); // when we update we reschedule the existing task AND fire off an async refresh to make sure we make everything visible // before that this is why we need to wait for the refresh task to be unscheduled and the first doc to be visible - indexService.updateMetaData(metaData); assertTrue(refreshTask.isClosed()); refreshTask = indexService.getRefreshTask(); assertBusy(() -> { @@ -217,10 +216,8 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertFalse(refreshTask.isClosed()); // refresh every millisecond client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); - metaData = IndexMetaData.builder(indexService.getMetaData()) - .settings(Settings.builder().put(indexService.getMetaData().getSettings()) - .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1ms")).build(); - 
indexService.updateMetaData(metaData); + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1ms")).get(); assertTrue(refreshTask.isClosed()); assertBusy(() -> { // this one becomes visible due to the force refresh we are running on updateMetaData if the interval changes @@ -303,13 +300,11 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertTrue(indexService.getRefreshTask().mustReschedule()); client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); client().admin().indices().prepareFlush("test").get(); - IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder() - .put(indexService.getMetaData().getSettings()) - .put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), -1) - .put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), -1)) - .build(); - indexService.updateMetaData(metaData); - + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.builder() + .put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), -1) + .put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), -1)) + .get(); IndexShard shard = indexService.getShard(0); assertBusy(() -> assertThat(shard.estimateTranslogOperationsFromMinSeq(0L), equalTo(0))); } diff --git a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java index 6e8e679188c..4ef9c36d930 100644 --- a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java @@ -122,6 +122,16 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase { return null; } + @Override + public String[] indexRoutings() { + return null; + } + + @Override + public String preference() { + return null; + } + @Override 
public void setProfile(boolean profile) { diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index ea4b53f16c0..e769485443a 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -3724,15 +3724,13 @@ public class InternalEngineTests extends EngineTestCase { noOpEngine.recoverFromTranslog(); final int gapsFilled = noOpEngine.fillSeqNoGaps(primaryTerm.get()); final String reason = randomAlphaOfLength(16); - noOpEngine.noOp( - new Engine.NoOp( - maxSeqNo + 1, - primaryTerm.get(), - randomFrom(PRIMARY, REPLICA, PEER_RECOVERY, LOCAL_TRANSLOG_RECOVERY), - System.nanoTime(), - reason)); + noOpEngine.noOp(new Engine.NoOp(maxSeqNo + 1, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), reason)); assertThat(noOpEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) (maxSeqNo + 1))); - assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(1 + gapsFilled)); + assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled)); + noOpEngine.noOp( + new Engine.NoOp(maxSeqNo + 2, primaryTerm.get(), randomFrom(PRIMARY, REPLICA, PEER_RECOVERY), System.nanoTime(), reason)); + assertThat(noOpEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) (maxSeqNo + 2))); + assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled + 1)); // skip to the op that we added to the translog Translog.Operation op; Translog.Operation last = null; @@ -3744,7 +3742,7 @@ public class InternalEngineTests extends EngineTestCase { assertNotNull(last); assertThat(last, instanceOf(Translog.NoOp.class)); final Translog.NoOp noOp = (Translog.NoOp) last; - assertThat(noOp.seqNo(), equalTo((long) (maxSeqNo + 1))); + assertThat(noOp.seqNo(), equalTo((long) 
(maxSeqNo + 2))); assertThat(noOp.primaryTerm(), equalTo(primaryTerm.get())); assertThat(noOp.reason(), equalTo(reason)); } finally { diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java index e445eb14117..b3cd1a361ec 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java @@ -59,7 +59,9 @@ import java.util.Collections; import java.util.List; import java.util.Map; import java.util.function.Function; +import java.util.function.Predicate; +import static java.util.Collections.emptyMap; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -85,17 +87,13 @@ public class TermsSetQueryBuilderTests extends AbstractQueryTestCase randomTerms = new ArrayList<>(numValues); - for (int i = 0; i < numValues; i++) { - randomTerms.add(getRandomValueForFieldName(fieldName)); - } + List randomTerms = randomValues(fieldName); TermsSetQueryBuilder queryBuilder = new TermsSetQueryBuilder(STRING_FIELD_NAME, randomTerms); if (randomBoolean()) { queryBuilder.setMinimumShouldMatchField("m_s_m"); } else { queryBuilder.setMinimumShouldMatchScript( - new Script(ScriptType.INLINE, MockScriptEngine.NAME, "_script", Collections.emptyMap())); + new Script(ScriptType.INLINE, MockScriptEngine.NAME, "_script", emptyMap())); } return queryBuilder; } @@ -122,6 +120,41 @@ public class TermsSetQueryBuilderTests extends AbstractQueryTestCase values = instance.getValues(); + String minimumShouldMatchField = null; + Script minimumShouldMatchScript = null; + + switch (randomIntBetween(0, 3)) { + case 0: + Predicate predicate = s -> s.equals(instance.getFieldName()) == false && s.equals(GEO_POINT_FIELD_NAME) == false + && s.equals(GEO_SHAPE_FIELD_NAME) == false; + 
fieldName = randomValueOtherThanMany(predicate, () -> randomFrom(MAPPED_FIELD_NAMES)); + values = randomValues(fieldName); + break; + case 1: + values = randomValues(fieldName); + break; + case 2: + minimumShouldMatchField = randomAlphaOfLengthBetween(1, 10); + break; + case 3: + minimumShouldMatchScript = new Script(ScriptType.INLINE, MockScriptEngine.NAME, randomAlphaOfLength(10), emptyMap()); + break; + } + + TermsSetQueryBuilder newInstance = new TermsSetQueryBuilder(fieldName, values); + if (minimumShouldMatchField != null) { + newInstance.setMinimumShouldMatchField(minimumShouldMatchField); + } + if (minimumShouldMatchScript != null) { + newInstance.setMinimumShouldMatchScript(minimumShouldMatchScript); + } + return newInstance; + } + public void testBothFieldAndScriptSpecified() { TermsSetQueryBuilder queryBuilder = new TermsSetQueryBuilder("_field", Collections.emptyList()); queryBuilder.setMinimumShouldMatchScript(new Script("")); @@ -215,7 +248,7 @@ public class TermsSetQueryBuilderTests extends AbstractQueryTestCase randomValues(final String fieldName) { + final int numValues = randomIntBetween(0, 10); + final List values = new ArrayList<>(numValues); + + for (int i = 0; i < numValues; i++) { + values.add(getRandomValueForFieldName(fieldName)); + } + return values; + } + public static class CustomScriptPlugin extends MockScriptPlugin { @Override diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 9bec4d3cdbf..e945bc12705 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -1661,6 +1661,16 @@ public class IndexShardTests extends IndexShardTestCase { IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted()); assertDocCount(newShard, 1); assertDocCount(shard, 2); + + for (int i = 0; i < 2; i++) { + 
newShard = reinitShard(newShard, ShardRoutingHelper.initWithSameId(primaryShardRouting, + RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE)); + newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); + assertTrue(newShard.recoverFromStore()); + try (Translog.Snapshot snapshot = getTranslog(newShard).newSnapshot()) { + assertThat(snapshot.totalOperations(), equalTo(2)); + } + } closeShards(newShard, shard); } diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestFieldCapabilitiesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/RestFieldCapabilitiesActionTests.java new file mode 100644 index 00000000000..b8dd007f567 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/rest/action/RestFieldCapabilitiesActionTests.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.rest.action; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestFieldCapabilitiesAction; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.usage.UsageService; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collections; + +import static org.mockito.Mockito.mock; + +public class RestFieldCapabilitiesActionTests extends ESTestCase { + + private RestFieldCapabilitiesAction action; + + @Before + public void setUpAction() { + action = new RestFieldCapabilitiesAction(Settings.EMPTY, mock(RestController.class)); + } + + public void testRequestBodyIsDeprecated() throws IOException { + String content = "{ \"fields\": [\"title\"] }"; + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) + .withPath("/_field_caps") + .withContent(new BytesArray(content), XContentType.JSON) + .build(); + action.prepareRequest(request, mock(NodeClient.class)); + + assertWarnings("Specifying a request body is deprecated -- the" + + " [fields] request parameter should be used instead."); + } +} diff --git a/server/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java b/server/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java index 14ec800c3a6..6b3b6a67a97 100644 --- a/server/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java +++ b/server/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java @@ -170,7 +170,7 @@ public class AliasRoutingIT extends ESIntegTestCase { assertThat(client().prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits(), equalTo(1L)); } - 
logger.info("--> search with 0,1 routings , should find two"); for (int i = 0; i < 5; i++) { assertThat(client().prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits(), equalTo(2L)); assertThat(client().prepareSearch().setSize(0).setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits(), equalTo(2L)); diff --git a/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java b/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java index 84caed948a2..0a2a43f2f83 100644 --- a/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java +++ b/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java @@ -173,13 +173,13 @@ public class SimpleRoutingIT extends ESIntegTestCase { assertThat(client().prepareSearch().setSize(0).setRouting(secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits(), equalTo(1L)); } - logger.info("--> search with {},{} routings , should find two", routingValue, "1"); + logger.info("--> search with {},{} routings , should find two", routingValue, "1"); for (int i = 0; i < 5; i++) { assertThat(client().prepareSearch().setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits(), equalTo(2L)); assertThat(client().prepareSearch().setSize(0).setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits(), equalTo(2L)); } - logger.info("--> search with {},{},{} routings , should find two", routingValue, secondRoutingValue, routingValue); + logger.info("--> search with {},{},{} routings , should find two", routingValue, secondRoutingValue, routingValue); for (int i = 0; i < 5; i++) { 
assertThat(client().prepareSearch().setRouting(routingValue, secondRoutingValue, routingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits(), equalTo(2L)); assertThat(client().prepareSearch().setSize(0).setRouting(routingValue, secondRoutingValue,routingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits(), equalTo(2L)); diff --git a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java index ec422435e4e..f59cc85c09c 100644 --- a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java @@ -112,8 +112,8 @@ public class DefaultSearchContextTests extends ESTestCase { IndexReader reader = w.getReader(); Engine.Searcher searcher = new Engine.Searcher("test", new IndexSearcher(reader))) { - DefaultSearchContext context1 = new DefaultSearchContext(1L, shardSearchRequest, null, searcher, indexService, - indexShard, bigArrays, null, timeout, null, null); + DefaultSearchContext context1 = new DefaultSearchContext(1L, shardSearchRequest, null, searcher, null, indexService, + indexShard, bigArrays, null, timeout, null, null, Version.CURRENT); context1.from(300); // resultWindow greater than maxResultWindow and scrollContext is null @@ -153,8 +153,8 @@ public class DefaultSearchContextTests extends ESTestCase { + "] index level setting.")); // rescore is null but sliceBuilder is not null - DefaultSearchContext context2 = new DefaultSearchContext(2L, shardSearchRequest, null, searcher, indexService, - indexShard, bigArrays, null, timeout, null, null); + DefaultSearchContext context2 = new DefaultSearchContext(2L, shardSearchRequest, null, searcher, + null, indexService, indexShard, bigArrays, null, timeout, null, null, Version.CURRENT); SliceBuilder sliceBuilder = mock(SliceBuilder.class); int 
numSlices = maxSlicesPerScroll + randomIntBetween(1, 100); @@ -170,8 +170,8 @@ public class DefaultSearchContextTests extends ESTestCase { when(shardSearchRequest.getAliasFilter()).thenReturn(AliasFilter.EMPTY); when(shardSearchRequest.indexBoost()).thenReturn(AbstractQueryBuilder.DEFAULT_BOOST); - DefaultSearchContext context3 = new DefaultSearchContext(3L, shardSearchRequest, null, searcher, indexService, - indexShard, bigArrays, null, timeout, null, null); + DefaultSearchContext context3 = new DefaultSearchContext(3L, shardSearchRequest, null, searcher, null, + indexService, indexShard, bigArrays, null, timeout, null, null, Version.CURRENT); ParsedQuery parsedQuery = ParsedQuery.parsedMatchAllQuery(); context3.sliceBuilder(null).parsedQuery(parsedQuery).preProcess(false); assertEquals(context3.query(), context3.buildFilteredQuery(parsedQuery.query())); diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index f5552ee0d2e..c58a158fc67 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -213,7 +213,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { SearchPhaseResult searchPhaseResult = service.executeQueryPhase( new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT, new SearchSourceBuilder(), new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, - true), + true, null, null), new SearchTask(123L, "", "", "", null, Collections.emptyMap())); IntArrayList intCursors = new IntArrayList(1); intCursors.add(0); @@ -249,7 +249,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), - 1.0f, true) + 1.0f, true, null, null) ); try { // the search context should inherit the default timeout @@ -269,7 +269,7 @@ public class 
SearchServiceTests extends ESSingleNodeTestCase { new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), - 1.0f, true) + 1.0f, true, null, null) ); try { // the search context should inherit the query timeout @@ -297,12 +297,13 @@ public class SearchServiceTests extends ESSingleNodeTestCase { searchSourceBuilder.docValueField("field" + i); } try (SearchContext context = service.createContext(new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT, - searchSourceBuilder, new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, true))) { + searchSourceBuilder, new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, true, null, null))) { assertNotNull(context); searchSourceBuilder.docValueField("one_field_too_much"); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> service.createContext(new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT, - searchSourceBuilder, new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, true))); + searchSourceBuilder, new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, + true, null, null))); assertEquals( "Trying to retrieve too many docvalue_fields. Must be less than or equal to: [100] but was [101]. 
" + "This limit can be set by changing the [index.max_docvalue_fields_search] index level setting.", @@ -328,13 +329,14 @@ public class SearchServiceTests extends ESSingleNodeTestCase { new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap())); } try (SearchContext context = service.createContext(new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT, - searchSourceBuilder, new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, true))) { + searchSourceBuilder, new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, true, null, null))) { assertNotNull(context); searchSourceBuilder.scriptField("anotherScriptField", new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap())); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> service.createContext(new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT, - searchSourceBuilder, new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, true))); + searchSourceBuilder, new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), + 1.0f, true, null, null))); assertEquals( "Trying to retrieve too many script_fields. 
Must be less than or equal to: [" + maxScriptFields + "] but was [" + (maxScriptFields + 1) @@ -406,28 +408,28 @@ public class SearchServiceTests extends ESSingleNodeTestCase { final IndexShard indexShard = indexService.getShard(0); final boolean allowPartialSearchResults = true; assertTrue(service.canMatch(new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.QUERY_THEN_FETCH, null, - Strings.EMPTY_ARRAY, false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, allowPartialSearchResults))); + Strings.EMPTY_ARRAY, false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, allowPartialSearchResults, null, null))); assertTrue(service.canMatch(new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.QUERY_THEN_FETCH, - new SearchSourceBuilder(), Strings.EMPTY_ARRAY, false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, - allowPartialSearchResults))); + new SearchSourceBuilder(), Strings.EMPTY_ARRAY, false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, + allowPartialSearchResults, null, null))); assertTrue(service.canMatch(new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.QUERY_THEN_FETCH, new SearchSourceBuilder().query(new MatchAllQueryBuilder()), Strings.EMPTY_ARRAY, false, - new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, allowPartialSearchResults))); + new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, allowPartialSearchResults, null, null))); assertTrue(service.canMatch(new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.QUERY_THEN_FETCH, new SearchSourceBuilder().query(new MatchNoneQueryBuilder()) .aggregation(new TermsAggregationBuilder("test", ValueType.STRING).minDocCount(0)), Strings.EMPTY_ARRAY, false, - new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, allowPartialSearchResults))); + new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, allowPartialSearchResults, null, null))); assertTrue(service.canMatch(new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.QUERY_THEN_FETCH, new SearchSourceBuilder().query(new 
MatchNoneQueryBuilder()) .aggregation(new GlobalAggregationBuilder("test")), Strings.EMPTY_ARRAY, false, - new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, allowPartialSearchResults))); + new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, allowPartialSearchResults, null, null))); assertFalse(service.canMatch(new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.QUERY_THEN_FETCH, new SearchSourceBuilder().query(new MatchNoneQueryBuilder()), Strings.EMPTY_ARRAY, false, - new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, allowPartialSearchResults))); + new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, allowPartialSearchResults, null, null))); } diff --git a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java index c2016ceb02c..21a4f099f5a 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java @@ -74,6 +74,8 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { assertEquals(deserializedRequest.searchType(), shardSearchTransportRequest.searchType()); assertEquals(deserializedRequest.shardId(), shardSearchTransportRequest.shardId()); assertEquals(deserializedRequest.numberOfShards(), shardSearchTransportRequest.numberOfShards()); + assertEquals(deserializedRequest.indexRoutings(), shardSearchTransportRequest.indexRoutings()); + assertEquals(deserializedRequest.preference(), shardSearchTransportRequest.preference()); assertEquals(deserializedRequest.cacheKey(), shardSearchTransportRequest.cacheKey()); assertNotSame(deserializedRequest, shardSearchTransportRequest); assertEquals(deserializedRequest.getAliasFilter(), shardSearchTransportRequest.getAliasFilter()); @@ -92,8 +94,10 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { } 
else { filteringAliases = new AliasFilter(null, Strings.EMPTY_ARRAY); } + final String[] routings = generateRandomStringArray(5, 10, false, true); return new ShardSearchTransportRequest(new OriginalIndices(searchRequest), searchRequest, shardId, - randomIntBetween(1, 100), filteringAliases, randomBoolean() ? 1.0f : randomFloat(), Math.abs(randomLong()), null); + randomIntBetween(1, 100), filteringAliases, randomBoolean() ? 1.0f : randomFloat(), + Math.abs(randomLong()), null, routings); } public void testFilteringAliases() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java index 2227cbb806b..d609f84e419 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.slice; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -48,9 +49,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; public class SearchSliceIT extends ESIntegTestCase { - private static final int NUM_DOCS = 1000; - - private int setupIndex(boolean withDocs) throws IOException, ExecutionException, InterruptedException { + private void setupIndex(int numDocs, int numberOfShards) throws IOException, ExecutionException, InterruptedException { String mapping = Strings.toString(XContentFactory.jsonBuilder(). 
startObject() .startObject("type") @@ -70,74 +69,112 @@ public class SearchSliceIT extends ESIntegTestCase { .endObject() .endObject() .endObject()); - int numberOfShards = randomIntBetween(1, 7); assertAcked(client().admin().indices().prepareCreate("test") .setSettings(Settings.builder().put("number_of_shards", numberOfShards).put("index.max_slices_per_scroll", 10000)) .addMapping("type", mapping, XContentType.JSON)); ensureGreen(); - if (withDocs == false) { - return numberOfShards; - } - List requests = new ArrayList<>(); - for (int i = 0; i < NUM_DOCS; i++) { - XContentBuilder builder = jsonBuilder(); - builder.startObject(); - builder.field("invalid_random_kw", randomAlphaOfLengthBetween(5, 20)); - builder.field("random_int", randomInt()); - builder.field("static_int", 0); - builder.field("invalid_random_int", randomInt()); - builder.endObject(); + for (int i = 0; i < numDocs; i++) { + XContentBuilder builder = jsonBuilder() + .startObject() + .field("invalid_random_kw", randomAlphaOfLengthBetween(5, 20)) + .field("random_int", randomInt()) + .field("static_int", 0) + .field("invalid_random_int", randomInt()) + .endObject(); requests.add(client().prepareIndex("test", "type").setSource(builder)); } indexRandom(true, requests); - return numberOfShards; } - public void testDocIdSort() throws Exception { - int numShards = setupIndex(true); - SearchResponse sr = client().prepareSearch("test") - .setQuery(matchAllQuery()) - .setSize(0) - .get(); - int numDocs = (int) sr.getHits().getTotalHits(); - assertThat(numDocs, equalTo(NUM_DOCS)); - int max = randomIntBetween(2, numShards*3); + public void testSearchSort() throws Exception { + int numShards = randomIntBetween(1, 7); + int numDocs = randomIntBetween(100, 1000); + setupIndex(numDocs, numShards); + int max = randomIntBetween(2, numShards * 3); for (String field : new String[]{"_id", "random_int", "static_int"}) { int fetchSize = randomIntBetween(10, 100); + // test _doc sort SearchRequestBuilder request = 
client().prepareSearch("test") .setQuery(matchAllQuery()) .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) .setSize(fetchSize) .addSort(SortBuilders.fieldSort("_doc")); - assertSearchSlicesWithScroll(request, field, max); - } - } + assertSearchSlicesWithScroll(request, field, max, numDocs); - public void testNumericSort() throws Exception { - int numShards = setupIndex(true); - SearchResponse sr = client().prepareSearch("test") - .setQuery(matchAllQuery()) - .setSize(0) - .get(); - int numDocs = (int) sr.getHits().getTotalHits(); - assertThat(numDocs, equalTo(NUM_DOCS)); - - int max = randomIntBetween(2, numShards*3); - for (String field : new String[]{"_id", "random_int", "static_int"}) { - int fetchSize = randomIntBetween(10, 100); - SearchRequestBuilder request = client().prepareSearch("test") + // test numeric sort + request = client().prepareSearch("test") .setQuery(matchAllQuery()) .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) .addSort(SortBuilders.fieldSort("random_int")) .setSize(fetchSize); - assertSearchSlicesWithScroll(request, field, max); + assertSearchSlicesWithScroll(request, field, max, numDocs); + } + } + + public void testWithPreferenceAndRoutings() throws Exception { + int numShards = 10; + int totalDocs = randomIntBetween(100, 1000); + setupIndex(totalDocs, numShards); + { + SearchResponse sr = client().prepareSearch("test") + .setQuery(matchAllQuery()) + .setPreference("_shards:1,4") + .setSize(0) + .get(); + int numDocs = (int) sr.getHits().getTotalHits(); + int max = randomIntBetween(2, numShards * 3); + int fetchSize = randomIntBetween(10, 100); + SearchRequestBuilder request = client().prepareSearch("test") + .setQuery(matchAllQuery()) + .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) + .setSize(fetchSize) + .setPreference("_shards:1,4") + .addSort(SortBuilders.fieldSort("_doc")); + assertSearchSlicesWithScroll(request, "_id", max, numDocs); + } + { + SearchResponse sr = client().prepareSearch("test") + 
.setQuery(matchAllQuery()) + .setRouting("foo", "bar") + .setSize(0) + .get(); + int numDocs = (int) sr.getHits().getTotalHits(); + int max = randomIntBetween(2, numShards * 3); + int fetchSize = randomIntBetween(10, 100); + SearchRequestBuilder request = client().prepareSearch("test") + .setQuery(matchAllQuery()) + .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) + .setSize(fetchSize) + .setRouting("foo", "bar") + .addSort(SortBuilders.fieldSort("_doc")); + assertSearchSlicesWithScroll(request, "_id", max, numDocs); + } + { + assertAcked(client().admin().indices().prepareAliases() + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias1").routing("foo")) + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias2").routing("bar")) + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) + .get()); + SearchResponse sr = client().prepareSearch("alias1", "alias3") + .setQuery(matchAllQuery()) + .setSize(0) + .get(); + int numDocs = (int) sr.getHits().getTotalHits(); + int max = randomIntBetween(2, numShards * 3); + int fetchSize = randomIntBetween(10, 100); + SearchRequestBuilder request = client().prepareSearch("alias1", "alias3") + .setQuery(matchAllQuery()) + .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) + .setSize(fetchSize) + .addSort(SortBuilders.fieldSort("_doc")); + assertSearchSlicesWithScroll(request, "_id", max, numDocs); } } public void testInvalidFields() throws Exception { - setupIndex(false); + setupIndex(0, 1); SearchPhaseExecutionException exc = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("test") .setQuery(matchAllQuery()) @@ -161,7 +198,7 @@ public class SearchSliceIT extends ESIntegTestCase { } public void testInvalidQuery() throws Exception { - setupIndex(false); + setupIndex(0, 1); SearchPhaseExecutionException exc = expectThrows(SearchPhaseExecutionException.class, () -> 
client().prepareSearch() .setQuery(matchAllQuery()) @@ -173,7 +210,7 @@ public class SearchSliceIT extends ESIntegTestCase { equalTo("`slice` cannot be used outside of a scroll context")); } - private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String field, int numSlice) { + private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String field, int numSlice, int numDocs) { int totalResults = 0; List keys = new ArrayList<>(); for (int id = 0; id < numSlice; id++) { @@ -184,7 +221,7 @@ public class SearchSliceIT extends ESIntegTestCase { int numSliceResults = searchResponse.getHits().getHits().length; String scrollId = searchResponse.getScrollId(); for (SearchHit hit : searchResponse.getHits().getHits()) { - keys.add(hit.getId()); + assertTrue(keys.add(hit.getId())); } while (searchResponse.getHits().getHits().length > 0) { searchResponse = client().prepareSearchScroll("test") @@ -195,15 +232,15 @@ public class SearchSliceIT extends ESIntegTestCase { totalResults += searchResponse.getHits().getHits().length; numSliceResults += searchResponse.getHits().getHits().length; for (SearchHit hit : searchResponse.getHits().getHits()) { - keys.add(hit.getId()); + assertTrue(keys.add(hit.getId())); } } assertThat(numSliceResults, equalTo(expectedSliceResults)); clearScroll(scrollId); } - assertThat(totalResults, equalTo(NUM_DOCS)); - assertThat(keys.size(), equalTo(NUM_DOCS)); - assertThat(new HashSet(keys).size(), equalTo(NUM_DOCS)); + assertThat(totalResults, equalTo(numDocs)); + assertThat(keys.size(), equalTo(numDocs)); + assertThat(new HashSet(keys).size(), equalTo(numDocs)); } private Throwable findRootCause(Exception e) { diff --git a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java index 75802e92ee1..b93ebc1adde 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java @@ -30,19 +30,38 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.Version; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.search.SearchShardIterator; +import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.cluster.routing.OperationRouting; +import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.Rewriteable; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.Scroll; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.search.internal.ShardSearchRequest; import 
org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -58,13 +77,138 @@ import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashC import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class SliceBuilderTests extends ESTestCase { private static final int MAX_SLICE = 20; - private static SliceBuilder randomSliceBuilder() throws IOException { + static class ShardSearchRequestTest implements IndicesRequest, ShardSearchRequest { + private final String[] indices; + private final int shardId; + private final String[] indexRoutings; + private final String preference; + + ShardSearchRequestTest(String index, int shardId, String[] indexRoutings, String preference) { + this.indices = new String[] { index }; + this.shardId = shardId; + this.indexRoutings = indexRoutings; + this.preference = preference; + } + + @Override + public String[] indices() { + return indices; + } + + @Override + public IndicesOptions indicesOptions() { + return null; + } + + @Override + public ShardId shardId() { + return new ShardId(new Index(indices[0], indices[0]), shardId); + } + + @Override + public String[] types() { + return new String[0]; + } + + @Override + public SearchSourceBuilder source() { + return null; + } + + @Override + public AliasFilter getAliasFilter() { + return null; + } + + @Override + public void setAliasFilter(AliasFilter filter) { + + } + + @Override + public void source(SearchSourceBuilder source) { + + } + + @Override + public int numberOfShards() { + return 0; + } + + @Override + public SearchType searchType() { + return null; + } + + @Override + public float indexBoost() { + return 0; + } + + @Override + public long nowInMillis() { + return 0; + } + + @Override + public Boolean requestCache() { + return null; + } + + 
@Override + public Boolean allowPartialSearchResults() { + return null; + } + + @Override + public Scroll scroll() { + return null; + } + + @Override + public String[] indexRoutings() { + return indexRoutings; + } + + @Override + public String preference() { + return preference; + } + + @Override + public void setProfile(boolean profile) { + + } + + @Override + public boolean isProfile() { + return false; + } + + @Override + public BytesReference cacheKey() throws IOException { + return null; + } + + @Override + public String getClusterAlias() { + return null; + } + + @Override + public Rewriteable getRewriteable() { + return null; + } + } + + private static SliceBuilder randomSliceBuilder() { int max = randomIntBetween(2, MAX_SLICE); int id = randomIntBetween(1, max - 1); String field = randomAlphaOfLengthBetween(5, 20); @@ -75,7 +219,7 @@ public class SliceBuilderTests extends ESTestCase { return copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()), SliceBuilder::new); } - private static SliceBuilder mutate(SliceBuilder original) throws IOException { + private static SliceBuilder mutate(SliceBuilder original) { switch (randomIntBetween(0, 2)) { case 0: return new SliceBuilder(original.getField() + "_xyz", original.getId(), original.getMax()); case 1: return new SliceBuilder(original.getField(), original.getId() - 1, original.getMax()); @@ -84,6 +228,63 @@ public class SliceBuilderTests extends ESTestCase { } } + private IndexSettings createIndexSettings(Version indexVersionCreated, int numShards) { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, indexVersionCreated) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + IndexMetaData indexState = IndexMetaData.builder("index").settings(settings).build(); + return new IndexSettings(indexState, Settings.EMPTY); + } + + private ShardSearchRequest createRequest(int shardId) { + return 
createRequest(shardId, Strings.EMPTY_ARRAY, null); + } + + private ShardSearchRequest createRequest(int shardId, String[] routings, String preference) { + return new ShardSearchRequestTest("index", shardId, routings, preference); + } + + private QueryShardContext createShardContext(Version indexVersionCreated, IndexReader reader, + String fieldName, DocValuesType dvType, int numShards, int shardId) { + MappedFieldType fieldType = new MappedFieldType() { + @Override + public MappedFieldType clone() { + return null; + } + + @Override + public String typeName() { + return null; + } + + @Override + public Query termQuery(Object value, @Nullable QueryShardContext context) { + return null; + } + + public Query existsQuery(QueryShardContext context) { + return null; + } + }; + fieldType.setName(fieldName); + QueryShardContext context = mock(QueryShardContext.class); + when(context.fieldMapper(fieldName)).thenReturn(fieldType); + when(context.getIndexReader()).thenReturn(reader); + when(context.getShardId()).thenReturn(shardId); + IndexSettings indexSettings = createIndexSettings(indexVersionCreated, numShards); + when(context.getIndexSettings()).thenReturn(indexSettings); + if (dvType != null) { + fieldType.setHasDocValues(true); + fieldType.setDocValuesType(dvType); + IndexNumericFieldData fd = mock(IndexNumericFieldData.class); + when(context.getForField(fieldType)).thenReturn(fd); + } + return context; + + } + public void testSerialization() throws Exception { SliceBuilder original = randomSliceBuilder(); SliceBuilder deserialized = serializedCopy(original); @@ -131,92 +332,41 @@ public class SliceBuilderTests extends ESTestCase { assertEquals("max must be greater than id", e.getMessage()); } - public void testToFilter() throws IOException { + public void testToFilterSimple() throws IOException { Directory dir = new RAMDirectory(); try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) { writer.commit(); } - QueryShardContext 
context = mock(QueryShardContext.class); try (IndexReader reader = DirectoryReader.open(dir)) { - MappedFieldType fieldType = new MappedFieldType() { - @Override - public MappedFieldType clone() { - return null; - } - - @Override - public String typeName() { - return null; - } - - @Override - public Query termQuery(Object value, @Nullable QueryShardContext context) { - return null; - } - - public Query existsQuery(QueryShardContext context) { - return null; - } - }; - fieldType.setName(IdFieldMapper.NAME); - fieldType.setHasDocValues(false); - when(context.fieldMapper(IdFieldMapper.NAME)).thenReturn(fieldType); - when(context.getIndexReader()).thenReturn(reader); - Settings settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .build(); - IndexMetaData indexState = IndexMetaData.builder("index").settings(settings).build(); - IndexSettings indexSettings = new IndexSettings(indexState, Settings.EMPTY); - when(context.getIndexSettings()).thenReturn(indexSettings); + QueryShardContext context = + createShardContext(Version.CURRENT, reader, "_id", DocValuesType.SORTED_NUMERIC, 1,0); SliceBuilder builder = new SliceBuilder(5, 10); - Query query = builder.toFilter(context, 0, 1); + Query query = builder.toFilter(null, createRequest(0), context, Version.CURRENT); assertThat(query, instanceOf(TermsSliceQuery.class)); - assertThat(builder.toFilter(context, 0, 1), equalTo(query)); + assertThat(builder.toFilter(null, createRequest(0), context, Version.CURRENT), equalTo(query)); try (IndexReader newReader = DirectoryReader.open(dir)) { when(context.getIndexReader()).thenReturn(newReader); - assertThat(builder.toFilter(context, 0, 1), equalTo(query)); + assertThat(builder.toFilter(null, createRequest(0), context, Version.CURRENT), equalTo(query)); } } + } + public void testToFilterRandom() throws IOException { + Directory dir = new 
RAMDirectory(); + try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) { + writer.commit(); + } try (IndexReader reader = DirectoryReader.open(dir)) { - MappedFieldType fieldType = new MappedFieldType() { - @Override - public MappedFieldType clone() { - return null; - } - - @Override - public String typeName() { - return null; - } - - @Override - public Query termQuery(Object value, @Nullable QueryShardContext context) { - return null; - } - - public Query existsQuery(QueryShardContext context) { - return null; - } - }; - fieldType.setName("field_doc_values"); - fieldType.setHasDocValues(true); - fieldType.setDocValuesType(DocValuesType.SORTED_NUMERIC); - when(context.fieldMapper("field_doc_values")).thenReturn(fieldType); - when(context.getIndexReader()).thenReturn(reader); - IndexNumericFieldData fd = mock(IndexNumericFieldData.class); - when(context.getForField(fieldType)).thenReturn(fd); - SliceBuilder builder = new SliceBuilder("field_doc_values", 5, 10); - Query query = builder.toFilter(context, 0, 1); + QueryShardContext context = + createShardContext(Version.CURRENT, reader, "field", DocValuesType.SORTED_NUMERIC, 1,0); + SliceBuilder builder = new SliceBuilder("field", 5, 10); + Query query = builder.toFilter(null, createRequest(0), context, Version.CURRENT); assertThat(query, instanceOf(DocValuesSliceQuery.class)); - - assertThat(builder.toFilter(context, 0, 1), equalTo(query)); + assertThat(builder.toFilter(null, createRequest(0), context, Version.CURRENT), equalTo(query)); try (IndexReader newReader = DirectoryReader.open(dir)) { when(context.getIndexReader()).thenReturn(newReader); - assertThat(builder.toFilter(context, 0, 1), equalTo(query)); + assertThat(builder.toFilter(null, createRequest(0), context, Version.CURRENT), equalTo(query)); } // numSlices > numShards @@ -226,7 +376,8 @@ public class SliceBuilderTests extends ESTestCase { for (int i = 0; i < numSlices; i++) { for (int j = 0; j < numShards; j++) 
{ SliceBuilder slice = new SliceBuilder("_id", i, numSlices); - Query q = slice.toFilter(context, j, numShards); + context = createShardContext(Version.CURRENT, reader, "_id", DocValuesType.SORTED, numShards, j); + Query q = slice.toFilter(null, createRequest(j), context, Version.CURRENT); if (q instanceof TermsSliceQuery || q instanceof MatchAllDocsQuery) { AtomicInteger count = numSliceMap.get(j); if (count == null) { @@ -250,12 +401,13 @@ public class SliceBuilderTests extends ESTestCase { // numShards > numSlices numShards = randomIntBetween(4, 100); - numSlices = randomIntBetween(2, numShards-1); + numSlices = randomIntBetween(2, numShards - 1); List targetShards = new ArrayList<>(); for (int i = 0; i < numSlices; i++) { for (int j = 0; j < numShards; j++) { SliceBuilder slice = new SliceBuilder("_id", i, numSlices); - Query q = slice.toFilter(context, j, numShards); + context = createShardContext(Version.CURRENT, reader, "_id", DocValuesType.SORTED, numShards, j); + Query q = slice.toFilter(null, createRequest(j), context, Version.CURRENT); if (q instanceof MatchNoDocsQuery == false) { assertThat(q, instanceOf(MatchAllDocsQuery.class)); targetShards.add(j); @@ -271,7 +423,8 @@ public class SliceBuilderTests extends ESTestCase { for (int i = 0; i < numSlices; i++) { for (int j = 0; j < numShards; j++) { SliceBuilder slice = new SliceBuilder("_id", i, numSlices); - Query q = slice.toFilter(context, j, numShards); + context = createShardContext(Version.CURRENT, reader, "_id", DocValuesType.SORTED, numShards, j); + Query q = slice.toFilter(null, createRequest(j), context, Version.CURRENT); if (i == j) { assertThat(q, instanceOf(MatchAllDocsQuery.class)); } else { @@ -280,85 +433,35 @@ public class SliceBuilderTests extends ESTestCase { } } } + } + public void testInvalidField() throws IOException { + Directory dir = new RAMDirectory(); + try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) { + writer.commit(); + } try 
(IndexReader reader = DirectoryReader.open(dir)) { - MappedFieldType fieldType = new MappedFieldType() { - @Override - public MappedFieldType clone() { - return null; - } - - @Override - public String typeName() { - return null; - } - - @Override - public Query termQuery(Object value, @Nullable QueryShardContext context) { - return null; - } - - public Query existsQuery(QueryShardContext context) { - return null; - } - }; - fieldType.setName("field_without_doc_values"); - when(context.fieldMapper("field_without_doc_values")).thenReturn(fieldType); - when(context.getIndexReader()).thenReturn(reader); - SliceBuilder builder = new SliceBuilder("field_without_doc_values", 5, 10); - IllegalArgumentException exc = - expectThrows(IllegalArgumentException.class, () -> builder.toFilter(context, 0, 1)); + QueryShardContext context = createShardContext(Version.CURRENT, reader, "field", null, 1,0); + SliceBuilder builder = new SliceBuilder("field", 5, 10); + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, + () -> builder.toFilter(null, createRequest(0), context, Version.CURRENT)); assertThat(exc.getMessage(), containsString("cannot load numeric doc values")); } } - public void testToFilterDeprecationMessage() throws IOException { Directory dir = new RAMDirectory(); try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) { writer.commit(); } - QueryShardContext context = mock(QueryShardContext.class); try (IndexReader reader = DirectoryReader.open(dir)) { - MappedFieldType fieldType = new MappedFieldType() { - @Override - public MappedFieldType clone() { - return null; - } - - @Override - public String typeName() { - return null; - } - - @Override - public Query termQuery(Object value, @Nullable QueryShardContext context) { - return null; - } - - public Query existsQuery(QueryShardContext context) { - return null; - } - }; - fieldType.setName("_uid"); - fieldType.setHasDocValues(false); - 
when(context.fieldMapper("_uid")).thenReturn(fieldType); - when(context.getIndexReader()).thenReturn(reader); - Settings settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_3_0) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .build(); - IndexMetaData indexState = IndexMetaData.builder("index").settings(settings).build(); - IndexSettings indexSettings = new IndexSettings(indexState, Settings.EMPTY); - when(context.getIndexSettings()).thenReturn(indexSettings); + QueryShardContext context = createShardContext(Version.V_6_3_0, reader, "_uid", null, 1,0); SliceBuilder builder = new SliceBuilder("_uid", 5, 10); - Query query = builder.toFilter(context, 0, 1); + Query query = builder.toFilter(null, createRequest(0), context, Version.CURRENT); assertThat(query, instanceOf(TermsSliceQuery.class)); - assertThat(builder.toFilter(context, 0, 1), equalTo(query)); + assertThat(builder.toFilter(null, createRequest(0), context, Version.CURRENT), equalTo(query)); assertWarnings("Computing slices on the [_uid] field is deprecated for 6.x indices, use [_id] instead"); } - } public void testSerializationBackcompat() throws IOException { @@ -375,4 +478,35 @@ public class SliceBuilderTests extends ESTestCase { SliceBuilder::new, Version.V_6_3_0); assertEquals(sliceBuilder, copy63); } + + public void testToFilterWithRouting() throws IOException { + Directory dir = new RAMDirectory(); + try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) { + writer.commit(); + } + ClusterService clusterService = mock(ClusterService.class); + ClusterState state = mock(ClusterState.class); + when(state.metaData()).thenReturn(MetaData.EMPTY_META_DATA); + when(clusterService.state()).thenReturn(state); + OperationRouting routing = mock(OperationRouting.class); + GroupShardsIterator it = new GroupShardsIterator<>( + Collections.singletonList( + new 
SearchShardIterator(null, new ShardId("index", "index", 1), null, null) + ) + ); + when(routing.searchShards(any(), any(), any(), any())).thenReturn(it); + when(clusterService.operationRouting()).thenReturn(routing); + when(clusterService.getSettings()).thenReturn(Settings.EMPTY); + try (IndexReader reader = DirectoryReader.open(dir)) { + QueryShardContext context = createShardContext(Version.CURRENT, reader, "field", DocValuesType.SORTED, 5, 0); + SliceBuilder builder = new SliceBuilder("field", 6, 10); + String[] routings = new String[] { "foo" }; + Query query = builder.toFilter(clusterService, createRequest(1, routings, null), context, Version.CURRENT); + assertEquals(new DocValuesSliceQuery("field", 6, 10), query); + query = builder.toFilter(clusterService, createRequest(1, Strings.EMPTY_ARRAY, "foo"), context, Version.CURRENT); + assertEquals(new DocValuesSliceQuery("field", 6, 10), query); + query = builder.toFilter(clusterService, createRequest(1, Strings.EMPTY_ARRAY, "foo"), context, Version.V_6_2_0); + assertEquals(new DocValuesSliceQuery("field", 1, 2), query); + } + } } diff --git a/x-pack/build.gradle b/x-pack/build.gradle index aa3cc1de17f..88e09d82168 100644 --- a/x-pack/build.gradle +++ b/x-pack/build.gradle @@ -23,10 +23,8 @@ subprojects { ext.licenseName = 'Elastic License' ext.licenseUrl = "https://raw.githubusercontent.com/elastic/elasticsearch/${licenseCommit}/licenses/ELASTIC-LICENSE.txt" - plugins.withType(BuildPlugin).whenPluginAdded { - project.licenseFile = rootProject.file('licenses/ELASTIC-LICENSE.txt') - project.noticeFile = xpackRootProject.file('NOTICE.txt') - } + project.ext.licenseFile = rootProject.file('licenses/ELASTIC-LICENSE.txt') + project.ext.noticeFile = xpackRootProject.file('NOTICE.txt') plugins.withType(PluginBuildPlugin).whenPluginAdded { project.esplugin.licenseFile = rootProject.file('licenses/ELASTIC-LICENSE.txt') diff --git a/x-pack/docs/en/setup/docker.asciidoc b/x-pack/docs/en/setup/docker.asciidoc index 
c245f9f181c..a6ee49bc997 100644 --- a/x-pack/docs/en/setup/docker.asciidoc +++ b/x-pack/docs/en/setup/docker.asciidoc @@ -3,8 +3,7 @@ === Install {es} with Docker {es} is also available as Docker images. -The images use https://hub.docker.com/_/centos/[centos:7] as the base image and -are available with {xpack-ref}/xpack-introduction.html[X-Pack]. +The images use https://hub.docker.com/_/centos/[centos:7] as the base image. A list of all published Docker images and tags can be found in https://www.docker.elastic.co[www.docker.elastic.co]. The source code can be found @@ -12,28 +11,19 @@ on https://github.com/elastic/elasticsearch-docker/tree/{branch}[GitHub]. ==== Image types -The images are available in three different configurations or "flavors". The -`basic` flavor, which is the default, ships with {xpack} Basic features -pre-installed and automatically activated with a free licence. The `platinum` -flavor features all {xpack} functionally under a 30-day trial licence. The `oss` -flavor does not include {xpack}, and contains only open-source {es}. +These images are free to use under the Elastic license. They contain open source +and free commercial features and access to paid commercial features. +{xpack-ref}/license-management.html[Start a 30-day trial] to try out all of the +paid commercial features. See the +https://www.elastic.co/subscriptions[Subscriptions] page for information about +Elastic license levels. -NOTE: {xpack-ref}/xpack-security.html[X-Pack Security] is enabled in the `platinum` -image. To access your cluster, it's necessary to set an initial password for the -`elastic` user. The initial password can be set at start up time via the -`ELASTIC_PASSWORD` environment variable: +Alternatively, you can download `-oss` images, which contain only features that +are available under the Apache 2.0 license. 
-["source","txt",subs="attributes"] --------------------------------------------- -docker run -e ELASTIC_PASSWORD=MagicWord {docker-repo}-platinum:{version} --------------------------------------------- +==== Pulling the image -NOTE: The `platinum` image includes a trial license for 30 days. After that, you -can obtain one of the https://www.elastic.co/subscriptions[available -subscriptions] or revert to a Basic licence. The Basic license is free and -includes a selection of {xpack} features. - -Obtaining {Es} for Docker is as simple as issuing a +docker pull+ command +Obtaining {es} for Docker is as simple as issuing a +docker pull+ command against the Elastic Docker registry. ifeval::["{release-state}"=="unreleased"] diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index d6c99e96eb8..d579fb4481d 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -31,6 +31,20 @@ configurations { task testJar(type: Jar) { appendix 'test' from sourceSets.test.output + /* + * Stick the license and notice file in the jar. This isn't strictly + * needed because we don't publish it but it makes our super-paranoid + * tests happy. 
+ */ + metaInf { + from(project.licenseFile.parent) { + include project.licenseFile.name + rename { 'LICENSE.txt' } + } + from(project.noticeFile.parent) { + include project.noticeFile.name + } + } } artifacts { testArtifacts testJar diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java index 510540468a4..2ac82c8d7c0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java @@ -3,23 +3,6 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -/* -* ELASTICSEARCH CONFIDENTIAL -* __________________ -* -* [2017] Elasticsearch Incorporated. All Rights Reserved. -* -* NOTICE: All information contained herein is, and remains -* the property of Elasticsearch Incorporated and its suppliers, -* if any. The intellectual and technical concepts contained -* herein are proprietary to Elasticsearch Incorporated -* and its suppliers and may be covered by U.S. and Foreign Patents, -* patents in process, and are protected by trade secret or copyright law. -* Dissemination of this information or reproduction of this material -* is strictly forbidden unless prior written permission is obtained -* from Elasticsearch Incorporated. 
-*/ - package org.elasticsearch.xpack.security.authc.support.mapper; import org.elasticsearch.common.ParsingException; diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index 9eae21fb18a..06eb24c743a 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -45,9 +45,12 @@ dependencyLicenses { * can be easilly shipped around and used. */ jar { - from { + from({ configurations.compile.collect { it.isDirectory() ? it : zipTree(it) } configurations.runtime.collect { it.isDirectory() ? it : zipTree(it) } + }) { + // We don't need the META-INF from the things we bundle. For now. + exclude 'META-INF/*' } manifest { attributes 'Main-Class': 'org.elasticsearch.xpack.sql.cli.Cli' diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java index b7829a246e0..11068372bcc 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java @@ -15,6 +15,7 @@ import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.io.IOException; +import java.util.TimeZone; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; @@ -24,7 +25,7 @@ import static java.util.Collections.singletonMap; public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase { public static CompositeKeyExtractor randomCompositeKeyExtractor() { - return new CompositeKeyExtractor(randomAlphaOfLength(16), randomFrom(asList(Property.values())), randomTimeZone()); + return new CompositeKeyExtractor(randomAlphaOfLength(16), randomFrom(asList(Property.values())), randomSafeTimeZone()); } 
@Override @@ -58,7 +59,7 @@ public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase< } public void testExtractDate() { - CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomTimeZone()); + CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomSafeTimeZone()); long millis = System.currentTimeMillis(); Bucket bucket = new TestBucket(singletonMap(extractor.key(), millis), randomLong(), new Aggregations(emptyList())); @@ -73,4 +74,13 @@ public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase< SqlIllegalArgumentException exception = expectThrows(SqlIllegalArgumentException.class, () -> extractor.extract(bucket)); assertEquals("Invalid date key returned: " + value, exception.getMessage()); } + + /** + * We need to exclude SystemV/* time zones because they cannot be converted + * back to DateTimeZone which we currently still need to do internally, + * e.g. in bwc serialization and in the extract() method + */ + private static TimeZone randomSafeTimeZone() { + return randomValueOtherThanMany(tz -> tz.getID().startsWith("SystemV"), () -> randomTimeZone()); + } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttributeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttributeTests.java index 4eb5d15741b..51a16c14a88 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttributeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttributeTests.java @@ -3,23 +3,6 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -/* -* ELASTICSEARCH CONFIDENTIAL -* __________________ -* -* [2017] Elasticsearch Incorporated. 
All Rights Reserved. -* -* NOTICE: All information contained herein is, and remains -* the property of Elasticsearch Incorporated and its suppliers, -* if any. The intellectual and technical concepts contained -* herein are proprietary to Elasticsearch Incorporated -* and its suppliers and may be covered by U.S. and Foreign Patents, -* patents in process, and are protected by trade secret or copyright law. -* Dissemination of this information or reproduction of this material -* is strictly forbidden unless prior written permission is obtained -* from Elasticsearch Incorporated. -*/ - package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunctionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunctionTests.java index b050d370625..1e58a1a5dc2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunctionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunctionTests.java @@ -3,23 +3,6 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -/* -* ELASTICSEARCH CONFIDENTIAL -* __________________ -* -* [2017] Elasticsearch Incorporated. All Rights Reserved. -* -* NOTICE: All information contained herein is, and remains -* the property of Elasticsearch Incorporated and its suppliers, -* if any. The intellectual and technical concepts contained -* herein are proprietary to Elasticsearch Incorporated -* and its suppliers and may be covered by U.S. and Foreign Patents, -* patents in process, and are protected by trade secret or copyright law. 
-* Dissemination of this information or reproduction of this material -* is strictly forbidden unless prior written permission is obtained -* from Elasticsearch Incorporated. -*/ - package org.elasticsearch.xpack.sql.expression.function; import org.elasticsearch.xpack.sql.expression.Expression; diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java index 168cda6eeda..c9ad4b3053c 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java @@ -39,34 +39,6 @@ public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCa XPackRestTestHelper.waitForMlTemplates(client()); } - /** - * Enables an HTTP exporter for monitoring so that we can test the production-level exporter (not the local exporter). - * - * The build.gradle file disables data collection, so the expectation is that any monitoring rest tests will use the - * "_xpack/monitoring/_bulk" endpoint to lazily setup the templates on-demand and fill in data without worrying about - * timing. 
- */ - @Before - public void waitForMonitoring() throws Exception { - final String[] nodes = System.getProperty("tests.rest.cluster").split(","); - final Map settings = new HashMap<>(); - - settings.put("xpack.monitoring.exporters._http.enabled", true); - // only select the last node to avoid getting the "old" node in a mixed cluster - // if we ever randomize the order that the nodes are restarted (or add more nodes), then we need to verify which node we select - settings.put("xpack.monitoring.exporters._http.host", nodes[nodes.length - 1]); - - assertBusy(() -> { - final ClientYamlTestResponse response = - getAdminExecutionContext().callApi("cluster.put_settings", - emptyMap(), - singletonList(singletonMap("transient", settings)), - emptyMap()); - - assertThat(response.evaluate("acknowledged"), is(true)); - }); - } - @Override protected boolean preserveIndicesUponCompletion() { return true; diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/60_monitoring.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/60_monitoring.yml deleted file mode 100644 index a5711cfd46c..00000000000 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/60_monitoring.yml +++ /dev/null @@ -1,48 +0,0 @@ ---- -setup: - - do: - cluster.health: - wait_for_status: yellow - ---- -"Index monitoring data and search on the mixed cluster": - - skip: - version: "all" - reason: "AwaitsFix'ing, see x-pack-elasticsearch #2948" - - - do: - search: - index: .monitoring-kibana-* - body: { "query": { "term" : { "type": "old_cluster" } } } - - match: { hits.total: 2 } - - - do: - xpack.monitoring.bulk: - system_id: "kibana" - system_api_version: "6" - interval: "123456ms" - type: "mixed_cluster" - body: - - '{"index": {}}' - - '{"field": "value_3"}' - - '{"index": {}}' - - '{"field": "value_4"}' - - '{"index": {}}' - - '{"field": "value_5"}' - - - is_false: errors - - - do: - indices.refresh: {} - - - do: 
- search: - index: .monitoring-kibana-* - body: { "query": { "term" : { "type": "old_cluster" } } } - - match: { hits.total: 2 } - - - do: - search: - index: .monitoring-kibana-* - body: { "query": { "term" : { "type": "mixed_cluster" } } } - - match: { hits.total: 3 } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/60_monitoring.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/60_monitoring.yml deleted file mode 100644 index 5185e6b5c22..00000000000 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/60_monitoring.yml +++ /dev/null @@ -1,33 +0,0 @@ ---- -setup: - - do: - cluster.health: - wait_for_status: yellow - ---- -"Index monitoring data and search on the old cluster": - - skip: - version: "all" - reason: "AwaitsFix'ing, see x-pack-elasticsearch #2948" - - do: - xpack.monitoring.bulk: - system_id: "kibana" - system_api_version: "6" - interval: "123456ms" - type: "old_cluster" - body: - - '{"index": {}}' - - '{"field": "value_1"}' - - '{"index": {}}' - - '{"field": "value_2"}' - - - is_false: errors - - - do: - indices.refresh: {} - - - do: - search: - index: .monitoring-kibana-* - body: { "query": { "term" : { "type": "old_cluster" } } } - - match: { hits.total: 2 } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/60_monitoring.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/60_monitoring.yml deleted file mode 100644 index 5d3501f7783..00000000000 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/60_monitoring.yml +++ /dev/null @@ -1,54 +0,0 @@ ---- -setup: - - do: - cluster.health: - wait_for_status: yellow - ---- -"Index monitoring data and search on the upgraded cluster": - - skip: - version: "all" - reason: "AwaitsFix'ing, see x-pack-elasticsearch #2948" - - - do: - search: - index: .monitoring-kibana-* - body: { "query": { "term" : { 
"type": "old_cluster" } } } - - match: { hits.total: 2 } - - - do: - search: - index: .monitoring-kibana-* - body: { "query": { "term" : { "type": "mixed_cluster" } } } - - match: { hits.total: 3 } - - - do: - xpack.monitoring.bulk: - system_id: "kibana" - system_api_version: "6" - interval: "123456ms" - type: "upgraded_cluster" - body: - - '{"index": {}}' - - '{"field": "value_6"}' - - '{"index": {}}' - - '{"field": "value_7"}' - - '{"index": {}}' - - '{"field": "value_8"}' - - - is_false: errors - - - do: - indices.refresh: {} - - - do: - search: - index: .monitoring-kibana-* - body: { "query": { "terms" : { "type": [ "old_cluster", "mixed_cluster" ] } } } - - match: { hits.total: 5 } - - - do: - search: - index: .monitoring-kibana-* - body: { "query": { "term" : { "type": "upgraded_cluster" } } } - - match: { hits.total: 3 } diff --git a/x-pack/qa/smoke-test-watcher-with-mustache/build.gradle b/x-pack/qa/smoke-test-watcher-with-mustache/build.gradle deleted file mode 100644 index ca454f28a87..00000000000 --- a/x-pack/qa/smoke-test-watcher-with-mustache/build.gradle +++ /dev/null @@ -1,14 +0,0 @@ -apply plugin: 'elasticsearch.standalone-rest-test' -apply plugin: 'elasticsearch.rest-test' - -dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') - testCompile project(path: xpackModule('watcher'), configuration: 'runtime') - testCompile project(path: ':modules:lang-mustache', configuration: 'runtime') -} - -integTestCluster { - setting 'xpack.security.enabled', 'false' - setting 'xpack.monitoring.enabled', 'false' - setting 'xpack.license.self_generated.type', 'trial' -} diff --git a/x-pack/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherWithMustacheIT.java b/x-pack/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherWithMustacheIT.java deleted file mode 100644 index 07964e5da9b..00000000000 --- 
a/x-pack/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherWithMustacheIT.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.smoketest; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField; -import org.junit.After; -import org.junit.Before; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; -import static org.hamcrest.Matchers.is; - -/** Runs rest tests against external cluster */ -public class WatcherWithMustacheIT extends ESClientYamlSuiteTestCase { - - public WatcherWithMustacheIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); - } - - @Before - public void startWatcher() throws Exception { - final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES); - assertBusy(() -> { - try { - getAdminExecutionContext().callApi("xpack.watcher.start", emptyMap(), emptyList(), emptyMap()); - - for (String template : watcherTemplates) { - ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi("indices.exists_template", - 
singletonMap("name", template), emptyList(), emptyMap()); - assertThat(templateExistsResponse.getStatusCode(), is(200)); - } - - ClientYamlTestResponse response = - getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); - assertThat(state, is("started")); - } catch (IOException e) { - throw new AssertionError(e); - } - }); - } - - @After - public void stopWatcher() throws Exception { - assertBusy(() -> { - try { - getAdminExecutionContext().callApi("xpack.watcher.stop", emptyMap(), emptyList(), emptyMap()); - ClientYamlTestResponse response = - getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); - assertThat(state, is("stopped")); - } catch (IOException e) { - throw new AssertionError(e); - } - }); - } -} diff --git a/x-pack/qa/smoke-test-watcher-with-painless/build.gradle b/x-pack/qa/smoke-test-watcher-with-painless/build.gradle deleted file mode 100644 index 7e86152e5b3..00000000000 --- a/x-pack/qa/smoke-test-watcher-with-painless/build.gradle +++ /dev/null @@ -1,13 +0,0 @@ -apply plugin: 'elasticsearch.standalone-rest-test' -apply plugin: 'elasticsearch.rest-test' - -dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') - testCompile project(path: ':modules:lang-painless', configuration: 'runtime') -} - -integTestCluster { - setting 'xpack.security.enabled', 'false' - setting 'xpack.monitoring.enabled', 'false' - setting 'xpack.license.self_generated.type', 'trial' -} diff --git a/x-pack/qa/smoke-test-watcher-with-painless/src/test/java/org/elasticsearch/smoketest/WatcherWithPainlessIT.java b/x-pack/qa/smoke-test-watcher-with-painless/src/test/java/org/elasticsearch/smoketest/WatcherWithPainlessIT.java deleted file mode 100644 index aee36dedbb8..00000000000 --- 
a/x-pack/qa/smoke-test-watcher-with-painless/src/test/java/org/elasticsearch/smoketest/WatcherWithPainlessIT.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.smoketest; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField; -import org.junit.After; -import org.junit.Before; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; -import static org.hamcrest.Matchers.is; - -/** Runs rest tests against external cluster */ -public class WatcherWithPainlessIT extends ESClientYamlSuiteTestCase { - - public WatcherWithPainlessIT(ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); - } - - @Before - public void startWatcher() throws Exception { - final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES); - assertBusy(() -> { - try { - getAdminExecutionContext().callApi("xpack.watcher.start", emptyMap(), emptyList(), emptyMap()); - - for (String template : watcherTemplates) { - ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi("indices.exists_template", - singletonMap("name", template), emptyList(), emptyMap()); - 
assertThat(templateExistsResponse.getStatusCode(), is(200)); - } - - ClientYamlTestResponse response = - getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); - assertThat(state, is("started")); - } catch (IOException e) { - throw new AssertionError(e); - } - }); - } - - @After - public void stopWatcher() throws Exception { - assertBusy(() -> { - try { - getAdminExecutionContext().callApi("xpack.watcher.stop", emptyMap(), emptyList(), emptyMap()); - ClientYamlTestResponse response = - getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); - assertThat(state, is("stopped")); - } catch (IOException e) { - throw new AssertionError(e); - } - }); - } -} diff --git a/x-pack/qa/smoke-test-watcher/build.gradle b/x-pack/qa/smoke-test-watcher/build.gradle index 4f62f2866cf..8423ae0d24a 100644 --- a/x-pack/qa/smoke-test-watcher/build.gradle +++ b/x-pack/qa/smoke-test-watcher/build.gradle @@ -9,6 +9,8 @@ apply plugin: 'elasticsearch.rest-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'runtime') testCompile project(path: xpackModule('watcher'), configuration: 'runtime') + testCompile project(path: ':modules:lang-mustache', configuration: 'runtime') + testCompile project(path: ':modules:lang-painless', configuration: 'runtime') } integTestCluster { diff --git a/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherRestIT.java b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherRestIT.java new file mode 100644 index 00000000000..86df8065311 --- /dev/null +++ b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherRestIT.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.smoketest; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField; +import org.junit.After; +import org.junit.Before; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.is; + +/** Runs rest tests against external cluster */ +public class WatcherRestIT extends ESClientYamlSuiteTestCase { + + public WatcherRestIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(); + } + + @Before + public void startWatcher() throws Exception { + assertBusy(() -> { + ClientYamlTestResponse response = + getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap()); + String state = (String) response.evaluate("stats.0.watcher_state"); + + switch (state) { + case "stopped": + ClientYamlTestResponse startResponse = + getAdminExecutionContext().callApi("xpack.watcher.start", emptyMap(), emptyList(), emptyMap()); + boolean isAcknowledged = (boolean) startResponse.evaluate("acknowledged"); + assertThat(isAcknowledged, is(true)); + break; + case "stopping": + throw new AssertionError("waiting until stopping state reached stopped state to start again"); + case "starting": + throw new AssertionError("waiting 
until starting state reached started state"); + case "started": + // all good here, we are done + break; + default: + throw new AssertionError("unknown state[" + state + "]"); + } + }); + + assertBusy(() -> { + for (String template : WatcherIndexTemplateRegistryField.TEMPLATE_NAMES) { + ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi("indices.exists_template", + singletonMap("name", template), emptyList(), emptyMap()); + assertThat(templateExistsResponse.getStatusCode(), is(200)); + } + }); + } + + @After + public void stopWatcher() throws Exception { + assertBusy(() -> { + ClientYamlTestResponse response = + getAdminExecutionContext().callApi("xpack.watcher.stats", emptyMap(), emptyList(), emptyMap()); + String state = (String) response.evaluate("stats.0.watcher_state"); + + switch (state) { + case "stopped": + // all good here, we are done + break; + case "stopping": + throw new AssertionError("waiting until stopping state reached stopped state"); + case "starting": + throw new AssertionError("waiting until starting state reached started state to stop"); + case "started": + ClientYamlTestResponse stopResponse = + getAdminExecutionContext().callApi("xpack.watcher.stop", emptyMap(), emptyList(), emptyMap()); + boolean isAcknowledged = (boolean) stopResponse.evaluate("acknowledged"); + assertThat(isAcknowledged, is(true)); + break; + default: + throw new AssertionError("unknown state[" + state + "]"); + } + }); + } +} diff --git a/x-pack/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherTemplateTests.java b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateIT.java similarity index 99% rename from x-pack/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherTemplateTests.java rename to x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateIT.java index ecfc7c0f396..feeea248712 100644 --- 
a/x-pack/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherTemplateTests.java +++ b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateIT.java @@ -30,7 +30,7 @@ import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; -public class WatcherTemplateTests extends ESTestCase { +public class WatcherTemplateIT extends ESTestCase { private TextTemplateEngine textTemplateEngine; diff --git a/x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/actions/10_webhook.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/10_webhook.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/actions/10_webhook.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/10_webhook.yml diff --git a/x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/20_array_access.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/20_array_access.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/20_array_access.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/20_array_access.yml diff --git a/x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/25_array_compare.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/25_array_compare.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/25_array_compare.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/25_array_compare.yml diff --git 
a/x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/30_search_input.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/30_search_input.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/30_search_input.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/30_search_input.yml diff --git a/x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/40_search_transform.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/40_search_transform.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/40_search_transform.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/40_search_transform.yml diff --git a/x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/50_webhook_url_escaping.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/50_webhook_url_escaping.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/50_webhook_url_escaping.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/mustache/50_webhook_url_escaping.yml diff --git a/x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/10_basic.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/10_basic.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/10_basic.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/10_basic.yml diff --git 
a/x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/20_minimal_body.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/20_minimal_body.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/20_minimal_body.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/20_minimal_body.yml diff --git a/x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/30_inline_watch.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/30_inline_watch.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/30_inline_watch.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/30_inline_watch.yml diff --git a/x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/40_exception.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/40_exception.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/40_exception.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/40_exception.yml diff --git a/x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/50_update_scripts.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/50_update_scripts.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/50_update_scripts.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/50_update_scripts.yml diff --git 
a/x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/60_chain_input_with_transform.yml b/x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/60_chain_input_with_transform.yml similarity index 100% rename from x-pack/qa/smoke-test-watcher-with-painless/src/test/resources/rest-api-spec/test/watcher_painless/60_chain_input_with_transform.yml rename to x-pack/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/painless/60_chain_input_with_transform.yml diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/10_basic.bats b/x-pack/qa/vagrant/src/test/resources/packaging/tests/10_basic.bats index 796f4509607..898fedbff79 100644 --- a/x-pack/qa/vagrant/src/test/resources/packaging/tests/10_basic.bats +++ b/x-pack/qa/vagrant/src/test/resources/packaging/tests/10_basic.bats @@ -19,6 +19,7 @@ load $BATS_UTILS/xpack.bash setup() { skip_not_tar_gz export ESHOME=/tmp/elasticsearch + export PACKAGE_NAME="elasticsearch" export_elasticsearch_paths export ESPLUGIN_COMMAND_USER=elasticsearch }