diff --git a/NOTICE.txt b/NOTICE.txt index 643a060cd05..f1e3198ab4a 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -1,5 +1,5 @@ Elasticsearch -Copyright 2009-2017 Elasticsearch +Copyright 2009-2018 Elasticsearch This product includes software developed by The Apache Software Foundation (http://www.apache.org/). diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 40a8ec230ac..686233bdfe7 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -167,7 +167,10 @@ class NodeInfo { String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ") String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs if (Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))) { - esJavaOpts += " -ea -esa" + // put the enable assertions options before other options to allow + // flexibility to disable assertions for specific packages or classes + // in the cluster-specific options + esJavaOpts = String.join(" ", "-ea", "-esa", esJavaOpts) } env.put('ES_JAVA_OPTS', esJavaOpts) for (Map.Entry property : System.properties.entrySet()) { diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 11f19f683e5..3b1323e936e 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -56,14 +56,12 @@ - - @@ -77,18 +75,15 @@ - - - @@ -101,7 +96,6 @@ - @@ -112,19 +106,15 @@ - - - - @@ -139,12 +129,10 @@ - - @@ -177,7 +165,6 @@ - @@ -215,7 +202,6 @@ - @@ -314,7 +300,6 @@ - @@ -358,7 +343,6 @@ - @@ -401,7 +385,6 @@ - @@ -439,7 +422,6 @@ - @@ -582,7 +564,6 @@ - @@ -604,7 +585,6 @@ - @@ -733,7 +713,6 @@ - @@ -744,11 +723,9 @@ - - diff --git a/buildSrc/version.properties b/buildSrc/version.properties index e064b2f223c..0f3e6c62c9b 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 7.0.0-alpha1 -lucene = 7.3.0-snapshot-98a6b3d +lucene = 7.3.0 # optional dependencies spatial4j = 0.7 diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java index 802b1492be0..c8f9725f955 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java @@ -544,8 +544,10 @@ public final class Request { static Request rankEval(RankEvalRequest rankEvalRequest) throws IOException { String endpoint = endpoint(rankEvalRequest.indices(), Strings.EMPTY_ARRAY, "_rank_eval"); + Params params = Params.builder(); + params.withIndicesOptions(rankEvalRequest.indicesOptions()); HttpEntity entity = createEntity(rankEvalRequest.getRankEvalSpec(), REQUEST_BODY_CONTENT_TYPE); - return new Request(HttpGet.METHOD_NAME, endpoint, Collections.emptyMap(), entity); + return new Request(HttpGet.METHOD_NAME, endpoint, params.getParams(), entity); } static Request split(ResizeRequest resizeRequest) throws IOException { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java index 7e60e5f169f..9497bdded05 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java @@ -21,6 +21,8 @@ package org.elasticsearch.client; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.rankeval.EvalQueryQuality; import org.elasticsearch.index.rankeval.PrecisionAtK; @@ -37,8 +39,9 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Map.Entry; -import java.util.Set; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments; @@ -55,6 +58,10 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase { client().performRequest("PUT", "/index/doc/5", Collections.emptyMap(), doc); client().performRequest("PUT", "/index/doc/6", Collections.emptyMap(), doc); client().performRequest("POST", "/index/_refresh"); + + // add another index to test basic multi index support + client().performRequest("PUT", "/index2/doc/7", Collections.emptyMap(), doc); + client().performRequest("POST", "/index2/_refresh"); } /** @@ -64,7 +71,9 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase { public void testRankEvalRequest() throws IOException { SearchSourceBuilder testQuery = new SearchSourceBuilder(); testQuery.query(new MatchAllQueryBuilder()); - RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", createRelevant("index" , "2", "3", "4", "5"), testQuery); + List amsterdamRatedDocs = createRelevant("index" , "2", "3", "4", "5"); + amsterdamRatedDocs.addAll(createRelevant("index2", "7")); + RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", amsterdamRatedDocs, testQuery); RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("index", "1"), testQuery); List specifications = new ArrayList<>(); specifications.add(amsterdamRequest); @@ -72,49 +81,46 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase { PrecisionAtK metric = new PrecisionAtK(1, false, 10); RankEvalSpec spec = new RankEvalSpec(specifications, metric); - RankEvalResponse response = execute(new RankEvalRequest(spec, new String[] { "index" }), highLevelClient()::rankEval, + RankEvalRequest rankEvalRequest = new RankEvalRequest(spec, new String[] { "index", "index2" }); + RankEvalResponse response = execute(rankEvalRequest, highLevelClient()::rankEval, highLevelClient()::rankEvalAsync); - // the expected Prec@ for the first query is 4/6 and the expected Prec@ for the second is 1/6, divided by 2 to get the average - double expectedPrecision = (1.0 / 6.0 + 4.0 / 6.0) / 2.0; + // the expected Prec@ for the first query is 5/7 and the expected Prec@ for the second is 1/7, divided by 2 to get the average + double expectedPrecision = (1.0 / 7.0 + 5.0 / 7.0) / 2.0; assertEquals(expectedPrecision, response.getEvaluationResult(), Double.MIN_VALUE); - Set> entrySet = response.getPartialResults().entrySet(); - assertEquals(2, entrySet.size()); - for (Entry entry : entrySet) { - EvalQueryQuality quality = entry.getValue(); - if (entry.getKey() == "amsterdam_query") { - assertEquals(2, filterUnknownDocuments(quality.getHitsAndRatings()).size()); - List hitsAndRatings = 
quality.getHitsAndRatings(); - assertEquals(6, hitsAndRatings.size()); - for (RatedSearchHit hit : hitsAndRatings) { - String id = hit.getSearchHit().getId(); - if (id.equals("1") || id.equals("6")) { - assertFalse(hit.getRating().isPresent()); - } else { - assertEquals(1, hit.getRating().get().intValue()); - } - } - } - if (entry.getKey() == "berlin_query") { - assertEquals(5, filterUnknownDocuments(quality.getHitsAndRatings()).size()); - List hitsAndRatings = quality.getHitsAndRatings(); - assertEquals(6, hitsAndRatings.size()); - for (RatedSearchHit hit : hitsAndRatings) { - String id = hit.getSearchHit().getId(); - if (id.equals("1")) { - assertEquals(1, hit.getRating().get().intValue()); - } else { - assertFalse(hit.getRating().isPresent()); - } - } + Map partialResults = response.getPartialResults(); + assertEquals(2, partialResults.size()); + EvalQueryQuality amsterdamQueryQuality = partialResults.get("amsterdam_query"); + assertEquals(2, filterUnknownDocuments(amsterdamQueryQuality.getHitsAndRatings()).size()); + List hitsAndRatings = amsterdamQueryQuality.getHitsAndRatings(); + assertEquals(7, hitsAndRatings.size()); + for (RatedSearchHit hit : hitsAndRatings) { + String id = hit.getSearchHit().getId(); + if (id.equals("1") || id.equals("6")) { + assertFalse(hit.getRating().isPresent()); + } else { + assertEquals(1, hit.getRating().get().intValue()); } } + EvalQueryQuality berlinQueryQuality = partialResults.get("berlin_query"); + assertEquals(6, filterUnknownDocuments(berlinQueryQuality.getHitsAndRatings()).size()); + hitsAndRatings = berlinQueryQuality.getHitsAndRatings(); + assertEquals(7, hitsAndRatings.size()); + for (RatedSearchHit hit : hitsAndRatings) { + String id = hit.getSearchHit().getId(); + if (id.equals("1")) { + assertEquals(1, hit.getRating().get().intValue()); + } else { + assertFalse(hit.getRating().isPresent()); + } + } + + // now try this when test2 is closed + client().performRequest("POST", "index2/_close", Collections.emptyMap()); + rankEvalRequest.indicesOptions(IndicesOptions.fromParameters(null, "true", null, SearchRequest.DEFAULT_INDICES_OPTIONS)); + response = execute(rankEvalRequest, highLevelClient()::rankEval, highLevelClient()::rankEvalAsync); } private static List createRelevant(String indexName, String... 
docs) { - List relevant = new ArrayList<>(); - for (String doc : docs) { - relevant.add(new RatedDocument(indexName, doc, 1)); - } - return relevant; + return Stream.of(docs).map(s -> new RatedDocument(indexName, s, 1)).collect(Collectors.toList()); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java index 75ac543fbb4..e560540236c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java @@ -1247,6 +1247,8 @@ public class RequestTests extends ESTestCase { new PrecisionAtK()); String[] indices = randomIndicesNames(0, 5); RankEvalRequest rankEvalRequest = new RankEvalRequest(spec, indices); + Map expectedParams = new HashMap<>(); + setRandomIndicesOptions(rankEvalRequest::indicesOptions, rankEvalRequest::indicesOptions, expectedParams); Request request = Request.rankEval(rankEvalRequest); StringJoiner endpoint = new StringJoiner("/", "/", ""); @@ -1256,8 +1258,10 @@ public class RequestTests extends ESTestCase { } endpoint.add(RestRankEvalAction.ENDPOINT); assertEquals(endpoint.toString(), request.getEndpoint()); - assertEquals(Collections.emptyMap(), request.getParameters()); + assertEquals(3, request.getParameters().size()); + assertEquals(expectedParams, request.getParameters()); assertToXContentBody(spec, request.getEntity()); + } public void testSplit() throws IOException { diff --git a/distribution/packages/src/deb/copyright b/distribution/packages/src/deb/copyright index 1e647bbac54..98a923677c9 100644 --- a/distribution/packages/src/deb/copyright +++ b/distribution/packages/src/deb/copyright @@ -1,4 +1,4 @@ -Copyright 2013-2016 Elasticsearch +Copyright 2013-2018 Elasticsearch License: Apache-2.0 Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/docs/java-rest/license.asciidoc b/docs/java-rest/license.asciidoc index b097fe853b6..68797486827 100644 --- a/docs/java-rest/license.asciidoc +++ b/docs/java-rest/license.asciidoc @@ -1,6 +1,6 @@ == License -Copyright 2013-2017 Elasticsearch +Copyright 2013-2018 Elasticsearch Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/docs/perl/index.asciidoc b/docs/perl/index.asciidoc index 734447d0dd1..fc487c735eb 100644 --- a/docs/perl/index.asciidoc +++ b/docs/perl/index.asciidoc @@ -115,7 +115,7 @@ https://github.com/elastic/elasticsearch-perl/blob/master/CONTRIBUTING.asciidoc[ == Copyright and License -This software is Copyright (c) 2013-2016 by Elasticsearch BV. +This software is Copyright (c) 2013-2018 by Elasticsearch BV. This is free software, licensed under: https://github.com/elastic/elasticsearch-perl/blob/master/LICENSE.txt[The Apache License Version 2.0]. diff --git a/docs/python/index.asciidoc b/docs/python/index.asciidoc index 3dc71822996..ea1b6a837b5 100644 --- a/docs/python/index.asciidoc +++ b/docs/python/index.asciidoc @@ -120,7 +120,7 @@ some of the more engaging tasks like bulk indexing and reindexing. === License -Copyright 2013-2017 Elasticsearch +Copyright 2013-2018 Elasticsearch Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
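The rank_eval client changes earlier in this diff make Request.rankEval forward the request's IndicesOptions as query parameters, which is what lets RankEvalIT evaluate against a mix of open and closed indices. A minimal sketch of how a caller might use this from the high-level client; the client variable, index names, and ratings here are illustrative, not taken from the PR:

import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.rankeval.PrecisionAtK;
import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.index.rankeval.RankEvalResponse;
import org.elasticsearch.index.rankeval.RankEvalSpec;
import org.elasticsearch.index.rankeval.RatedDocument;
import org.elasticsearch.index.rankeval.RatedRequest;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;
import java.util.Collections;

public class RankEvalIndicesOptionsExample {

    // "client" is assumed to be an already-initialized RestHighLevelClient
    static RankEvalResponse evaluate(RestHighLevelClient client) throws IOException {
        SearchSourceBuilder query = new SearchSourceBuilder().query(new MatchAllQueryBuilder());
        RatedRequest rated = new RatedRequest("amsterdam_query",
                Collections.singletonList(new RatedDocument("index", "1", 1)), query);
        RankEvalSpec spec = new RankEvalSpec(Collections.singletonList(rated), new PrecisionAtK());
        RankEvalRequest request = new RankEvalRequest(spec, new String[] { "index", "index2" });
        // honored since this change: expand only open indices and ignore
        // unavailable ones, so a closed "index2" no longer fails the request
        request.indicesOptions(IndicesOptions.lenientExpandOpen());
        return client.rankEval(request);
    }
}

IndicesOptions.lenientExpandOpen() is just one convenient choice; the integration test above instead builds its options with IndicesOptions.fromParameters(null, "true", null, SearchRequest.DEFAULT_INDICES_OPTIONS) to the same effect.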
diff --git a/docs/reference/migration/migrate_7_0/analysis.asciidoc b/docs/reference/migration/migrate_7_0/analysis.asciidoc index 560cc68818a..db617d3301f 100644 --- a/docs/reference/migration/migrate_7_0/analysis.asciidoc +++ b/docs/reference/migration/migrate_7_0/analysis.asciidoc @@ -1,20 +1,12 @@ [[breaking_70_analysis_changes]] === Analysis changes -==== The `delimited_payload_filter` is renamed - -The `delimited_payload_filter` is renamed to `delimited_payload`, the old name is -deprecated and will be removed at some point, so it should be replaced by -`delimited_payload`. - - ==== Limiting the number of tokens produced by _analyze To safeguard against out of memory errors, the number of tokens that can be produced using the `_analyze` endpoint has been limited to 10000. This default limit can be changed for a particular index with the index setting `index.analyze.max_token_count`. - ==== Limiting the length of an analyzed text during highlighting Highlighting a text that was indexed without offsets or term vectors, @@ -22,4 +14,11 @@ requires analysis of this text in memory real time during the search request. For large texts this analysis may take substantial amount of time and memory. To protect against this, the maximum number of characters that will be analyzed has been limited to 1000000. This default limit can be changed -for a particular index with the index setting `index.highlight.max_analyzed_offset`. \ No newline at end of file +for a particular index with the index setting `index.highlight.max_analyzed_offset`. + +==== `delimited_payload_filter` renaming + +The `delimited_payload_filter` was deprecated and renamed to `delimited_payload` in 6.2. +Using it in indices created before 7.0 will issue deprecation warnings. Using the old +name in new indices created in 7.0 will throw an error. Use the new name `delimited_payload` +instead. diff --git a/docs/ruby/copyright.asciidoc b/docs/ruby/copyright.asciidoc index cf5c19c9fee..3747cc572e4 100644 --- a/docs/ruby/copyright.asciidoc +++ b/docs/ruby/copyright.asciidoc @@ -1,5 +1,5 @@ == Copyright and License -This software is Copyright (c) 2013-2016 by Elasticsearch BV. +This software is Copyright (c) 2013-2018 by Elasticsearch BV. This is free software, licensed under The Apache License Version 2.0. diff --git a/libs/elasticsearch-core/build.gradle b/libs/elasticsearch-core/build.gradle index dea5664a14f..26a232664ad 100644 --- a/libs/elasticsearch-core/build.gradle +++ b/libs/elasticsearch-core/build.gradle @@ -26,6 +26,45 @@ apply plugin: 'nebula.maven-scm' archivesBaseName = 'elasticsearch-core' +// we want to keep the JDKs in our IDEs set to JDK 8 until minimum JDK is bumped to 9 so we do not include this source set in our IDEs +if (!isEclipse && !isIdea) { + sourceSets { + java9 { + java { + srcDirs = ['src/main/java9'] + } + } + } + + configurations { + java9Compile.extendsFrom(compile) + } + + dependencies { + java9Compile sourceSets.main.output + } + + compileJava9Java { + sourceCompatibility = 9 + targetCompatibility = 9 + } + + /* Enable this when forbiddenapis was updated to 2.6. 
+ * See: https://github.com/elastic/elasticsearch/issues/29292 + forbiddenApisJava9 { + targetCompatibility = 9 + } + */ + + jar { + metaInf { + into 'versions/9' + from sourceSets.java9.output + } + manifest.attributes('Multi-Release': 'true') + } +} + publishing { publications { nebula { @@ -39,6 +78,10 @@ dependencies { testCompile "junit:junit:${versions.junit}" testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}" + if (!isEclipse && !isIdea) { + java9Compile sourceSets.main.output + } + if (isEclipse == false || project.path == ":libs:elasticsearch-core-tests") { testCompile("org.elasticsearch.test:framework:${version}") { exclude group: 'org.elasticsearch', module: 'elasticsearch-core' @@ -66,14 +109,14 @@ if (isEclipse) { } thirdPartyAudit.excludes = [ - // from log4j - 'org/osgi/framework/AdaptPermission', - 'org/osgi/framework/AdminPermission', - 'org/osgi/framework/Bundle', - 'org/osgi/framework/BundleActivator', - 'org/osgi/framework/BundleContext', - 'org/osgi/framework/BundleEvent', - 'org/osgi/framework/SynchronousBundleListener', - 'org/osgi/framework/wiring/BundleWire', - 'org/osgi/framework/wiring/BundleWiring' + // from log4j + 'org/osgi/framework/AdaptPermission', + 'org/osgi/framework/AdminPermission', + 'org/osgi/framework/Bundle', + 'org/osgi/framework/BundleActivator', + 'org/osgi/framework/BundleContext', + 'org/osgi/framework/BundleEvent', + 'org/osgi/framework/SynchronousBundleListener', + 'org/osgi/framework/wiring/BundleWire', + 'org/osgi/framework/wiring/BundleWiring' ] diff --git a/server/src/main/java/org/elasticsearch/common/collect/Tuple.java b/libs/elasticsearch-core/src/main/java/org/elasticsearch/common/collect/Tuple.java similarity index 97% rename from server/src/main/java/org/elasticsearch/common/collect/Tuple.java rename to libs/elasticsearch-core/src/main/java/org/elasticsearch/common/collect/Tuple.java index 2a0d860e1a3..70c7bcbc045 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/Tuple.java +++ b/libs/elasticsearch-core/src/main/java/org/elasticsearch/common/collect/Tuple.java @@ -46,7 +46,7 @@ public class Tuple { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - Tuple tuple = (Tuple) o; + Tuple tuple = (Tuple) o; if (v1 != null ? !v1.equals(tuple.v1) : tuple.v1 != null) return false; if (v2 != null ? !v2.equals(tuple.v2) : tuple.v2 != null) return false; diff --git a/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java b/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java index eaa4df768cd..4108992fb1f 100644 --- a/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java +++ b/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java @@ -41,45 +41,73 @@ public final class IOUtils { } /** - * Closes all given Closeables. Some of the Closeables may be null; they are ignored. After everything is closed, the - * method either throws the first exception it hit while closing, or completes normally if there were no exceptions. + * Closes all given Closeables. Some of the Closeables may be null; they are + * ignored. After everything is closed, the method either throws the first exception it hit + * while closing with other exceptions added as suppressed, or completes normally if there were + * no exceptions. * * @param objects objects to close */ public static void close(final Closeable... 
objects) throws IOException { - close(Arrays.asList(objects)); + close(null, Arrays.asList(objects)); } /** - * Closes all given {@link Closeable}s. + * Closes all given Closeables. Some of the Closeables may be null; they are + * ignored. After everything is closed, the method adds any exceptions as suppressed to the + * original exception, or throws the first exception it hit if {@code Exception} is null. If + * no exceptions are encountered and the passed in exception is null, it completes normally. * * @param objects objects to close + */ + public static void close(final Exception e, final Closeable... objects) throws IOException { + close(e, Arrays.asList(objects)); + } + + /** + * Closes all given Closeables. Some of the Closeables may be null; they are + * ignored. After everything is closed, the method either throws the first exception it hit + * while closing with other exceptions added as suppressed, or completes normally if there were + * no exceptions. + * + * @param objects objects to close + */ + public static void close(final Iterable objects) throws IOException { + close(null, objects); + } + + /** + * Closes all given {@link Closeable}s. If a non-null exception is passed in, or closing a + * stream causes an exception, throws the exception with other {@link RuntimeException} or + * {@link IOException} exceptions added as suppressed. + * + * @param ex existing Exception to add exceptions occurring during close to + * @param objects objects to close * * @see #close(Closeable...) */ - public static void close(final Iterable objects) throws IOException { - Exception ex = null; - + public static void close(final Exception ex, final Iterable objects) throws IOException { + Exception firstException = ex; for (final Closeable object : objects) { try { if (object != null) { object.close(); } } catch (final IOException | RuntimeException e) { - if (ex == null) { - ex = e; + if (firstException == null) { + firstException = e; } else { - ex.addSuppressed(e); + firstException.addSuppressed(e); } } } - if (ex != null) { - if (ex instanceof IOException) { - throw (IOException) ex; + if (firstException != null) { + if (firstException instanceof IOException) { + throw (IOException) firstException; } else { // since we only assigned an IOException or a RuntimeException to ex above, in this case ex must be a RuntimeException - throw (RuntimeException) ex; + throw (RuntimeException) firstException; } } } diff --git a/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/Streams.java b/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/Streams.java new file mode 100644 index 00000000000..a006028b905 --- /dev/null +++ b/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/Streams.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.core.internal.io; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Objects; + +/** + * Simple utility methods for file and stream copying. + * All copy methods use a block size of 8192 bytes, + * and close all affected streams when done. + *
<p>
+ * Mainly for use within the framework, + * but also useful for application code. + */ +public class Streams { + + /** + * Copy the contents of the given InputStream to the given OutputStream. + * Closes both streams when done. + * + * @param in the stream to copy from + * @param out the stream to copy to + * @return the number of bytes copied + * @throws IOException in case of I/O errors + */ + public static long copy(final InputStream in, final OutputStream out) throws IOException { + Objects.requireNonNull(in, "No InputStream specified"); + Objects.requireNonNull(out, "No OutputStream specified"); + final byte[] buffer = new byte[8192]; + Exception err = null; + try { + long byteCount = 0; + int bytesRead; + while ((bytesRead = in.read(buffer)) != -1) { + out.write(buffer, 0, bytesRead); + byteCount += bytesRead; + } + out.flush(); + return byteCount; + } catch (IOException | RuntimeException e) { + err = e; + throw e; + } finally { + IOUtils.close(err, in, out); + } + } +} diff --git a/libs/elasticsearch-core/src/main/java9/org/elasticsearch/core/internal/io/Streams.java b/libs/elasticsearch-core/src/main/java9/org/elasticsearch/core/internal/io/Streams.java new file mode 100644 index 00000000000..34b3785765d --- /dev/null +++ b/libs/elasticsearch-core/src/main/java9/org/elasticsearch/core/internal/io/Streams.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.core.internal.io; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +/** + * Simple utility methods for file and stream copying. + * All copy methods close all affected streams when done. + *
<p>
+ * Mainly for use within the framework, + * but also useful for application code. + */ +public abstract class Streams { + + /** + * Copy the contents of the given InputStream to the given OutputStream. + * Closes both streams when done. + * + * @param in the stream to copy from + * @param out the stream to copy to + * @return the number of bytes copied + * @throws IOException in case of I/O errors + */ + public static long copy(final InputStream in, final OutputStream out) throws IOException { + Exception err = null; + try { + final long byteCount = in.transferTo(out); + out.flush(); + return byteCount; + } catch (IOException | RuntimeException e) { + err = e; + throw e; + } finally { + IOUtils.close(err, in, out); + } + } +} diff --git a/libs/elasticsearch-core/src/test/java/org/elasticsearch/common/collect/TupleTests.java b/libs/elasticsearch-core/src/test/java/org/elasticsearch/common/collect/TupleTests.java new file mode 100644 index 00000000000..79a9969ad05 --- /dev/null +++ b/libs/elasticsearch-core/src/test/java/org/elasticsearch/common/collect/TupleTests.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.collect; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class TupleTests extends ESTestCase { + + public void testTuple() { + Tuple t1 = new Tuple<>(2L, "foo"); + Tuple t2 = new Tuple<>(2L, "foo"); + Tuple t3 = new Tuple<>(3L, "foo"); + Tuple t4 = new Tuple<>(2L, "bar"); + Tuple t5 = new Tuple<>(2, "foo"); + + assertThat(t1.v1(), equalTo(Long.valueOf(2L))); + assertThat(t1.v2(), equalTo("foo")); + + assertThat(t1, equalTo(t2)); + assertNotEquals(t1, t3); + assertNotEquals(t2, t3); + assertNotEquals(t2, t4); + assertNotEquals(t3, t4); + assertNotEquals(t1, t5); + + assertThat(t1.toString(), equalTo("Tuple [v1=2, v2=foo]")); + } +} diff --git a/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/StreamsTests.java b/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/StreamsTests.java new file mode 100644 index 00000000000..3908ef83500 --- /dev/null +++ b/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/StreamsTests.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.core.internal.io; + +import org.elasticsearch.test.ESTestCase; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; + +import static org.hamcrest.Matchers.equalTo; + +public class StreamsTests extends ESTestCase { + public void testCopyFromInputStream() throws IOException { + byte[] content = "content".getBytes(StandardCharsets.UTF_8); + ByteArrayInputStream in = new ByteArrayInputStream(content); + ByteArrayOutputStream out = new ByteArrayOutputStream(content.length); + long count = Streams.copy(in, out); + + assertThat(count, equalTo((long) content.length)); + assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true)); + } +} diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java index 4cbeb848060..3800c7711a2 100644 --- a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java +++ b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java @@ -34,8 +34,10 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Collections; @@ -74,8 +76,6 @@ public final class Grok { private final Map patternBank; private final boolean namedCaptures; private final Regex compiledExpression; - private final String expression; - public Grok(Map patternBank, String grokPattern) { this(patternBank, grokPattern, true); @@ -86,11 +86,59 @@ public final class Grok { this.patternBank = patternBank; this.namedCaptures = namedCaptures; - this.expression = toRegex(grokPattern); + for (Map.Entry entry : patternBank.entrySet()) { + String name = entry.getKey(); + String pattern = entry.getValue(); + forbidCircularReferences(name, new ArrayList<>(), pattern); + } + + String expression = toRegex(grokPattern); byte[] expressionBytes = expression.getBytes(StandardCharsets.UTF_8); this.compiledExpression = new Regex(expressionBytes, 0, expressionBytes.length, Option.DEFAULT, UTF8Encoding.INSTANCE); } + /** + * Checks whether patterns reference each other in a circular manner and if so fail with an exception + * + * In a pattern, anything between %{ and } or : is considered + * a reference to another named pattern. This method will navigate to all these named patterns and + * check for a circular reference. 
+ */ + private void forbidCircularReferences(String patternName, List path, String pattern) { + if (pattern.contains("%{" + patternName + "}") || pattern.contains("%{" + patternName + ":")) { + String message; + if (path.isEmpty()) { + message = "circular reference in pattern [" + patternName + "][" + pattern + "]"; + } else { + message = "circular reference in pattern [" + path.remove(path.size() - 1) + "][" + pattern + + "] back to pattern [" + patternName + "]"; + // add rest of the path: + if (path.isEmpty() == false) { + message += " via patterns [" + String.join("=>", path) + "]"; + } + } + throw new IllegalArgumentException(message); + } + + for (int i = pattern.indexOf("%{"); i != -1; i = pattern.indexOf("%{", i + 1)) { + int begin = i + 2; + int brackedIndex = pattern.indexOf('}', begin); + int columnIndex = pattern.indexOf(':', begin); + int end; + if (brackedIndex != -1 && columnIndex == -1) { + end = brackedIndex; + } else if (columnIndex != -1 && brackedIndex == -1) { + end = columnIndex; + } else if (brackedIndex != -1 && columnIndex != -1) { + end = Math.min(brackedIndex, columnIndex); + } else { + throw new IllegalArgumentException("pattern [" + pattern + "] has circular references to other pattern definitions"); + } + String otherPatternName = pattern.substring(begin, end); + path.add(otherPatternName); + forbidCircularReferences(patternName, path, patternBank.get(otherPatternName)); + } + } public String groupMatch(String name, Region region, String pattern) { try { @@ -125,10 +173,12 @@ public final class Grok { String patternName = groupMatch(PATTERN_GROUP, region, grokPattern); String pattern = patternBank.get(patternName); - if (pattern == null) { throw new IllegalArgumentException("Unable to find pattern [" + patternName + "] in Grok's pattern dictionary"); } + if (pattern.contains("%{" + patternName + "}") || pattern.contains("%{" + patternName + ":")) { + throw new IllegalArgumentException("circular reference in pattern back [" + patternName + "]"); + } String grokPart; if (namedCaptures && subName != null) { diff --git a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java index 931842d9f24..eb8d0e95487 100644 --- a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java +++ b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java @@ -28,6 +28,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.TreeMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -205,6 +206,65 @@ public class GrokTests extends ESTestCase { assertEquals(expected, actual); } + public void testCircularReference() { + Exception e = expectThrows(IllegalArgumentException.class, () -> { + Map bank = new HashMap<>(); + bank.put("NAME", "!!!%{NAME}!!!"); + String pattern = "%{NAME}"; + new Grok(bank, pattern, false); + }); + assertEquals("circular reference in pattern [NAME][!!!%{NAME}!!!]", e.getMessage()); + + e = expectThrows(IllegalArgumentException.class, () -> { + Map bank = new HashMap<>(); + bank.put("NAME", "!!!%{NAME:name}!!!"); + String pattern = "%{NAME}"; + new Grok(bank, pattern, false); + }); + assertEquals("circular reference in pattern [NAME][!!!%{NAME:name}!!!]", e.getMessage()); + + e = expectThrows(IllegalArgumentException.class, () -> { + Map bank = new HashMap<>(); + bank.put("NAME", "!!!%{NAME:name:int}!!!"); + String pattern = "%{NAME}"; + new Grok(bank, pattern, false); + }); + 
assertEquals("circular reference in pattern [NAME][!!!%{NAME:name:int}!!!]", e.getMessage()); + + e = expectThrows(IllegalArgumentException.class, () -> { + Map bank = new TreeMap<>(); + bank.put("NAME1", "!!!%{NAME2}!!!"); + bank.put("NAME2", "!!!%{NAME1}!!!"); + String pattern = "%{NAME1}"; + new Grok(bank, pattern, false); + }); + assertEquals("circular reference in pattern [NAME2][!!!%{NAME1}!!!] back to pattern [NAME1]", e.getMessage()); + + e = expectThrows(IllegalArgumentException.class, () -> { + Map bank = new TreeMap<>(); + bank.put("NAME1", "!!!%{NAME2}!!!"); + bank.put("NAME2", "!!!%{NAME3}!!!"); + bank.put("NAME3", "!!!%{NAME1}!!!"); + String pattern = "%{NAME1}"; + new Grok(bank, pattern, false); + }); + assertEquals("circular reference in pattern [NAME3][!!!%{NAME1}!!!] back to pattern [NAME1] via patterns [NAME2]", + e.getMessage()); + + e = expectThrows(IllegalArgumentException.class, () -> { + Map bank = new TreeMap<>(); + bank.put("NAME1", "!!!%{NAME2}!!!"); + bank.put("NAME2", "!!!%{NAME3}!!!"); + bank.put("NAME3", "!!!%{NAME4}!!!"); + bank.put("NAME4", "!!!%{NAME5}!!!"); + bank.put("NAME5", "!!!%{NAME1}!!!"); + String pattern = "%{NAME1}"; + new Grok(bank, pattern, false); + }); + assertEquals("circular reference in pattern [NAME5][!!!%{NAME1}!!!] back to pattern [NAME1] " + + "via patterns [NAME2=>NAME3=>NAME4]", e.getMessage()); + } + public void testBooleanCaptures() { Map bank = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java similarity index 93% rename from server/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java rename to libs/x-content/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java index aeb4e53690a..d2a0e163180 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java @@ -21,10 +21,8 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; -import org.elasticsearch.common.xcontent.json.JsonXContent; import java.io.IOException; import java.util.ArrayList; @@ -214,17 +212,6 @@ public abstract class AbstractObjectParser declareField(consumer, (p, c) -> parseArray(p, () -> itemParser.parse(p, c)), field, type); } - public void declareRawObject(BiConsumer consumer, ParseField field) { - CheckedFunction bytesParser = p -> { - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - builder.prettyPrint(); - builder.copyCurrentStructure(p); - return BytesReference.bytes(builder); - } - }; - declareField(consumer, bytesParser, field, ValueType.OBJECT); - } - private interface IOSupplier { T get() throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java similarity index 97% rename from server/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java rename to libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java index 03f6b14f525..d61bd8a5dbb 100644 
--- a/server/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; @@ -161,7 +160,7 @@ public final class ConstructingObjectParser extends AbstractObje try { return parse(parser, context); } catch (IOException e) { - throw new ParsingException(parser.getTokenLocation(), "[" + objectParser.getName() + "] failed to parse object", e); + throw new XContentParseException(parser.getTokenLocation(), "[" + objectParser.getName() + "] failed to parse object", e); } } @@ -335,7 +334,7 @@ public final class ConstructingObjectParser extends AbstractObje try { consumer.accept(targetObject, v); } catch (Exception e) { - throw new ParsingException(location, + throw new XContentParseException(location, "[" + objectParser.getName() + "] failed to parse field [" + parseField.getPreferredName() + "]", e); } }); @@ -413,7 +412,7 @@ public final class ConstructingObjectParser extends AbstractObje private void queue(Consumer queueMe) { assert targetObject == null: "Don't queue after the targetObject has been built! Just apply the consumer directly."; if (queuedFields == null) { - @SuppressWarnings("unchecked") + @SuppressWarnings({"unchecked", "rawtypes"}) Consumer[] queuedFields = new Consumer[numberOfFields]; this.queuedFields = queuedFields; } @@ -471,11 +470,12 @@ public final class ConstructingObjectParser extends AbstractObje queuedFieldsCount -= 1; queuedFields[queuedFieldsCount].accept(targetObject); } - } catch (ParsingException e) { - throw new ParsingException(e.getLineNumber(), e.getColumnNumber(), - "failed to build [" + objectParser.getName() + "] after last required field arrived", e); + } catch (XContentParseException e) { + throw new XContentParseException(e.getLocation(), + "failed to build [" + objectParser.getName() + "] after last required field arrived", e); } catch (Exception e) { - throw new ParsingException(null, "Failed to build [" + objectParser.getName() + "] after last required field arrived", e); + throw new XContentParseException(null, + "Failed to build [" + objectParser.getName() + "] after last required field arrived", e); } } } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java similarity index 93% rename from server/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java rename to libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java index 1a3be1a5a7b..71b888bf44a 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; import java.io.IOException; import java.lang.reflect.Array; @@ -147,7 +146,7 @@ public final class ObjectParser extends AbstractObjectParser extends AbstractObjectParser extends AbstractObjectParser extends AbstractObjectParser objectParser = (XContentParser p, Context c) -> { if 
(p.currentToken() != XContentParser.Token.FIELD_NAME) { - throw new ParsingException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of " + throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of " + "fields or an array where each entry is an object with a single field"); } // This messy exception nesting has the nice side effect of telling the use which field failed to parse @@ -242,10 +241,10 @@ public final class ObjectParser extends AbstractObjectParser { @@ -261,14 +260,14 @@ public final class ObjectParser extends AbstractObjectParser extends AbstractObjectParser extends AbstractObjectParser extends AbstractObjectParser l.lineNumber).orElse(-1); } @@ -45,8 +52,14 @@ public class XContentParseException extends IllegalArgumentException { return location.map(l -> l.columnNumber).orElse(-1); } + @Nullable + public XContentLocation getLocation() { + return location.orElse(null); + } + @Override public String getMessage() { return location.map(l -> "[" + l.toString() + "] ").orElse("") + super.getMessage(); } + } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java similarity index 84% rename from server/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java rename to libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java index 9f24861fdaa..7488cfd7e9c 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java @@ -22,14 +22,12 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ObjectParserTests.NamedObject; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matcher; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Arrays; import java.util.List; @@ -38,6 +36,7 @@ import static java.util.Collections.unmodifiableList; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; @@ -79,7 +78,8 @@ public class ConstructingObjectParserTests extends ESTestCase { XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); expected.toXContent(builder, ToXContent.EMPTY_PARAMS); builder = shuffleXContent(builder); - BytesReference bytes = BytesReference.bytes(builder); + builder.flush(); + byte[] bytes = ((ByteArrayOutputStream) builder.getOutputStream()).toByteArray(); try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) { HasCtorArguments parsed = randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null); assertEquals(expected.animal, 
parsed.animal); @@ -90,9 +90,6 @@ public class ConstructingObjectParserTests extends ESTestCase { assertEquals(expected.b, parsed.b); assertEquals(expected.c, parsed.c); assertEquals(expected.d, parsed.d); - } catch (Exception e) { - // It is convenient to decorate the error message with the json - throw new Exception("Error parsing: [" + Strings.toString(builder) + "]", e); } } @@ -175,7 +172,7 @@ public class ConstructingObjectParserTests extends ESTestCase { + " \"vegetable\": 1,\n" + " \"vegetable\": 2\n" + "}"); - Throwable e = expectThrows(ParsingException.class, () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null)); + Throwable e = expectThrows(XContentParseException.class, () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null)); assertEquals("[has_required_arguments] failed to parse field [vegetable]", e.getMessage()); e = e.getCause(); assertThat(e, instanceOf(IllegalArgumentException.class)); @@ -189,8 +186,9 @@ public class ConstructingObjectParserTests extends ESTestCase { + " \"vegetable\": 2,\n" + " \"a\": \"supercalifragilisticexpialidocious\"\n" + "}"); - ParsingException e = expectThrows(ParsingException.class, () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null)); - assertEquals("[has_required_arguments] failed to parse field [a]", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, + () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null)); + assertThat(e.getMessage(), containsString("[has_required_arguments] failed to parse field [a]")); assertEquals(4, e.getLineNumber()); assertEquals("[a] must be less than 10 characters in length but was [supercalifragilisticexpialidocious]", e.getCause().getMessage()); @@ -203,14 +201,15 @@ public class ConstructingObjectParserTests extends ESTestCase { + " \"animal\": \"cat\"\n," + " \"vegetable\": 2\n" + "}"); - ParsingException e = expectThrows(ParsingException.class, () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null)); - assertEquals("[has_required_arguments] failed to parse field [vegetable]", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, + () -> randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null)); + assertThat(e.getMessage(), containsString("[has_required_arguments] failed to parse field [vegetable]")); assertEquals(4, e.getLineNumber()); - e = (ParsingException) e.getCause(); - assertEquals("failed to build [has_required_arguments] after last required field arrived", e.getMessage()); + e = (XContentParseException) e.getCause(); + assertThat(e.getMessage(), containsString("failed to build [has_required_arguments] after last required field arrived")); assertEquals(2, e.getLineNumber()); - e = (ParsingException) e.getCause(); - assertEquals("[has_required_arguments] failed to parse field [a]", e.getMessage()); + e = (XContentParseException) e.getCause(); + assertThat(e.getMessage(), containsString("[has_required_arguments] failed to parse field [a]")); assertEquals(2, e.getLineNumber()); assertEquals("[a] must be less than 10 characters in length but was [supercalifragilisticexpialidocious]", e.getCause().getMessage()); @@ -465,11 +464,11 @@ public class ConstructingObjectParserTests extends ESTestCase { + "],\"named_in_constructor\": [\n" + " {\"c\": {}}" + "]}"); - ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage()); - 
assertEquals( - "[named] can be a single object with any number of fields or an array where each entry is an object with a single field", - e.getCause().getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]")); + assertThat(e.getCause().getMessage(), + containsString("[named] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); } public void testParseNamedObjectTwoFieldsInArrayConstructorArg() throws IOException { @@ -479,11 +478,11 @@ public class ConstructingObjectParserTests extends ESTestCase { + "],\"named_in_constructor\": [\n" + " {\"c\": {}, \"d\": {}}" + "]}"); - ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named_in_constructor]", e.getMessage()); - assertEquals( - "[named_in_constructor] can be a single object with any number of fields or an array where each entry is an object with a " - + "single field", e.getCause().getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]")); + assertThat(e.getCause().getMessage(), + containsString("[named_in_constructor] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); } public void testParseNamedObjectNoFieldsInArray() throws IOException { @@ -493,11 +492,11 @@ public class ConstructingObjectParserTests extends ESTestCase { + "],\"named_in_constructor\": [\n" + " {\"a\": {}}" + "]}"); - ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage()); - assertEquals( - "[named] can be a single object with any number of fields or an array where each entry is an object with a single field", - e.getCause().getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]")); + assertThat(e.getCause().getMessage(), + containsString("[named] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); } public void testParseNamedObjectNoFieldsInArrayConstructorArg() throws IOException { @@ -507,11 +506,11 @@ public class ConstructingObjectParserTests extends ESTestCase { + "],\"named_in_constructor\": [\n" + " {}" + "]}"); - ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named_in_constructor]", e.getMessage()); - assertEquals( - "[named_in_constructor] can be a single object with any number of fields or an array where each entry is an object with a " - + "single field", e.getCause().getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]")); 
+ assertThat(e.getCause().getMessage(), + containsString("[named_in_constructor] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); } public void testParseNamedObjectJunkInArray() throws IOException { @@ -521,11 +520,11 @@ public class ConstructingObjectParserTests extends ESTestCase { + "],\"named_in_constructor\": [\n" + " {\"a\": {}}" + "]}"); - ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage()); - assertEquals( - "[named] can be a single object with any number of fields or an array where each entry is an object with a single field", - e.getCause().getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]")); + assertThat(e.getCause().getMessage(), + containsString("[named] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); } public void testParseNamedObjectJunkInArrayConstructorArg() throws IOException { @@ -535,11 +534,11 @@ public class ConstructingObjectParserTests extends ESTestCase { + "],\"named_in_constructor\": [\n" + " \"junk\"" + "]}"); - ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named_in_constructor]", e.getMessage()); - assertEquals( - "[named_in_constructor] can be a single object with any number of fields or an array where each entry is an object with a " - + "single field", e.getCause().getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]")); + assertThat(e.getCause().getMessage(), + containsString("[named_in_constructor] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); } public void testParseNamedObjectInOrderNotSupported() throws IOException { @@ -558,8 +557,8 @@ public class ConstructingObjectParserTests extends ESTestCase { objectParser.declareNamedObjects(NamedObjectHolder::setNamed, NamedObject.PARSER, new ParseField("named")); // Now firing the xml through it fails - ParsingException e = expectThrows(ParsingException.class, () -> objectParser.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> objectParser.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]")); assertEquals("[named] doesn't support arrays. 
Use a single object with multiple fields.", e.getCause().getMessage()); } @@ -579,9 +578,10 @@ public class ConstructingObjectParserTests extends ESTestCase { objectParser.declareNamedObjects(NamedObjectHolder::setNamed, NamedObject.PARSER, new ParseField("named")); // Now firing the xml through it fails - ParsingException e = expectThrows(ParsingException.class, () -> objectParser.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named_in_constructor]", e.getMessage()); - assertEquals("[named_in_constructor] doesn't support arrays. Use a single object with multiple fields.", e.getCause().getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> objectParser.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]")); + assertThat(e.getCause().getMessage(), + containsString("[named_in_constructor] doesn't support arrays. Use a single object with multiple fields.")); } static class NamedObjectHolder { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java similarity index 92% rename from server/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java rename to libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java index 6f0c0208b9c..3dd33e997b2 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java @@ -20,14 +20,13 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.UncheckedIOException; import java.net.URI; @@ -199,8 +198,8 @@ public class ObjectParserTests extends ESTestCase { try { objectParser.parse(parser, s, null); fail("numeric value expected"); - } catch (ParsingException ex) { - assertEquals(ex.getMessage(), "[the_parser] failed to parse field [test]"); + } catch (XContentParseException ex) { + assertThat(ex.getMessage(), containsString("[the_parser] failed to parse field [test]")); assertTrue(ex.getCause() instanceof NumberFormatException); } @@ -235,7 +234,7 @@ public class ObjectParserTests extends ESTestCase { TestStruct s = new TestStruct(); objectParser.declareField((i, c, x) -> c.test = i.text(), new ParseField("numeric_value"), ObjectParser.ValueType.FLOAT); - Exception e = expectThrows(ParsingException.class, () -> objectParser.parse(parser, s, null)); + Exception e = expectThrows(XContentParseException.class, () -> objectParser.parse(parser, s, null)); assertThat(e.getMessage(), containsString("[foo] numeric_value doesn't support values of type: VALUE_BOOLEAN")); } @@ -478,11 +477,11 @@ public class ObjectParserTests extends ESTestCase { "{\"named\": [\n" + " {\"a\": {}, \"b\": {}}" + "]}"); - ParsingException e = expectThrows(ParsingException.class, () -> 
NamedObjectHolder.PARSER.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage()); - assertEquals( - "[named] can be a single object with any number of fields or an array where each entry is an object with a single field", - e.getCause().getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]")); + assertThat(e.getCause().getMessage(), + containsString("[named] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); } public void testParseNamedObjectNoFieldsInArray() throws IOException { @@ -490,11 +489,11 @@ public class ObjectParserTests extends ESTestCase { "{\"named\": [\n" + " {}" + "]}"); - ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage()); - assertEquals( - "[named] can be a single object with any number of fields or an array where each entry is an object with a single field", - e.getCause().getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]")); + assertThat(e.getCause().getMessage(), + containsString("[named] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); } public void testParseNamedObjectJunkInArray() throws IOException { @@ -502,11 +501,11 @@ public class ObjectParserTests extends ESTestCase { "{\"named\": [\n" + " \"junk\"" + "]}"); - ParsingException e = expectThrows(ParsingException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage()); - assertEquals( - "[named] can be a single object with any number of fields or an array where each entry is an object with a single field", - e.getCause().getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]")); + assertThat(e.getCause().getMessage(), + containsString("[named] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); } public void testParseNamedObjectInOrderNotSupported() throws IOException { @@ -521,8 +520,8 @@ public class ObjectParserTests extends ESTestCase { objectParser.declareNamedObjects(NamedObjectHolder::setNamed, NamedObject.PARSER, new ParseField("named")); // Now firing the xml through it fails - ParsingException e = expectThrows(ParsingException.class, () -> objectParser.apply(parser, null)); - assertEquals("[named_object_holder] failed to parse field [named]", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> objectParser.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]")); assertEquals("[named] doesn't support arrays. 
Use a single object with multiple fields.", e.getCause().getMessage()); } @@ -535,7 +534,9 @@ public class ObjectParserTests extends ESTestCase { } b.endObject(); b = shuffleXContent(b); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b)); + b.flush(); + byte[] bytes = ((ByteArrayOutputStream) b.getOutputStream()).toByteArray(); + XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); class TestStruct { public String test; @@ -559,7 +560,9 @@ public class ObjectParserTests extends ESTestCase { } b.endObject(); b = shuffleXContent(b); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b)); + b.flush(); + byte[] bytes = ((ByteArrayOutputStream) b.getOutputStream()).toByteArray(); + XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); class TestStruct { public String test; @@ -587,7 +590,9 @@ public class ObjectParserTests extends ESTestCase { } b.endObject(); b = shuffleXContent(b); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b)); + b.flush(); + byte[] bytes = ((ByteArrayOutputStream) b.getOutputStream()).toByteArray(); + XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); class TestStruct { public String test; } @@ -646,8 +651,8 @@ public class ObjectParserTests extends ESTestCase { // Make sure that we didn't break the null handling in arrays that shouldn't support nulls XContentParser parser2 = createParser(JsonXContent.jsonXContent, "{\"int_array\": [1, null, 3]}"); TestStruct s2 = new TestStruct(); - ParsingException ex = expectThrows(ParsingException.class, () -> objectParser.parse(parser2, s2, null)); - assertThat(ex.getMessage(), startsWith("[foo] failed to parse field [int_array]")); + XContentParseException ex = expectThrows(XContentParseException.class, () -> objectParser.parse(parser2, s2, null)); + assertThat(ex.getMessage(), containsString("[foo] failed to parse field [int_array]")); } static class NamedObjectHolder { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyDelimitedPayloadTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyDelimitedPayloadTokenFilterFactory.java index 484c9d9b128..06c179d95f7 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyDelimitedPayloadTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyDelimitedPayloadTokenFilterFactory.java @@ -32,6 +32,10 @@ public class LegacyDelimitedPayloadTokenFilterFactory extends DelimitedPayloadTo LegacyDelimitedPayloadTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, env, name, settings); + if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) { + throw new IllegalArgumentException( + "[delimited_payload_filter] is not supported for new indices, use [delimited_payload] instead"); + } if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_2_0)) { DEPRECATION_LOGGER.deprecated("Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]"); } diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml index bfb6c97c24f..3dca3bfd777 100644 --- 
a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml @@ -1026,15 +1026,13 @@ - match: { tokens.10.token: ちた } --- -"delimited_payload_filter": +"delimited_payload_filter_error": - skip: - version: " - 6.1.99" - reason: delimited_payload_filter deprecated in 6.2, replaced by delimited_payload - features: "warnings" + version: " - 6.99.99" + reason: using delimited_payload_filter throws error from 7.0 on - do: - warnings: - - "Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]" + catch: /\[delimited_payload_filter\] is not supported for new indices, use \[delimited_payload\] instead/ indices.create: index: test body: @@ -1045,29 +1043,15 @@ type: delimited_payload_filter delimiter: ^ encoding: identity - - do: - warnings: - - "Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]" - indices.analyze: - index: test - body: - text: foo^bar - tokenizer: keyword - filter: [my_delimited_payload_filter] - - length: { tokens: 1 } - - match: { tokens.0.token: foo } # Test pre-configured token filter too: - do: - warnings: - - "Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]" + catch: /\[delimited_payload_filter\] is not supported for new indices, use \[delimited_payload\] instead/ indices.analyze: body: text: foo|5 tokenizer: keyword filter: [delimited_payload_filter] - - length: { tokens: 1 } - - match: { tokens.0.token: foo } --- "delimited_payload": diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java index f1a7add303f..d7f8e8838bb 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.ScriptService; @@ -30,6 +31,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.any; @@ -80,9 +82,9 @@ public class ScriptProcessorFactoryTests extends ESTestCase { configMap.put("source", "bar"); configMap.put("lang", "mockscript"); - ElasticsearchException exception = expectThrows(ElasticsearchException.class, + XContentParseException exception = expectThrows(XContentParseException.class, () -> factory.create(null, randomAlphaOfLength(10), configMap)); - assertThat(exception.getMessage(), is("[script] failed to parse field [source]")); + assertThat(exception.getMessage(), containsString("[script] failed to parse field [source]")); } public void testFactoryValidationAtLeastOneScriptingType() throws Exception { diff --git a/modules/lang-expression/licenses/lucene-expressions-7.3.0-snapshot-98a6b3d.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index a92cbe30450..00000000000 --- 
a/modules/lang-expression/licenses/lucene-expressions-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -38ff5a1f4bcbfb6e1ffacd3263175c2a1ba23e9f \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.3.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.3.0.jar.sha1 new file mode 100644 index 00000000000..62a094a8b0f --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.3.0.jar.sha1 @@ -0,0 +1 @@ +cb82d9db3043bbd25b4d0eb5022ed1e529c936d3 \ No newline at end of file diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java index 51b7df3fc2c..9cdca70f0e1 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.script.mustache; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.script.ScriptType; @@ -122,7 +122,7 @@ public class SearchTemplateRequestTests extends ESTestCase { public void testParseWrongTemplate() { // Unclosed template id - expectThrows(ParsingException.class, () -> RestSearchTemplateAction.parse(newParser("{'id' : 'another_temp }"))); + expectThrows(XContentParseException.class, () -> RestSearchTemplateAction.parse(newParser("{'id' : 'another_temp }"))); } /** diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index 8f1bb2a9310..e43089173db 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -38,6 +38,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.spans.SpanFirstQuery; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanNotQuery; @@ -235,20 +236,18 @@ final class QueryAnalyzer { return new Result(true, Collections.emptySet(), 0); } - if (version.onOrAfter(Version.V_6_1_0)) { - Set extractions = new HashSet<>(); - for (Term[] termArr : terms) { - extractions.addAll(Arrays.stream(termArr).map(QueryExtraction::new).collect(toSet())); + // This query has the same problem as boolean queries when it comes to duplicated terms + // So to keep things simple, we just rewrite to a boolean query + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + for (Term[] termArr : terms) { + BooleanQuery.Builder subBuilder = new BooleanQuery.Builder(); + for (Term term : termArr) { + subBuilder.add(new TermQuery(term), Occur.SHOULD); } - return new Result(false, extractions, terms.length); - } else { - Set bestTermArr = null; - for (Term[] termArr : terms) { - Set queryExtractions = Arrays.stream(termArr).map(QueryExtraction::new).collect(toSet()); - bestTermArr = selectBestExtraction(bestTermArr, queryExtractions); - } - 
return new Result(false, bestTermArr, 1); + builder.add(subBuilder.build(), Occur.FILTER); } + // Make sure to unverify the result + return booleanQuery().apply(builder.build(), version).unverify(); }; } @@ -263,41 +262,35 @@ return (query, version) -> { SpanNearQuery spanNearQuery = (SpanNearQuery) query; if (version.onOrAfter(Version.V_6_1_0)) { - Set results = Arrays.stream(spanNearQuery.getClauses()).map(clause -> analyze(clause, version)).collect(toSet()); - int msm = 0; - Set extractions = new HashSet<>(); - Set seenRangeFields = new HashSet<>(); - for (Result result : results) { - QueryExtraction[] t = result.extractions.toArray(new QueryExtraction[1]); - if (result.extractions.size() == 1 && t[0].range != null) { - if (seenRangeFields.add(t[0].range.fieldName)) { - msm += 1; - } - } else { - msm += result.minimumShouldMatch; - } - extractions.addAll(result.extractions); + // This has the same problem as boolean queries when it comes to duplicated clauses + // so we rewrite to a boolean query to keep things simple. + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + for (SpanQuery clause : spanNearQuery.getClauses()) { + builder.add(clause, Occur.FILTER); } - return new Result(false, extractions, msm); + // make sure to unverify the result + return booleanQuery().apply(builder.build(), version).unverify(); } else { - Set bestClauses = null; + Result bestClause = null; for (SpanQuery clause : spanNearQuery.getClauses()) { Result temp = analyze(clause, version); - bestClauses = selectBestExtraction(temp.extractions, bestClauses); + bestClause = selectBestResult(temp, bestClause); } - return new Result(false, bestClauses, 1); + return bestClause; } }; } private static BiFunction spanOrQuery() { return (query, version) -> { - Set terms = new HashSet<>(); SpanOrQuery spanOrQuery = (SpanOrQuery) query; + // handle it like a boolean query to avoid duplicating e.g. the logic + // about duplicated terms + BooleanQuery.Builder builder = new BooleanQuery.Builder(); for (SpanQuery clause : spanOrQuery.getClauses()) { - terms.addAll(analyze(clause, version).extractions); + builder.add(clause, Occur.SHOULD); } - return new Result(false, terms, Math.min(1, terms.size())); + return booleanQuery().apply(builder.build(), version); }; } @@ -423,9 +416,13 @@ final class QueryAnalyzer { } } } else { - Set bestClause = null; + Result bestClause = null; UnsupportedQueryException uqe = null; + boolean hasProhibitedClauses = false; for (BooleanClause clause : clauses) { + if (clause.isProhibited()) { + hasProhibitedClauses = true; + } if (clause.isRequired() == false) { // skip must_not clauses, we don't need to remember the things that do *not* match...
// skip should clauses, this bq has must clauses, so we don't need to remember should clauses, @@ -440,17 +437,20 @@ final class QueryAnalyzer { uqe = e; continue; } - bestClause = selectBestExtraction(temp.extractions, bestClause); + bestClause = selectBestResult(temp, bestClause); } if (bestClause != null) { - return new Result(false, bestClause, 1); + if (hasProhibitedClauses || minimumShouldMatch > 0) { + bestClause = bestClause.unverify(); + } + return bestClause; } else { if (uqe != null) { // we're unable to select the best clause and an exception occurred, so we bail throw uqe; } else { // We didn't find a clause and no exception occurred, so this bq only contained MatchNoDocsQueries, - return new Result(true, Collections.emptySet(), 1); + return new Result(true, Collections.emptySet(), 0); } } } @@ -616,22 +616,40 @@ final class QueryAnalyzer { } } - static Set selectBestExtraction(Set extractions1, Set extractions2) { - assert extractions1 != null || extractions2 != null; - if (extractions1 == null) { - return extractions2; - } else if (extractions2 == null) { - return extractions1; + /** + * Return an extraction for the conjunction of {@code result1} and {@code result2} + * by picking up clauses that look most restrictive and making it unverified if + * the other clause is not null and doesn't match all documents. This is used by + * 6.0.0 indices which didn't use the terms_set query. + */ + static Result selectBestResult(Result result1, Result result2) { + assert result1 != null || result2 != null; + if (result1 == null) { + return result2; + } else if (result2 == null) { + return result1; + } else if (result1.matchAllDocs) { // conjunction with match_all + Result result = result2; + if (result1.verified == false) { + result = result.unverify(); + } + return result; + } else if (result2.matchAllDocs) { // conjunction with match_all + Result result = result1; + if (result2.verified == false) { + result = result.unverify(); + } + return result; } else { // Prefer term based extractions over range based extractions: boolean onlyRangeBasedExtractions = true; - for (QueryExtraction clause : extractions1) { + for (QueryExtraction clause : result1.extractions) { if (clause.term != null) { onlyRangeBasedExtractions = false; break; } } - for (QueryExtraction clause : extractions2) { + for (QueryExtraction clause : result2.extractions) { if (clause.term != null) { onlyRangeBasedExtractions = false; break; @@ -639,28 +657,28 @@ final class QueryAnalyzer { } if (onlyRangeBasedExtractions) { - BytesRef extraction1SmallestRange = smallestRange(extractions1); - BytesRef extraction2SmallestRange = smallestRange(extractions2); + BytesRef extraction1SmallestRange = smallestRange(result1.extractions); + BytesRef extraction2SmallestRange = smallestRange(result2.extractions); if (extraction1SmallestRange == null) { - return extractions2; + return result2.unverify(); } else if (extraction2SmallestRange == null) { - return extractions1; + return result1.unverify(); } // Keep the clause with smallest range, this is likely to be the rarest. 
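// Whichever clause wins below, the result is returned through unverify(): the clause we drop still restricts the real query, so any candidate match has to be re-checked against the full query.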
if (extraction1SmallestRange.compareTo(extraction2SmallestRange) <= 0) { - return extractions1; + return result1.unverify(); } else { - return extractions2; + return result2.unverify(); } } else { - int extraction1ShortestTerm = minTermLength(extractions1); - int extraction2ShortestTerm = minTermLength(extractions2); + int extraction1ShortestTerm = minTermLength(result1.extractions); + int extraction2ShortestTerm = minTermLength(result2.extractions); // keep the clause with longest terms, this likely to be rarest. if (extraction1ShortestTerm >= extraction2ShortestTerm) { - return extractions1; + return result1.unverify(); } else { - return extractions2; + return result2.unverify(); } } } @@ -695,6 +713,13 @@ return min; } + /** + * Query extraction result. A result is a candidate for a given document if either: + * - `matchAllDocs` is true + * - `extractions` and the document have `minimumShouldMatch` terms in common + * Furthermore, the match doesn't need to be verified if `verified` is true; checking + * `matchAllDocs` and `extractions` is enough. + */ static class Result { final Set extractions; @@ -702,24 +727,32 @@ final int minimumShouldMatch; final boolean matchAllDocs; - Result(boolean verified, Set extractions, int minimumShouldMatch) { + private Result(boolean matchAllDocs, boolean verified, Set extractions, int minimumShouldMatch) { if (minimumShouldMatch > extractions.size()) { throw new IllegalArgumentException("minimumShouldMatch can't be greater than the number of extractions: " + minimumShouldMatch + " > " + extractions.size()); } + this.matchAllDocs = matchAllDocs; this.extractions = extractions; this.verified = verified; this.minimumShouldMatch = minimumShouldMatch; - this.matchAllDocs = false; + } + + Result(boolean verified, Set extractions, int minimumShouldMatch) { + this(false, verified, extractions, minimumShouldMatch); } Result(boolean matchAllDocs, boolean verified) { - this.extractions = Collections.emptySet(); - this.verified = verified; - this.minimumShouldMatch = 0; - this.matchAllDocs = matchAllDocs; + this(matchAllDocs, verified, Collections.emptySet(), 0); } + Result unverify() { + if (verified) { + return new Result(matchAllDocs, false, extractions, minimumShouldMatch); + } else { + return this; + } + } } static class QueryExtraction { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index d9977c388b2..b5561e07021 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -74,7 +74,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.percolator.QueryAnalyzer.UnsupportedQueryException; import static org.elasticsearch.percolator.QueryAnalyzer.analyze; -import static org.elasticsearch.percolator.QueryAnalyzer.selectBestExtraction; +import static org.elasticsearch.percolator.QueryAnalyzer.selectBestResult; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; @@ -163,6 +163,20 @@ public class QueryAnalyzerTests extends ESTestCase { assertThat(terms.get(0).bytes().utf8ToString(), equalTo("_very_long_term")); } + public void testExtractQueryMetadata_multiPhraseQuery_dups() { + MultiPhraseQuery multiPhraseQuery = new 
MultiPhraseQuery.Builder() + .add(new Term("_field", "_term1")) + .add(new Term[] {new Term("_field", "_term1"), new Term("_field", "_term2")}) + .build(); + + Result result = analyze(multiPhraseQuery, Version.CURRENT); + assertFalse(result.matchAllDocs); + assertFalse(result.verified); + assertTermsEqual(result.extractions, new Term("_field", "_term1"), new Term("_field", "_term2")); + assertEquals(1, result.minimumShouldMatch); // because of the dup term + } + + public void testExtractQueryMetadata_booleanQuery() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); TermQuery termQuery1 = new TermQuery(new Term("_field", "term0")); @@ -370,18 +384,28 @@ public class QueryAnalyzerTests extends ESTestCase { builder.add(termQuery1, BooleanClause.Occur.MUST_NOT); PhraseQuery phraseQuery = new PhraseQuery("_field", "_term1", "term2"); builder.add(phraseQuery, BooleanClause.Occur.SHOULD); - BooleanQuery booleanQuery = builder.build(); + Result result = analyze(booleanQuery, Version.CURRENT); assertThat(result.verified, is(false)); assertThat(result.minimumShouldMatch, equalTo(2)); - List terms = new ArrayList<>(result.extractions); - assertThat(terms.size(), equalTo(2)); - terms.sort(Comparator.comparing(qt -> qt.term)); - assertThat(terms.get(0).field(), equalTo(phraseQuery.getTerms()[0].field())); - assertThat(terms.get(0).bytes(), equalTo(phraseQuery.getTerms()[0].bytes())); - assertThat(terms.get(1).field(), equalTo(phraseQuery.getTerms()[1].field())); - assertThat(terms.get(1).bytes(), equalTo(phraseQuery.getTerms()[1].bytes())); + assertTermsEqual(result.extractions, phraseQuery.getTerms()); + + builder = new BooleanQuery.Builder(); + builder.add(termQuery1, BooleanClause.Occur.MUST_NOT); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); + booleanQuery = builder.build(); + result = analyze(booleanQuery, Version.CURRENT); + assertThat(result.matchAllDocs, is(true)); + assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(0)); + assertTermsEqual(result.extractions); + + result = analyze(booleanQuery, Version.V_6_0_0); + assertThat(result.matchAllDocs, is(true)); + assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(0)); + assertTermsEqual(result.extractions); } public void testExactMatch_booleanQuery() { @@ -651,7 +675,7 @@ public class QueryAnalyzerTests extends ESTestCase { SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); SpanOrQuery spanOrQuery = new SpanOrQuery(spanTermQuery1, spanTermQuery2); Result result = analyze(spanOrQuery, Version.CURRENT); - assertThat(result.verified, is(false)); + assertThat(result.verified, is(true)); assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, spanTermQuery1.getTerm(), spanTermQuery2.getTerm()); } @@ -943,64 +967,111 @@ public class QueryAnalyzerTests extends ESTestCase { assertThat(result.extractions.isEmpty(), is(true)); } - public void testSelectBestExtraction() { + public void testSelectBestResult() { Set queryTerms1 = terms(new int[0], "12", "1234", "12345"); + Result result1 = new Result(true, queryTerms1, 1); Set queryTerms2 = terms(new int[0], "123", "1234", "12345"); - Set result = selectBestExtraction(queryTerms1, queryTerms2); - assertSame(queryTerms2, result); + Result result2 = new Result(true, queryTerms2, 1); + Result result = selectBestResult(result1, result2); + assertSame(queryTerms2, result.extractions); + assertFalse(result.verified); queryTerms1 = terms(new 
int[]{1, 2, 3}); + result1 = new Result(true, queryTerms1, 1); queryTerms2 = terms(new int[]{2, 3, 4}); - result = selectBestExtraction(queryTerms1, queryTerms2); - assertSame(queryTerms1, result); + result2 = new Result(true, queryTerms2, 1); + result = selectBestResult(result1, result2); + assertSame(queryTerms1, result.extractions); + assertFalse(result.verified); queryTerms1 = terms(new int[]{4, 5, 6}); + result1 = new Result(true, queryTerms1, 1); queryTerms2 = terms(new int[]{1, 2, 3}); - result = selectBestExtraction(queryTerms1, queryTerms2); - assertSame(queryTerms2, result); + result2 = new Result(true, queryTerms2, 1); + result = selectBestResult(result1, result2); + assertSame(queryTerms2, result.extractions); + assertFalse(result.verified); queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456"); + result1 = new Result(true, queryTerms1, 1); queryTerms2 = terms(new int[]{2, 3, 4}, "123", "456"); - result = selectBestExtraction(queryTerms1, queryTerms2); - assertSame(queryTerms1, result); + result2 = new Result(true, queryTerms2, 1); + result = selectBestResult(result1, result2); + assertSame(queryTerms1, result.extractions); + assertFalse(result.verified); queryTerms1 = terms(new int[]{10}); + result1 = new Result(true, queryTerms1, 1); queryTerms2 = terms(new int[]{1}); - result = selectBestExtraction(queryTerms1, queryTerms2); - assertSame(queryTerms2, result); + result2 = new Result(true, queryTerms2, 1); + result = selectBestResult(result1, result2); + assertSame(queryTerms2, result.extractions); queryTerms1 = terms(new int[]{10}, "123"); + result1 = new Result(true, queryTerms1, 1); queryTerms2 = terms(new int[]{1}); - result = selectBestExtraction(queryTerms1, queryTerms2); - assertSame(queryTerms1, result); + result2 = new Result(true, queryTerms2, 1); + result = selectBestResult(result1, result2); + assertSame(queryTerms1, result.extractions); + assertFalse(result.verified); queryTerms1 = terms(new int[]{10}, "1", "123"); + result1 = new Result(true, queryTerms1, 1); queryTerms2 = terms(new int[]{1}, "1", "2"); - result = selectBestExtraction(queryTerms1, queryTerms2); - assertSame(queryTerms1, result); + result2 = new Result(true, queryTerms2, 1); + result = selectBestResult(result1, result2); + assertSame(queryTerms1, result.extractions); + assertFalse(result.verified); queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456"); + result1 = new Result(true, queryTerms1, 1); queryTerms2 = terms(new int[]{2, 3, 4}, "1", "456"); - result = selectBestExtraction(queryTerms1, queryTerms2); - assertSame("Ignoring ranges, so then prefer queryTerms1, because it has the longest shortest term", queryTerms1, result); + result2 = new Result(true, queryTerms2, 1); + result = selectBestResult(result1, result2); + assertSame("Ignoring ranges, so then prefer queryTerms1, because it has the longest shortest term", + queryTerms1, result.extractions); + assertFalse(result.verified); queryTerms1 = terms(new int[]{}); + result1 = new Result(false, queryTerms1, 0); queryTerms2 = terms(new int[]{}); - result = selectBestExtraction(queryTerms1, queryTerms2); - assertSame("In case query extractions are empty", queryTerms2, result); + result2 = new Result(false, queryTerms2, 0); + result = selectBestResult(result1, result2); + assertSame("In case query extractions are empty", queryTerms2, result.extractions); + assertFalse(result.verified); queryTerms1 = terms(new int[]{1}); + result1 = new Result(true, queryTerms1, 1); queryTerms2 = terms(new int[]{}); - result = selectBestExtraction(queryTerms1, 
queryTerms2); - assertSame("In case query a single extraction is empty", queryTerms1, result); + result2 = new Result(false, queryTerms2, 0); + result = selectBestResult(result1, result2); + assertSame("In case query a single extraction is empty", queryTerms1, result.extractions); + assertFalse(result.verified); queryTerms1 = terms(new int[]{}); + result1 = new Result(false, queryTerms1, 0); queryTerms2 = terms(new int[]{1}); - result = selectBestExtraction(queryTerms1, queryTerms2); - assertSame("In case query a single extraction is empty", queryTerms2, result); + result2 = new Result(true, queryTerms2, 1); + result = selectBestResult(result1, result2); + assertSame("In case query a single extraction is empty", queryTerms2, result.extractions); + assertFalse(result.verified); + + result1 = new Result(true, true); + queryTerms2 = terms(new int[]{1}); + result2 = new Result(true, queryTerms2, 1); + result = selectBestResult(result1, result2); + assertSame("Conjunction with a match_all", result2, result); + assertTrue(result.verified); + + queryTerms1 = terms(new int[]{1}); + result1 = new Result(true, queryTerms1, 1); + result2 = new Result(true, true); + result = selectBestResult(result1, result2); + assertSame("Conjunction with a match_all", result1, result); + assertTrue(result.verified); } - public void testSelectBestExtraction_random() { + public void testSelectBestResult_random() { Set terms1 = new HashSet<>(); int shortestTerms1Length = Integer.MAX_VALUE; int sumTermLength = randomIntBetween(1, 128); @@ -1021,9 +1092,11 @@ public class QueryAnalyzerTests extends ESTestCase { sumTermLength -= length; } - Set result = selectBestExtraction(terms1, terms2); + Result result1 = new Result(true, terms1, 1); + Result result2 = new Result(true, terms2, 1); + Result result = selectBestResult(result1, result2); Set expected = shortestTerms1Length >= shortestTerms2Length ? 
terms1 : terms2; - assertThat(result, sameInstance(expected)); + assertThat(result.extractions, sameInstance(expected)); } public void testPointRangeQuery() { diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java index 34cf953ea50..c3616d3b9b5 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.rankeval; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -109,6 +110,7 @@ public class RestRankEvalAction extends BaseRestHandler { private static void parseRankEvalRequest(RankEvalRequest rankEvalRequest, RestRequest request, XContentParser parser) { rankEvalRequest.indices(Strings.splitStringByCommaToArray(request.param("index"))); + rankEvalRequest.indicesOptions(IndicesOptions.fromRequest(request, rankEvalRequest.indicesOptions())); RankEvalSpec spec = RankEvalSpec.parse(parser); rankEvalRequest.setRankEvalSpec(spec); } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java index 50ab9bcf272..a076b93fbd3 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java @@ -126,7 +126,9 @@ public class TransportRankEvalAction extends HandledTransportAction relevantDocs = createRelevant("2", "3", "4", "5", "6"); + relevantDocs.add(new RatedDocument("test2", "7", TestRatingEnum.RELEVANT.ordinal())); + List specifications = new ArrayList<>(); + specifications.add(new RatedRequest("amsterdam_query", relevantDocs, amsterdamQuery)); + RankEvalSpec task = new RankEvalSpec(specifications, new PrecisionAtK()); + + RankEvalRequest request = new RankEvalRequest(task, new String[] { "test", "test2" }); + request.setRankEvalSpec(task); + + RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); + Breakdown details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails(); + assertEquals(7, details.getRetrieved()); + assertEquals(6, details.getRelevantRetrieved()); + + // test that ignore_unavailable=true works but returns one result less + assertTrue(client().admin().indices().prepareClose("test2").get().isAcknowledged()); + + request.indicesOptions(IndicesOptions.fromParameters(null, "true", null, SearchRequest.DEFAULT_INDICES_OPTIONS)); + response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); + details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails(); + assertEquals(6, details.getRetrieved()); + assertEquals(5, details.getRelevantRetrieved()); + + // test that ignore_unavailable=false or default settings throw an IndexClosedException + assertTrue(client().admin().indices().prepareClose("test2").get().isAcknowledged()); + request.indicesOptions(IndicesOptions.fromParameters(null, "false", null, SearchRequest.DEFAULT_INDICES_OPTIONS)); + response = client().execute(RankEvalAction.INSTANCE, 
request).actionGet(); + assertEquals(1, response.getFailures().size()); + assertThat(response.getFailures().get("amsterdam_query"), instanceOf(IndexClosedException.class)); + + // test expand_wildcards + request = new RankEvalRequest(task, new String[] { "tes*" }); + request.indicesOptions(IndicesOptions.fromParameters("none", null, null, SearchRequest.DEFAULT_INDICES_OPTIONS)); + response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); + details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails(); + assertEquals(0, details.getRetrieved()); + + request.indicesOptions(IndicesOptions.fromParameters("open", null, null, SearchRequest.DEFAULT_INDICES_OPTIONS)); + response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); + details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails(); + assertEquals(6, details.getRetrieved()); + assertEquals(5, details.getRelevantRetrieved()); + + request.indicesOptions(IndicesOptions.fromParameters("closed", null, null, SearchRequest.DEFAULT_INDICES_OPTIONS)); + response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); + assertEquals(1, response.getFailures().size()); + assertThat(response.getFailures().get("amsterdam_query"), instanceOf(IndexClosedException.class)); + + // test allow_no_indices + request = new RankEvalRequest(task, new String[] { "bad*" }); + request.indicesOptions(IndicesOptions.fromParameters(null, null, "true", SearchRequest.DEFAULT_INDICES_OPTIONS)); + response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); + details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails(); + assertEquals(0, details.getRetrieved()); + + request.indicesOptions(IndicesOptions.fromParameters(null, null, "false", SearchRequest.DEFAULT_INDICES_OPTIONS)); + response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); + assertEquals(1, response.getFailures().size()); + assertThat(response.getFailures().get("amsterdam_query"), instanceOf(IndexNotFoundException.class)); + } + private static List createRelevant(String... 
docs) { List relevant = new ArrayList<>(); for (String doc : docs) { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java index 94338e570a5..b49811a9bca 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java @@ -51,6 +51,7 @@ import java.util.function.Supplier; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.startsWith; public class RankEvalSpecTests extends ESTestCase { @@ -133,7 +134,7 @@ public class RankEvalSpecTests extends ESTestCase { BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, null, random()); try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { Exception exception = expectThrows(Exception.class, () -> RankEvalSpec.parse(parser)); - assertThat(exception.getMessage(), startsWith("[rank_eval] failed to parse field")); + assertThat(exception.getMessage(), containsString("[rank_eval] failed to parse field")); } } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java index 0f23178c683..ad962178f58 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.rankeval; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -27,6 +28,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -51,6 +53,7 @@ import static java.util.Collections.emptyList; import static java.util.stream.Collectors.toList; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.startsWith; public class RatedRequestsTests extends ESTestCase { @@ -134,11 +137,13 @@ public class RatedRequestsTests extends ESTestCase { BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, null, random()); try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { Exception exception = expectThrows(Exception.class, () -> RatedRequest.fromXContent(parser)); - if (exception instanceof IllegalArgumentException) { - assertThat(exception.getMessage(), startsWith("[request] unknown field")); + if (exception instanceof XContentParseException) { + 
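+ // XContentParseException prefixes its message with the parse location and wraps the real failure, so the assertions below match against ExceptionsHelper.detailedMessage, which flattens the cause chain, rather than comparing the top-level message verbatim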
XContentParseException xcpe = (XContentParseException) exception; + assertThat(ExceptionsHelper.detailedMessage(xcpe), containsString("unknown field")); + assertThat(ExceptionsHelper.detailedMessage(xcpe), containsString("parser not found")); } - if (exception instanceof ParsingException) { - assertThat(exception.getMessage(), startsWith("[request] failed to parse field")); + if (exception instanceof XContentParseException) { + assertThat(exception.getMessage(), containsString("[request] failed to parse field")); } } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java index 80419a9b9d7..566c97c61c4 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -31,6 +31,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.index.reindex.ScrollableHitSource; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.ResponseException; @@ -199,7 +200,7 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { try (XContentParser xContentParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, content)) { parsedResponse = parser.apply(xContentParser, xContentType); - } catch (ParsingException e) { + } catch (XContentParseException e) { /* Because we're streaming the response we can't get a copy of it here. The best we can do is hint that it * is totally wrong and we're probably not talking to Elasticsearch. 
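* (an HTML error page from an intervening proxy is the typical culprit here)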
*/ throw new ElasticsearchException( diff --git a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java index 79b3f8c5df4..f33fa98f0e3 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java @@ -26,6 +26,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -38,6 +39,7 @@ import org.junit.Before; import java.io.IOException; import java.net.InetAddress; +import java.net.URI; import java.net.URL; import java.util.List; import java.util.Map; @@ -46,6 +48,7 @@ import static java.util.Collections.emptyMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notNullValue; public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -72,17 +75,21 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas Map clusterSettings = entityAsMap(clusterSettingsResponse); @SuppressWarnings("unchecked") - List pathRepo = (List) XContentMapValues.extractValue("defaults.path.repo", clusterSettings); - assertThat(pathRepo, hasSize(1)); + List pathRepos = (List) XContentMapValues.extractValue("defaults.path.repo", clusterSettings); + assertThat(pathRepos, notNullValue()); + assertThat(pathRepos, hasSize(1)); + + final String pathRepo = pathRepos.get(0); + final URI pathRepoUri = PathUtils.get(pathRepo).toUri().normalize(); // Create a FS repository using the path.repo location Response createFsRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-fs", emptyMap(), - buildRepositorySettings(FsRepository.TYPE, Settings.builder().put("location", pathRepo.get(0)).build())); + buildRepositorySettings(FsRepository.TYPE, Settings.builder().put("location", pathRepo).build())); assertThat(createFsRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); // Create a URL repository using the file://{path.repo} URL Response createFileRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-file", emptyMap(), - buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", "file://" + pathRepo.get(0)).build())); + buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", pathRepoUri.toString()).build())); assertThat(createFileRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); // Create a URL repository using the http://{fixture} URL diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.3.0-snapshot-98a6b3d.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 49aa857cf94..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-ece1b4232697fad170c589f0df887efa6e66dd4f \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.3.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.3.0.jar.sha1 new file mode 100644 index 00000000000..de70972e975 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.3.0.jar.sha1 @@ -0,0 +1 @@ +c09216a18658d5b2912566efff8665e45edc24b4 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.3.0-snapshot-98a6b3d.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 16f43319ded..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a16521e8f7240a9b93ea8ced157298b9d18bca43 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.3.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.3.0.jar.sha1 new file mode 100644 index 00000000000..40ff3efe264 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.3.0.jar.sha1 @@ -0,0 +1 @@ +c9d5bbd0affa90b46e173c762c35419a54977c35 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.3.0-snapshot-98a6b3d.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index e86c0765b38..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0dc6db8e16bf1ed6ebaa914fcbfbb4970af23747 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.3.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.3.0.jar.sha1 new file mode 100644 index 00000000000..9442635addd --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.3.0.jar.sha1 @@ -0,0 +1 @@ +4e6c63fa8ae005d81d12f0d88ffa98346b443ac4 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.3.0-snapshot-98a6b3d.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index b6f58cf3fe6..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -de43b057e8800f6c7b26907035664feb686127af \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.3.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.3.0.jar.sha1 new file mode 100644 index 00000000000..780824c4d45 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.3.0.jar.sha1 @@ -0,0 +1 @@ +37b7ff0a6493f139cb77f5bda965ac0189c8efd1 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.3.0-snapshot-98a6b3d.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index cac837ab4a6..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5e6a6d99a04ea5121bfd77470a7818725516ead \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.3.0.jar.sha1 
b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.3.0.jar.sha1 new file mode 100644 index 00000000000..ba241e6a099 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.3.0.jar.sha1 @@ -0,0 +1 @@ +d189185da23b2221c4d532da5e2cacce735f8a0c \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.3.0-snapshot-98a6b3d.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 909569fec9c..00000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d755dcef8763b783b7cbba7154a62f91e413007c \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.3.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.3.0.jar.sha1 new file mode 100644 index 00000000000..fb7e5befe47 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.3.0.jar.sha1 @@ -0,0 +1 @@ +74462b51de45afe708f1042cc901fe7370413871 \ No newline at end of file diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index bb5e1e75781..77ebcfec532 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. */ +import org.elasticsearch.gradle.test.AntFixture esplugin { description 'The Azure Repository plugin adds support for Azure storage repositories.' @@ -42,9 +43,28 @@ thirdPartyAudit.excludes = [ 'org.slf4j.LoggerFactory', ] -integTestCluster { - keystoreSetting 'azure.client.default.account', 'cloudazureresource' - keystoreSetting 'azure.client.default.key', 'abcdefgh' - keystoreSetting 'azure.client.secondary.account', 'cloudazureresource' - keystoreSetting 'azure.client.secondary.key', 'abcdefgh' +forbiddenApisTest { + // we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage + bundledSignatures -= 'jdk-non-portable' + bundledSignatures += 'jdk-internal' } + +/** A task to start the fixture which emulates an Azure Storage service **/ +task azureStorageFixture(type: AntFixture) { + dependsOn compileTestJava + env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" + executable = new File(project.runtimeJavaHome, 'bin/java') + args 'org.elasticsearch.repositories.azure.AzureStorageFixture', baseDir, 'container_test' +} + +integTestCluster { + dependsOn azureStorageFixture + + keystoreSetting 'azure.client.integration_test.account', "azure_integration_test_account" + /* The key is "azure_integration_test_key" encoded using base64 */ + keystoreSetting 'azure.client.integration_test.key', "YXp1cmVfaW50ZWdyYXRpb25fdGVzdF9rZXk=" + // Use a closure on the string to delay evaluation until tests are executed. The endpoint_suffix is used + // in a hacky way to change the protocol and endpoint. We must fix that. 
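+ // The suffix is spliced verbatim into the storage connection string, which the SDK splits on ';', so the leading "ignored" token merely fills the endpoint_suffix slot while the BlobEndpoint entry redirects the client to the fixture over plain http.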
+ setting 'azure.client.integration_test.endpoint_suffix', + "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=http://${ -> azureStorageFixture.addressAndPort }" +} \ No newline at end of file diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreTests.java new file mode 100644 index 00000000000..025ee45b9c3 --- /dev/null +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreTests.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.azure; + +import com.microsoft.azure.storage.StorageException; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.repositories.ESBlobStoreTestCase; + +import java.io.IOException; +import java.net.URISyntaxException; + +public class AzureBlobStoreTests extends ESBlobStoreTestCase { + + @Override + protected BlobStore newBlobStore() throws IOException { + try { + RepositoryMetaData repositoryMetaData = new RepositoryMetaData("azure", "ittest", Settings.EMPTY); + AzureStorageServiceMock client = new AzureStorageServiceMock(); + return new AzureBlobStore(repositoryMetaData, Settings.EMPTY, client); + } catch (URISyntaxException | StorageException e) { + throw new IOException(e); + } + } +} diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryF.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryF.java deleted file mode 100644 index 981e0889e73..00000000000 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryF.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.repositories.azure; - -import org.elasticsearch.core.internal.io.IOUtils; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.MockNode; -import org.elasticsearch.node.Node; - -import java.io.IOException; -import java.util.Collections; -import java.util.concurrent.CountDownLatch; - -/** - * Azure Repository - * Main class to easily run Azure from a IDE. - * It sets all the options to run the Azure plugin and access it from Sense. - * - * In order to run this class set configure the following: - * 1) Set `-Des.path.home=` to a directory containing an ES config directory - * 2) Set `-Dcloud.azure.storage.my_account.account=account_name` - * 3) Set `-Dcloud.azure.storage.my_account.key=account_key` - * - * Then you can run REST calls like: - *

- # Clean test env
- curl -XDELETE localhost:9200/foo?pretty
- curl -XDELETE localhost:9200/_snapshot/my_backup1?pretty
- curl -XDELETE localhost:9200/_snapshot/my_backup2?pretty
-
- # Create data
- curl -XPUT localhost:9200/foo/bar/1?pretty -d '{
- "foo": "bar"
- }'
- curl -XPOST localhost:9200/foo/_refresh?pretty
- curl -XGET localhost:9200/foo/_count?pretty
-
- # Create repository using default account
- curl -XPUT localhost:9200/_snapshot/my_backup1?pretty -d '{
-   "type": "azure"
- }'
-
- # Backup
- curl -XPOST "localhost:9200/_snapshot/my_backup1/snap1?pretty&wait_for_completion=true"
-
- # Remove data
- curl -XDELETE localhost:9200/foo?pretty
-
- # Restore data
- curl -XPOST "localhost:9200/_snapshot/my_backup1/snap1/_restore?pretty&wait_for_completion=true"
- curl -XGET localhost:9200/foo/_count?pretty
- 
- * - * If you want to define a secondary repository: - * - * 4) Set `-Dcloud.azure.storage.my_account.default=true` - * 5) Set `-Dcloud.azure.storage.my_account2.account=account_name` - * 6) Set `-Dcloud.azure.storage.my_account2.key=account_key_secondary` - * - * Then you can run REST calls like: - *
- # Remove data
- curl -XDELETE localhost:9200/foo?pretty
-
- # Create repository using account2 (secondary)
- curl -XPUT localhost:9200/_snapshot/my_backup2?pretty -d '{
-   "type": "azure",
-   "settings": {
-     "account" : "my_account2",
-     "location_mode": "secondary_only"
-   }
- }'
-
- # Restore data from the secondary endpoint
- curl -XPOST "localhost:9200/_snapshot/my_backup2/snap1/_restore?pretty&wait_for_completion=true"
- curl -XGET localhost:9200/foo/_count?pretty
- 
- */ -public class AzureRepositoryF { - public static void main(String[] args) throws Throwable { - Settings.Builder settings = Settings.builder(); - settings.put("http.cors.enabled", "true"); - settings.put("http.cors.allow-origin", "*"); - settings.put("cluster.name", AzureRepositoryF.class.getSimpleName()); - - // Example for azure repo settings - // settings.put("cloud.azure.storage.my_account1.account", "account_name"); - // settings.put("cloud.azure.storage.my_account1.key", "account_key"); - // settings.put("cloud.azure.storage.my_account1.default", true); - // settings.put("cloud.azure.storage.my_account2.account", "account_name"); - // settings.put("cloud.azure.storage.my_account2.key", "account_key_secondary"); - - final CountDownLatch latch = new CountDownLatch(1); - final Node node = new MockNode(settings.build(), Collections.singletonList(AzureRepositoryPlugin.class)); - Runtime.getRuntime().addShutdownHook(new Thread() { - @Override - public void run() { - try { - IOUtils.close(node); - } catch (IOException e) { - throw new ElasticsearchException(e); - } finally { - latch.countDown(); - } - } - }); - node.start(); - latch.await(); - } -} diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java index 01b26bad343..26b02278edd 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java @@ -47,7 +47,6 @@ public class AzureRepositorySettingsTests extends ESTestCase { TestEnvironment.newEnvironment(internalSettings), NamedXContentRegistry.EMPTY, null); } - public void testReadonlyDefault() throws StorageException, IOException, URISyntaxException { assertThat(azureRepository(Settings.EMPTY).isReadOnly(), is(false)); } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageFixture.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageFixture.java new file mode 100644 index 00000000000..ebd8241e710 --- /dev/null +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageFixture.java @@ -0,0 +1,136 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.repositories.azure; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; +import com.sun.net.httpserver.HttpServer; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.mocksocket.MockHttpServer; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.lang.management.ManagementFactory; +import java.net.Inet6Address; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.SocketAddress; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.List; +import java.util.Map; + +import static java.util.Collections.singleton; +import static java.util.Collections.singletonList; + +/** + * {@link AzureStorageFixture} is a fixture that emulates an Azure Storage service. + *

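+ * The fixture writes a {@code pid} file and a {@code ports} file in its working directory; these are + * presumably read by the Gradle {@code AntFixture} task that starts it in order to manage the process + * and to discover which address and port the test cluster must use.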
+ * It starts an asynchronous socket server that binds to a random local port. The server parses + * HTTP requests and uses a {@link AzureStorageTestServer} to handle them before returning + * the responses to the client. + */ +public class AzureStorageFixture { + + public static void main(String[] args) throws Exception { + if (args == null || args.length != 2) { + throw new IllegalArgumentException("AzureStorageFixture <working directory> <container name>"); + } + + final InetSocketAddress socketAddress = new InetSocketAddress(InetAddress.getLoopbackAddress(), 0); + final HttpServer httpServer = MockHttpServer.createHttp(socketAddress, 0); + + try { + final Path workingDirectory = workingDir(args[0]); + // Writes the PID of the current Java process in a `pid` file located in the working directory + writeFile(workingDirectory, "pid", ManagementFactory.getRuntimeMXBean().getName().split("@")[0]); + + final String addressAndPort = addressToString(httpServer.getAddress()); + // Writes the address and port of the http server in a `ports` file located in the working directory + writeFile(workingDirectory, "ports", addressAndPort); + + // Emulates Azure + final String storageUrl = "http://" + addressAndPort; + final AzureStorageTestServer testServer = new AzureStorageTestServer(storageUrl); + testServer.createContainer(args[1]); + + httpServer.createContext("/", new ResponseHandler(testServer)); + httpServer.start(); + + // Wait to be killed + Thread.sleep(Long.MAX_VALUE); + + } finally { + httpServer.stop(0); + } + } + + @SuppressForbidden(reason = "Paths#get is fine - we don't have environment here") + private static Path workingDir(final String dir) { + return Paths.get(dir); + } + + private static void writeFile(final Path dir, final String fileName, final String content) throws IOException { + final Path tempPidFile = Files.createTempFile(dir, null, null); + Files.write(tempPidFile, singleton(content)); + Files.move(tempPidFile, dir.resolve(fileName), StandardCopyOption.ATOMIC_MOVE); + } + + private static String addressToString(final SocketAddress address) { + final InetSocketAddress inetSocketAddress = (InetSocketAddress) address; + if (inetSocketAddress.getAddress() instanceof Inet6Address) { + return "[" + inetSocketAddress.getHostString() + "]:" + inetSocketAddress.getPort(); + } else { + return inetSocketAddress.getHostString() + ":" + inetSocketAddress.getPort(); + } + } + + static class ResponseHandler implements HttpHandler { + + private final AzureStorageTestServer server; + + private ResponseHandler(final AzureStorageTestServer server) { + this.server = server; + } + + @Override + public void handle(HttpExchange exchange) throws IOException { + String method = exchange.getRequestMethod(); + String path = server.getEndpoint() + exchange.getRequestURI().getRawPath(); + String query = exchange.getRequestURI().getRawQuery(); + Map<String, List<String>> headers = exchange.getRequestHeaders(); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + Streams.copy(exchange.getRequestBody(), out); + + final AzureStorageTestServer.Response response = server.handle(method, path, query, headers, out.toByteArray()); + + Map<String, List<String>> responseHeaders = exchange.getResponseHeaders(); + responseHeaders.put("Content-Type", singletonList(response.contentType)); + response.headers.forEach((k, v) -> responseHeaders.put(k, singletonList(v))); + exchange.sendResponseHeaders(response.status.getStatus(), response.body.length); + if (response.body.length > 0) { + exchange.getResponseBody().write(response.body); + } + exchange.close(); + } + } +} diff
--git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java index 68b84594d62..80035d8f788 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java @@ -25,8 +25,8 @@ import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.internal.io.Streams; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -66,6 +66,8 @@ public class AzureStorageServiceMock extends AbstractComponent implements AzureS @Override public void deleteFiles(String account, LocationMode mode, String container, String path) { + final Map<String, BlobMetaData> blobs = listBlobsByPrefix(account, mode, container, path, null); + blobs.keySet().forEach(key -> deleteBlob(account, mode, container, key)); } @Override diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageTestServer.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageTestServer.java new file mode 100644 index 00000000000..584428f9a45 --- /dev/null +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageTestServer.java @@ -0,0 +1,425 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.azure; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.path.PathTrie; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.RestUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicLong; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonMap; + +/** + * {@link AzureStorageTestServer} emulates an Azure Storage service through a {@link #handle(String, String, String, Map, byte[])} + * method that provides appropriate responses for specific requests like the real Azure platform would do.
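+ * Only the subset of the Blob service API exercised by the repository plugin is emulated: Get Blob + * Properties, Put Blob (including {@code x-ms-copy-source} server-side copies), Get Blob, Delete Blob, + * List Blobs and Get Container Properties.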
+ * It is based on official documentation available at https://docs.microsoft.com/en-us/rest/api/storageservices/blob-service-rest-api. + */ +public class AzureStorageTestServer { + + private static final byte[] EMPTY_BYTE = new byte[0]; + + /** List of the containers stored on this test server **/ + private final Map<String, Container> containers = ConcurrentCollections.newConcurrentMap(); + + /** Request handlers for the requests made by the Azure client **/ + private final PathTrie<RequestHandler> handlers; + + /** Server endpoint **/ + private final String endpoint; + + /** Counter used to generate request ids **/ + private final AtomicLong requests = new AtomicLong(0); + + /** + * Creates an {@link AzureStorageTestServer} with a custom endpoint + */ + AzureStorageTestServer(final String endpoint) { + this.endpoint = Objects.requireNonNull(endpoint, "endpoint must not be null"); + this.handlers = defaultHandlers(endpoint, containers); + } + + /** Creates a container in the test server **/ + void createContainer(final String containerName) { + containers.put(containerName, new Container(containerName)); + } + + public String getEndpoint() { + return endpoint; + } + + /** + * Returns a response for the given request + * + * @param method the HTTP method of the request + * @param path the path of the URL of the request + * @param query the query string of the URL of the request + * @param headers the HTTP headers of the request + * @param body the HTTP request body + * @return a {@link Response} + * @throws IOException if something goes wrong + */ + public Response handle(final String method, + final String path, + final String query, + final Map<String, List<String>> headers, + byte[] body) throws IOException { + + final long requestId = requests.incrementAndGet(); + + final Map<String, String> params = new HashMap<>(); + if (query != null) { + RestUtils.decodeQueryString(query, 0, params); + } + + final RequestHandler handler = handlers.retrieve(method + " " + path, params); + if (handler != null) { + return handler.execute(params, headers, body, requestId); + } else { + return newInternalError(requestId); + } + } + + @FunctionalInterface + interface RequestHandler { + + /** + * Simulates the execution of an Azure Storage request and returns a corresponding response.
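+ * Handlers are registered in {@link #defaultHandlers} under {@code "METHOD path"} keys and retrieved + * the same way by the server's handle method, so an implementation only ever sees requests matching its + * own HTTP verb and path pattern.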
+ * + * @param params the request's query string parameters + * @param headers the request's headers + * @param body the request body provided as a byte array + * @param requestId a unique id for the incoming request + * @return the corresponding response + * + * @throws IOException if something goes wrong + */ + Response execute(Map<String, String> params, Map<String, List<String>> headers, byte[] body, long requestId) throws IOException; + } + + /** Builds the default request handlers **/ + private static PathTrie<RequestHandler> defaultHandlers(final String endpoint, final Map<String, Container> containers) { + final PathTrie<RequestHandler> handlers = new PathTrie<>(RestUtils.REST_DECODER); + + // Get Blob Properties + // + // https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + objectsPaths("HEAD " + endpoint + "/{container}").forEach(path -> + handlers.insert(path, (params, headers, body, requestId) -> { + final String containerName = params.get("container"); + + final Container container = containers.get(containerName); + if (container == null) { + return newContainerNotFoundError(requestId); + } + + final String blobName = objectName(params); + for (Map.Entry<String, byte[]> object : container.objects.entrySet()) { + if (object.getKey().equals(blobName)) { + Map<String, String> responseHeaders = new HashMap<>(); + responseHeaders.put("x-ms-blob-content-length", String.valueOf(object.getValue().length)); + responseHeaders.put("x-ms-blob-type", "blockblob"); + return new Response(RestStatus.OK, responseHeaders, "text/plain", EMPTY_BYTE); + } + } + return newBlobNotFoundError(requestId); + }) + ); + + // Put Blob + // + // https://docs.microsoft.com/en-us/rest/api/storageservices/put-blob + objectsPaths("PUT " + endpoint + "/{container}").forEach(path -> + handlers.insert(path, (params, headers, body, requestId) -> { + final String destContainerName = params.get("container"); + + final Container destContainer = containers.get(destContainerName); + if (destContainer == null) { + return newContainerNotFoundError(requestId); + } + + final String destBlobName = objectName(params); + + // Request is a copy request + List<String> headerCopySource = headers.getOrDefault("x-ms-copy-source", emptyList()); + if (headerCopySource.isEmpty() == false) { + String srcBlobName = headerCopySource.get(0); + + Container srcContainer = null; + for (Container container : containers.values()) { + String prefix = endpoint + "/" + container.name + "/"; + if (srcBlobName.startsWith(prefix)) { + srcBlobName = srcBlobName.replaceFirst(prefix, ""); + srcContainer = container; + break; + } + } + + if (srcContainer == null || srcContainer.objects.containsKey(srcBlobName) == false) { + return newBlobNotFoundError(requestId); + } + + byte[] bytes = srcContainer.objects.get(srcBlobName); + if (bytes != null) { + destContainer.objects.put(destBlobName, bytes); + return new Response(RestStatus.ACCEPTED, singletonMap("x-ms-copy-status", "success"), "text/plain", EMPTY_BYTE); + } else { + return newBlobNotFoundError(requestId); + } + } else { + destContainer.objects.put(destBlobName, body); + } + + return new Response(RestStatus.CREATED, emptyMap(), "text/plain", EMPTY_BYTE); + }) + ); + + // Get Blob + // + // https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + objectsPaths("GET " + endpoint + "/{container}").forEach(path -> + handlers.insert(path, (params, headers, body, requestId) -> { + final String containerName = params.get("container"); + + final Container container = containers.get(containerName); + if (container == null) { + return newContainerNotFoundError(requestId); + } + + final String
blobName = objectName(params); + if (container.objects.containsKey(blobName)) { + Map<String, String> responseHeaders = new HashMap<>(); + responseHeaders.put("x-ms-copy-status", "success"); + responseHeaders.put("x-ms-blob-type", "blockblob"); + return new Response(RestStatus.OK, responseHeaders, "application/octet-stream", container.objects.get(blobName)); + + } + return newBlobNotFoundError(requestId); + }) + ); + + // Delete Blob + // + // https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + objectsPaths("DELETE " + endpoint + "/{container}").forEach(path -> + handlers.insert(path, (params, headers, body, requestId) -> { + final String containerName = params.get("container"); + + final Container container = containers.get(containerName); + if (container == null) { + return newContainerNotFoundError(requestId); + } + + final String blobName = objectName(params); + if (container.objects.remove(blobName) != null) { + return new Response(RestStatus.ACCEPTED, emptyMap(), "text/plain", EMPTY_BYTE); + } + return newBlobNotFoundError(requestId); + }) + ); + + // List Blobs + // + // https://docs.microsoft.com/en-us/rest/api/storageservices/list-blobs + handlers.insert("GET " + endpoint + "/{container}/", (params, headers, body, requestId) -> { + final String containerName = params.get("container"); + + final Container container = containers.get(containerName); + if (container == null) { + return newContainerNotFoundError(requestId); + } + + final String prefix = params.get("prefix"); + return newEnumerationResultsResponse(requestId, container, prefix); + }); + + // Get Container Properties + // + // https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + handlers.insert("HEAD " + endpoint + "/{container}", (params, headers, body, requestId) -> { + String container = params.get("container"); + if (Strings.hasText(container) && containers.containsKey(container)) { + return new Response(RestStatus.OK, emptyMap(), "text/plain", EMPTY_BYTE); + } else { + return newContainerNotFoundError(requestId); + } + }); + + return handlers; + } + + /** + * Represents an Azure Storage container. + */ + static class Container { + + /** Container name **/ + final String name; + + /** Blobs contained in the container **/ + final Map<String, byte[]> objects; + + Container(final String name) { + this.name = Objects.requireNonNull(name); + this.objects = ConcurrentCollections.newConcurrentMap(); + } + } + + /** + * Represents an HTTP response. + */ + static class Response { + + final RestStatus status; + final Map<String, String> headers; + final String contentType; + final byte[] body; + + Response(final RestStatus status, final Map<String, String> headers, final String contentType, final byte[] body) { + this.status = Objects.requireNonNull(status); + this.headers = Objects.requireNonNull(headers); + this.contentType = Objects.requireNonNull(contentType); + this.body = Objects.requireNonNull(body); + } + } + + /** + * Expands a path like "http://host:port/{container}" into 10 derived paths like: + * - http://host:port/{container}/{path0} + * - http://host:port/{container}/{path0}/{path1} + * - http://host:port/{container}/{path0}/{path1}/{path2} + * - etc + */ + private static List<String> objectsPaths(final String path) { + final List<String> paths = new ArrayList<>(); + String p = path; + for (int i = 0; i < 10; i++) { + p = p + "/{path" + i + "}"; + paths.add(p); + } + return paths; + } + + /** + * Retrieves the object name from all derived paths named {pathX} where 0 <= X < 10.
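+ * For example, the parameters {@code {path0=foo, path1=bar, path2=baz}} produce the object name + * {@code foo/bar/baz}.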
+ * + * This is the counterpart of {@link #objectsPaths(String)} + */ + private static String objectName(final Map<String, String> params) { + final StringBuilder name = new StringBuilder(); + for (int i = 0; i < 10; i++) { + String value = params.getOrDefault("path" + i, null); + if (value != null) { + if (name.length() > 0) { + name.append('/'); + } + name.append(value); + } + } + return name.toString(); + } + + /** + * Azure EnumerationResults Response + */ + private static Response newEnumerationResultsResponse(final long requestId, final Container container, final String prefix) { + final String id = Long.toString(requestId); + final StringBuilder response = new StringBuilder(); + response.append("<?xml version=\"1.0\" encoding=\"utf-8\"?>"); + response.append("<EnumerationResults>"); + if (prefix != null) { + response.append("<Prefix>").append(prefix).append("</Prefix>"); + } else { + response.append("<Prefix/>"); + } + response.append("<MaxResults>").append(container.objects.size()).append("</MaxResults>"); + response.append("<Blobs>"); + + int count = 0; + for (Map.Entry<String, byte[]> object : container.objects.entrySet()) { + String objectName = object.getKey(); + if (prefix == null || objectName.startsWith(prefix)) { + response.append("<Blob>"); + response.append("<Name>").append(objectName).append("</Name>"); + response.append("<Properties>"); + response.append("<Content-Length>").append(object.getValue().length).append("</Content-Length>"); + response.append("<CopyId>").append(count++).append("</CopyId>"); + response.append("<CopyStatus>success</CopyStatus>"); + response.append("<BlobType>BlockBlob</BlobType>"); + response.append("</Properties>"); + response.append("</Blob>"); + } + } + + response.append("</Blobs>"); + response.append("<NextMarker/>"); + response.append("</EnumerationResults>"); + + return new Response(RestStatus.OK, singletonMap("x-ms-request-id", id), "application/xml", response.toString().getBytes(UTF_8)); + } + + private static Response newContainerNotFoundError(final long requestId) { + return newError(requestId, RestStatus.NOT_FOUND, "ContainerNotFound", "The specified container does not exist"); + } + + private static Response newBlobNotFoundError(final long requestId) { + return newError(requestId, RestStatus.NOT_FOUND, "BlobNotFound", "The specified blob does not exist"); + } + + private static Response newInternalError(final long requestId) { + return newError(requestId, RestStatus.INTERNAL_SERVER_ERROR, "InternalError", "The server encountered an internal error"); + } + + /** + * Azure Error + * + * https://docs.microsoft.com/en-us/rest/api/storageservices/status-and-error-codes2 + */ + private static Response newError(final long requestId, + final RestStatus status, + final String code, + final String message) { + + final StringBuilder response = new StringBuilder(); + response.append("<?xml version=\"1.0\" encoding=\"utf-8\"?>"); + response.append("<Error>"); + response.append("<Code>").append(code).append("</Code>"); + response.append("<Message>").append(message).append("</Message>"); + response.append("</Error>"); + + final Map<String, String> headers = new HashMap<>(2); + headers.put("x-ms-request-id", String.valueOf(requestId)); + headers.put("x-ms-error-code", code); + + return new Response(status, headers, "application/xml", response.toString().getBytes(UTF_8)); + } +} diff --git a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml index fb929f1e822..25726fa8f9b 100644 --- a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml +++ b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml @@ -1,6 +1,6 @@ -# Integration tests for Azure Repository component +# Integration tests for repository-azure # -"Azure Repository loaded": +"Plugin repository-azure is loaded": + -
do: cluster.state: {} @@ -11,3 +11,177 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: repository-azure } +--- +"Snapshot/Restore with repository-azure": + + # Register repository + - do: + snapshot.create_repository: + repository: repository + body: + type: azure + settings: + container: "container_test" + client: "integration_test" + + - match: { acknowledged: true } + + # Get repository + - do: + snapshot.get_repository: + repository: repository + + - match: {repository.settings.container : "container_test"} + - match: {repository.settings.client : "integration_test"} + + # Index documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 1 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 2 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 3 + - snapshot: one + + - do: + count: + index: docs + + - match: {count: 3} + + # Create a first snapshot + - do: + snapshot.create: + repository: repository + snapshot: snapshot-one + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-one } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.include_global_state: true } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.status: + repository: repository + snapshot: snapshot-one + + - is_true: snapshots + - match: { snapshots.0.snapshot: snapshot-one } + - match: { snapshots.0.state : SUCCESS } + + # Index more documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 4 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 5 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 6 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 7 + - snapshot: two + + - do: + count: + index: docs + + - match: {count: 7} + + # Create a second snapshot + - do: + snapshot.create: + repository: repository + snapshot: snapshot-two + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-two } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.get: + repository: repository + snapshot: snapshot-one,snapshot-two + + - is_true: snapshots + - match: { snapshots.0.state : SUCCESS } + - match: { snapshots.1.state : SUCCESS } + + # Delete the index + - do: + indices.delete: + index: docs + + # Restore the second snapshot + - do: + snapshot.restore: + repository: repository + snapshot: snapshot-two + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 7} + + # Delete the index again + - do: + indices.delete: + index: docs + + # Restore the first snapshot + - do: + snapshot.restore: + repository: repository + snapshot: snapshot-one + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 3} + + # Remove the snapshots + - do: + snapshot.delete: + repository: repository + snapshot: snapshot-two + + - do: + snapshot.delete: + repository: repository + snapshot: snapshot-one + + # Remove our repository + - do: + snapshot.delete_repository: + repository: repository diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java index 7b985ebd176..27736e24dbf 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java +++ 
b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java @@ -24,12 +24,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.ESBlobStoreContainerTestCase; import java.util.Locale; +import java.util.concurrent.ConcurrentHashMap; public class GoogleCloudStorageBlobStoreContainerTests extends ESBlobStoreContainerTestCase { @Override protected BlobStore newBlobStore() { String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); - return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, MockStorage.newStorageClient(bucket, getTestName())); + return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, new MockStorage(bucket, new ConcurrentHashMap<>())); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 1a173b44065..19551f3b082 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -27,14 +27,13 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase; -import org.junit.BeforeClass; +import org.junit.AfterClass; -import java.net.SocketPermission; -import java.security.AccessController; import java.util.Collection; import java.util.Collections; import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -42,9 +41,9 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos private static final String BUCKET = "gcs-repository-test"; - // Static storage client shared among all nodes in order to act like a remote repository service: + // Static map of blobs shared among all nodes in order to act like a remote repository service: // all nodes must see the same content - private static final AtomicReference<Storage> storage = new AtomicReference<>(); + private static final ConcurrentMap<String, byte[]> blobs = new ConcurrentHashMap<>(); @Override protected Collection<Class<? extends Plugin>> nodePlugins() { @@ -62,15 +61,17 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); } - @BeforeClass - public static void setUpStorage() { - storage.set(MockStorage.newStorageClient(BUCKET, GoogleCloudStorageBlobStoreRepositoryTests.class.getName())); + @AfterClass + public static void wipeRepository() { + blobs.clear(); } public static class MockGoogleCloudStoragePlugin extends GoogleCloudStoragePlugin { + public MockGoogleCloudStoragePlugin(final Settings settings) { super(settings); } + @Override protected GoogleCloudStorageService createStorageService(Environment environment) { return new MockGoogleCloudStorageService(environment, getClientsSettings()); @@ -85,9 +86,7 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos @Override public Storage createClient(String clientName) { - // The actual impl
might open a connection. So check we have permission when this call is made. - AccessController.checkPermission(new SocketPermission("*", "connect")); - return storage.get(); + return new MockStorage(BUCKET, blobs); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java index 00c0538d198..5e253078052 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java @@ -24,12 +24,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.ESBlobStoreTestCase; import java.util.Locale; +import java.util.concurrent.ConcurrentHashMap; public class GoogleCloudStorageBlobStoreTests extends ESBlobStoreTestCase { @Override protected BlobStore newBlobStore() { String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); - return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, MockStorage.newStorageClient(bucket, getTestName())); + return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, new MockStorage(bucket, new ConcurrentHashMap<>())); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java index 35606d724cc..31c85d35f3f 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java @@ -22,7 +22,7 @@ import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.io.Streams; +import org.elasticsearch.core.internal.io.Streams; import org.elasticsearch.mocksocket.MockHttpServer; import org.elasticsearch.repositories.gcs.GoogleCloudStorageTestServer.Response; diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index a04dae29497..325cea132be 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -19,74 +19,289 @@ package org.elasticsearch.repositories.gcs; +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.api.client.googleapis.json.GoogleJsonResponseException; +import com.google.api.client.http.AbstractInputStreamContent; +import com.google.api.client.http.HttpHeaders; +import com.google.api.client.http.HttpMethods; +import com.google.api.client.http.HttpRequest; +import com.google.api.client.http.HttpRequestInitializer; +import com.google.api.client.http.HttpResponseException; import com.google.api.client.http.LowLevelHttpRequest; import com.google.api.client.http.LowLevelHttpResponse; -import com.google.api.client.json.jackson2.JacksonFactory; +import com.google.api.client.http.MultipartContent; +import com.google.api.client.json.JsonFactory; +import 
com.google.api.client.testing.http.MockHttpTransport; import com.google.api.client.testing.http.MockLowLevelHttpRequest; import com.google.api.client.testing.http.MockLowLevelHttpResponse; import com.google.api.services.storage.Storage; +import com.google.api.services.storage.model.Bucket; +import com.google.api.services.storage.model.StorageObject; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.rest.RestStatus; +import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.util.Map; +import java.io.InputStream; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.concurrent.ConcurrentMap; + +import static org.mockito.Mockito.mock; /** - * {@link MockStorage} is a utility class that provides {@link Storage} clients that works - * against an embedded {@link GoogleCloudStorageTestServer}. + * {@link MockStorage} mocks a {@link Storage} client by storing all the blobs + * in a given concurrent map. */ -class MockStorage extends com.google.api.client.testing.http.MockHttpTransport { +class MockStorage extends Storage { - /** - * Embedded test server that emulates a Google Cloud Storage service - **/ - private final GoogleCloudStorageTestServer server = new GoogleCloudStorageTestServer(); + /* A custom HTTP header name used to propagate the name of the blobs to delete in batch requests */ + private static final String DELETION_HEADER = "x-blob-to-delete"; - private MockStorage() { + private final String bucketName; + private final ConcurrentMap<String, byte[]> blobs; + + MockStorage(final String bucket, final ConcurrentMap<String, byte[]> blobs) { + super(new MockedHttpTransport(blobs), mock(JsonFactory.class), mock(HttpRequestInitializer.class)); + this.bucketName = bucket; + this.blobs = blobs; } @Override - public LowLevelHttpRequest buildRequest(String method, String url) throws IOException { - return new MockLowLevelHttpRequest() { - @Override - public LowLevelHttpResponse execute() throws IOException { - return convert(server.handle(method, url, getHeaders(), getContentAsBytes())); - } - - /** Returns the LowLevelHttpRequest body as an array of bytes **/ - byte[] getContentAsBytes() throws IOException { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - if (getStreamingContent() != null) { - getStreamingContent().writeTo(out); - } - return out.toByteArray(); - } - }; + public Buckets buckets() { + return new MockBuckets(); } - private static MockLowLevelHttpResponse convert(final GoogleCloudStorageTestServer.Response response) { - final MockLowLevelHttpResponse lowLevelHttpResponse = new MockLowLevelHttpResponse(); - for (Map.Entry<String, String> header : response.headers.entrySet()) { - lowLevelHttpResponse.addHeader(header.getKey(), header.getValue()); + @Override + public Objects objects() { + return new MockObjects(); + } + + class MockBuckets extends Buckets { + + @Override + public Get get(String getBucket) { + return new Get(getBucket) { + @Override + public Bucket execute() { + if (bucketName.equals(getBucket())) { + Bucket bucket = new Bucket(); + bucket.setId(bucketName); + return bucket; + } else { + return null; + } + } + }; } - lowLevelHttpResponse.setContentType(response.contentType); - lowLevelHttpResponse.setStatusCode(response.status.getStatus()); - lowLevelHttpResponse.setReasonPhrase(response.status.toString()); - if (response.body != null) { - lowLevelHttpResponse.setContent(response.body); - lowLevelHttpResponse.setContentLength(response.body.length); + } + + class MockObjects extends Objects { + 
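// Each request implementation below resolves directly against the in-memory blobs map instead of + // issuing any HTTP call, failing with the same 404-style exceptions a real client would observe. +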
@Override + public Get get(String getBucket, String getObject) { + return new Get(getBucket, getObject) { + @Override + public StorageObject execute() throws IOException { + if (bucketName.equals(getBucket()) == false) { + throw newBucketNotFoundException(getBucket()); + } + if (blobs.containsKey(getObject()) == false) { + throw newObjectNotFoundException(getObject()); + } + + StorageObject storageObject = new StorageObject(); + storageObject.setId(getObject()); + return storageObject; + } + + @Override + public InputStream executeMediaAsInputStream() throws IOException { + if (bucketName.equals(getBucket()) == false) { + throw newBucketNotFoundException(getBucket()); + } + if (blobs.containsKey(getObject()) == false) { + throw newObjectNotFoundException(getObject()); + } + return new ByteArrayInputStream(blobs.get(getObject())); + } + }; } + + @Override + public Insert insert(String insertBucket, StorageObject insertObject, AbstractInputStreamContent insertStream) { + return new Insert(insertBucket, insertObject) { + @Override + public StorageObject execute() throws IOException { + if (bucketName.equals(getBucket()) == false) { + throw newBucketNotFoundException(getBucket()); + } + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + Streams.copy(insertStream.getInputStream(), out); + blobs.put(getName(), out.toByteArray()); + return null; + } + }; + } + + @Override + public List list(String listBucket) { + return new List(listBucket) { + @Override + public com.google.api.services.storage.model.Objects execute() throws IOException { + if (bucketName.equals(getBucket()) == false) { + throw newBucketNotFoundException(getBucket()); + } + + final com.google.api.services.storage.model.Objects objects = new com.google.api.services.storage.model.Objects(); + + final java.util.List<StorageObject> storageObjects = new ArrayList<>(); + for (Entry<String, byte[]> blob : blobs.entrySet()) { + if (getPrefix() == null || blob.getKey().startsWith(getPrefix())) { + StorageObject storageObject = new StorageObject(); + storageObject.setId(blob.getKey()); + storageObject.setName(blob.getKey()); + storageObject.setSize(BigInteger.valueOf((long) blob.getValue().length)); + storageObjects.add(storageObject); + } + } + + objects.setItems(storageObjects); + return objects; + } + }; + } + + @Override + public Delete delete(String deleteBucket, String deleteObject) { + return new Delete(deleteBucket, deleteObject) { + @Override + public Void execute() throws IOException { + if (bucketName.equals(getBucket()) == false) { + throw newBucketNotFoundException(getBucket()); + } + + if (blobs.containsKey(getObject()) == false) { + throw newObjectNotFoundException(getObject()); + } + + blobs.remove(getObject()); + return null; + } + + @Override + public HttpRequest buildHttpRequest() throws IOException { + HttpRequest httpRequest = super.buildHttpRequest(); + httpRequest.getHeaders().put(DELETION_HEADER, getObject()); + return httpRequest; + } + }; + } + + @Override + public Copy copy(String srcBucket, String srcObject, String destBucket, String destObject, StorageObject content) { + return new Copy(srcBucket, srcObject, destBucket, destObject, content) { + @Override + public StorageObject execute() throws IOException { + if (bucketName.equals(getSourceBucket()) == false) { + throw newBucketNotFoundException(getSourceBucket()); + } + if (bucketName.equals(getDestinationBucket()) == false) { + throw newBucketNotFoundException(getDestinationBucket()); + } + + final byte[] bytes = blobs.get(getSourceObject()); + if
(bytes == null) { + throw newObjectNotFoundException(getSourceObject()); + } + blobs.put(getDestinationObject(), bytes); + + StorageObject storageObject = new StorageObject(); + storageObject.setId(getDestinationObject()); + return storageObject; + } + }; + } + } + + private static GoogleJsonResponseException newBucketNotFoundException(final String bucket) { + HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Bucket not found: " + bucket, new HttpHeaders()); + return new GoogleJsonResponseException(builder, new GoogleJsonError()); + } + + private static GoogleJsonResponseException newObjectNotFoundException(final String object) { + HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Object not found: " + object, new HttpHeaders()); + return new GoogleJsonResponseException(builder, new GoogleJsonError()); } /** - * Instanciates a mocked Storage client for tests. + * {@link MockedHttpTransport} extends the existing testing transport to analyze the content + * of {@link com.google.api.client.googleapis.batch.BatchRequest} and delete the appropriate + * blobs. We use this because {@link Storage#batch()} is final and there is no other way to + * extend batch requests for testing purposes. */ - public static Storage newStorageClient(final String bucket, final String applicationName) { - MockStorage mockStorage = new MockStorage(); - mockStorage.server.createBucket(bucket); + static class MockedHttpTransport extends MockHttpTransport { - return new Storage.Builder(mockStorage, JacksonFactory.getDefaultInstance(), null) - .setApplicationName(applicationName) - .build(); + private final ConcurrentMap<String, byte[]> blobs; + + MockedHttpTransport(final ConcurrentMap<String, byte[]> blobs) { + this.blobs = blobs; + } + + @Override + public LowLevelHttpRequest buildRequest(final String method, final String url) throws IOException { + // We analyze the content of the Batch request to detect our custom HTTP header, + // and extract from it the name of the blob to delete. Then we reply with a simple + // batch response so that the client parser is happy. + // + // See https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch for the + // format of the batch request body.
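+                // As a sketch of the response produced below: each x-blob-to-delete header found in the request + // body contributes one "--__END_OF_PART__" part whose embedded status line is "HTTP/1.1 200" (or 404 + // when the blob is unknown), which is all the batch response parser needs to report per-blob outcomes.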
+ if (HttpMethods.POST.equals(method) && url.endsWith("/batch")) { + return new MockLowLevelHttpRequest() { + @Override + public LowLevelHttpResponse execute() throws IOException { + final String contentType = new MultipartContent().getType(); + + final StringBuilder builder = new StringBuilder(); + try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { + getStreamingContent().writeTo(out); + + Streams.readAllLines(new ByteArrayInputStream(out.toByteArray()), line -> { + if (line != null && line.startsWith(DELETION_HEADER)) { + builder.append("--__END_OF_PART__\r\n"); + builder.append("Content-Type: application/http").append("\r\n"); + builder.append("\r\n"); + builder.append("HTTP/1.1 "); + + final String blobName = line.substring(line.indexOf(':') + 1).trim(); + if (blobs.containsKey(blobName)) { + builder.append(RestStatus.OK.getStatus()); + blobs.remove(blobName); + } else { + builder.append(RestStatus.NOT_FOUND.getStatus()); + } + builder.append("\r\n"); + builder.append("Content-Type: application/json; charset=UTF-8").append("\r\n"); + builder.append("Content-Length: 0").append("\r\n"); + builder.append("\r\n"); + } + }); + builder.append("\r\n"); + builder.append("--__END_OF_PART__--"); + } + + MockLowLevelHttpResponse response = new MockLowLevelHttpResponse(); + response.setStatusCode(200); + response.setContent(builder.toString()); + response.setContentType(contentType); + return response; + } + }; + } else { + return super.buildRequest(method, url); + } + } } } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 51bb6f2024c..09d9782aa91 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -151,8 +151,7 @@ class S3Repository extends BlobStoreRepository { /** * Constructs an s3 backed repository */ - S3Repository(RepositoryMetaData metadata, Settings settings, - NamedXContentRegistry namedXContentRegistry, AwsS3Service s3Service) throws IOException { + S3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry, AwsS3Service s3Service) { super(metadata, settings, namedXContentRegistry); String bucket = BUCKET_SETTING.get(metadata.settings()); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java index a090fdd5281..caa1c0b467e 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java @@ -20,14 +20,14 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.AmazonClientException; -import com.amazonaws.AmazonServiceException; import com.amazonaws.SdkClientException; import com.amazonaws.services.s3.AbstractAmazonS3; import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.CopyObjectRequest; import com.amazonaws.services.s3.model.CopyObjectResult; import com.amazonaws.services.s3.model.DeleteObjectRequest; -import com.amazonaws.services.s3.model.GetObjectMetadataRequest; +import com.amazonaws.services.s3.model.DeleteObjectsRequest; +import com.amazonaws.services.s3.model.DeleteObjectsResult; import 
com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.ListObjectsRequest; import com.amazonaws.services.s3.model.ObjectListing; @@ -37,197 +37,163 @@ import com.amazonaws.services.s3.model.PutObjectResult; import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectInputStream; import com.amazonaws.services.s3.model.S3ObjectSummary; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.Streams; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.InputStream; -import java.io.UncheckedIOException; -import java.net.InetAddress; -import java.net.Socket; import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; +import java.util.Objects; +import java.util.concurrent.ConcurrentMap; -import static org.junit.Assert.assertTrue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; class MockAmazonS3 extends AbstractAmazonS3 { - private final int mockSocketPort; + private final ConcurrentMap<String, byte[]> blobs; + private final String bucket; + private final boolean serverSideEncryption; + private final String cannedACL; + private final String storageClass; - private Map<String, InputStream> blobs = new ConcurrentHashMap<>(); - - // in ESBlobStoreContainerTestCase.java, the maximum - // length of the input data is 100 bytes - private byte[] byteCounter = new byte[100]; - - - MockAmazonS3(int mockSocketPort) { - this.mockSocketPort = mockSocketPort; - } - - // Simulate a socket connection to check that SocketAccess.doPrivileged() is used correctly. - // Any method of AmazonS3 might potentially open a socket to the S3 service. Firstly, a call - // to any method of AmazonS3 has to be wrapped by SocketAccess.doPrivileged(). - // Secondly, each method on the stack from doPrivileged to opening the socket has to be - // located in a jar that is provided by the plugin. - // Thirdly, a SocketPermission has to be configured in plugin-security.policy.
- // By opening a socket in each method of MockAmazonS3 it is ensured that in production AmazonS3 - // is able to to open a socket to the S3 Service without causing a SecurityException - private void simulateS3SocketConnection() { - try (Socket socket = new Socket(InetAddress.getByName("127.0.0.1"), mockSocketPort)) { - assertTrue(socket.isConnected()); // NOOP to keep static analysis happy - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - - @Override - public boolean doesBucketExist(String bucket) { - return true; + MockAmazonS3(final ConcurrentMap<String, byte[]> blobs, + final String bucket, + final boolean serverSideEncryption, + final String cannedACL, + final String storageClass) { + this.blobs = Objects.requireNonNull(blobs); + this.bucket = Objects.requireNonNull(bucket); + this.serverSideEncryption = serverSideEncryption; + this.cannedACL = cannedACL; + this.storageClass = storageClass; } @Override - public boolean doesObjectExist(String bucketName, String objectName) throws AmazonServiceException, SdkClientException { - simulateS3SocketConnection(); + public boolean doesBucketExist(final String bucket) { + return this.bucket.equalsIgnoreCase(bucket); + } + + @Override + public boolean doesObjectExist(final String bucketName, final String objectName) throws SdkClientException { + assertThat(bucketName, equalTo(bucket)); return blobs.containsKey(objectName); } @Override - public ObjectMetadata getObjectMetadata( - GetObjectMetadataRequest getObjectMetadataRequest) - throws AmazonClientException, AmazonServiceException { - simulateS3SocketConnection(); - String blobName = getObjectMetadataRequest.getKey(); + public PutObjectResult putObject(final PutObjectRequest request) throws AmazonClientException { + assertThat(request.getBucketName(), equalTo(bucket)); + assertThat(request.getMetadata().getSSEAlgorithm(), serverSideEncryption ? equalTo("AES256") : nullValue()); + assertThat(request.getCannedAcl(), notNullValue()); + assertThat(request.getCannedAcl().toString(), cannedACL != null ? equalTo(cannedACL) : equalTo("private")); + assertThat(request.getStorageClass(), storageClass != null ?
equalTo(storageClass) : equalTo("STANDARD")); - if (!blobs.containsKey(blobName)) { - throw new AmazonS3Exception("[" + blobName + "] does not exist."); + + final String blobName = request.getKey(); + final ByteArrayOutputStream out = new ByteArrayOutputStream(); + try { + Streams.copy(request.getInputStream(), out); + blobs.put(blobName, out.toByteArray()); + } catch (IOException e) { + throw new AmazonClientException(e); } - - return new ObjectMetadata(); // nothing is done with it - } - - @Override - public PutObjectResult putObject(PutObjectRequest putObjectRequest) - throws AmazonClientException, AmazonServiceException { - simulateS3SocketConnection(); - String blobName = putObjectRequest.getKey(); - - if (blobs.containsKey(blobName)) { - throw new AmazonS3Exception("[" + blobName + "] already exists."); - } - - blobs.put(blobName, putObjectRequest.getInputStream()); return new PutObjectResult(); } @Override - public S3Object getObject(GetObjectRequest getObjectRequest) - throws AmazonClientException, AmazonServiceException { - simulateS3SocketConnection(); - // in ESBlobStoreContainerTestCase.java, the prefix is empty, - // so the key and blobName are equivalent to each other - String blobName = getObjectRequest.getKey(); + public S3Object getObject(final GetObjectRequest request) throws AmazonClientException { + assertThat(request.getBucketName(), equalTo(bucket)); - if (!blobs.containsKey(blobName)) { - throw new AmazonS3Exception("[" + blobName + "] does not exist."); + final String blobName = request.getKey(); + final byte[] content = blobs.get(blobName); + if (content == null) { + AmazonS3Exception exception = new AmazonS3Exception("[" + blobName + "] does not exist."); + exception.setStatusCode(404); + throw exception; } - // the HTTP request attribute is irrelevant for reading - S3ObjectInputStream stream = new S3ObjectInputStream( - blobs.get(blobName), null, false); + ObjectMetadata metadata = new ObjectMetadata(); + metadata.setContentLength(content.length); + S3Object s3Object = new S3Object(); - s3Object.setObjectContent(stream); + s3Object.setObjectContent(new S3ObjectInputStream(new ByteArrayInputStream(content), null, false)); + s3Object.setKey(blobName); + s3Object.setObjectMetadata(metadata); + return s3Object; } @Override - public ObjectListing listObjects(ListObjectsRequest listObjectsRequest) - throws AmazonClientException, AmazonServiceException { - simulateS3SocketConnection(); - MockObjectListing list = new MockObjectListing(); - list.setTruncated(false); + public ObjectListing listObjects(final ListObjectsRequest request) throws AmazonClientException { + assertThat(request.getBucketName(), equalTo(bucket)); - String blobName; - String prefix = listObjectsRequest.getPrefix(); + final ObjectListing listing = new ObjectListing(); + listing.setBucketName(request.getBucketName()); + listing.setPrefix(request.getPrefix()); - ArrayList<S3ObjectSummary> mockObjectSummaries = new ArrayList<>(); - - for (Map.Entry<String, InputStream> blob : blobs.entrySet()) { - blobName = blob.getKey(); - S3ObjectSummary objectSummary = new S3ObjectSummary(); - - if (prefix.isEmpty() || blobName.startsWith(prefix)) { - objectSummary.setKey(blobName); - - try { - objectSummary.setSize(getSize(blob.getValue())); - } catch (IOException e) { - throw new AmazonS3Exception("Object listing " + - "failed for blob [" + blob.getKey() + "]"); - } - - mockObjectSummaries.add(objectSummary); + for (Map.Entry<String, byte[]> blob : blobs.entrySet()) { + if (Strings.isEmpty(request.getPrefix()) || blob.getKey().startsWith(request.getPrefix())) { +
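// Blobs matching the prefix are surfaced as S3ObjectSummary entries carrying their key and size, + // mirroring a genuine ListObjects response. +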
S3ObjectSummary summary = new S3ObjectSummary(); + summary.setBucketName(request.getBucketName()); + summary.setKey(blob.getKey()); + summary.setSize(blob.getValue().length); + listing.getObjectSummaries().add(summary); } } - - list.setObjectSummaries(mockObjectSummaries); - return list; + return listing; } @Override - public CopyObjectResult copyObject(CopyObjectRequest copyObjectRequest) - throws AmazonClientException, AmazonServiceException { - simulateS3SocketConnection(); - String sourceBlobName = copyObjectRequest.getSourceKey(); - String targetBlobName = copyObjectRequest.getDestinationKey(); + public CopyObjectResult copyObject(final CopyObjectRequest request) throws AmazonClientException { + assertThat(request.getSourceBucketName(), equalTo(bucket)); + assertThat(request.getDestinationBucketName(), equalTo(bucket)); - if (!blobs.containsKey(sourceBlobName)) { - throw new AmazonS3Exception("Source blob [" + - sourceBlobName + "] does not exist."); + final String sourceBlobName = request.getSourceKey(); + + final byte[] content = blobs.get(sourceBlobName); + if (content == null) { + AmazonS3Exception exception = new AmazonS3Exception("[" + sourceBlobName + "] does not exist."); + exception.setStatusCode(404); + throw exception; } - if (blobs.containsKey(targetBlobName)) { - throw new AmazonS3Exception("Target blob [" + - targetBlobName + "] already exists."); - } - - blobs.put(targetBlobName, blobs.get(sourceBlobName)); - return new CopyObjectResult(); // nothing is done with it + blobs.put(request.getDestinationKey(), content); + return new CopyObjectResult(); } @Override - public void deleteObject(DeleteObjectRequest deleteObjectRequest) - throws AmazonClientException, AmazonServiceException { - simulateS3SocketConnection(); - String blobName = deleteObjectRequest.getKey(); + public void deleteObject(final DeleteObjectRequest request) throws AmazonClientException { + assertThat(request.getBucketName(), equalTo(bucket)); - if (!blobs.containsKey(blobName)) { - throw new AmazonS3Exception("[" + blobName + "] does not exist."); + final String blobName = request.getKey(); + if (blobs.remove(blobName) == null) { + AmazonS3Exception exception = new AmazonS3Exception("[" + blobName + "] does not exist."); + exception.setStatusCode(404); + throw exception; } - - blobs.remove(blobName); } - private int getSize(InputStream stream) throws IOException { - int size = stream.read(byteCounter); - stream.reset(); // in case we ever need the size again - return size; - } + @Override + public DeleteObjectsResult deleteObjects(DeleteObjectsRequest request) throws SdkClientException { + assertThat(request.getBucketName(), equalTo(bucket)); - private class MockObjectListing extends ObjectListing { - // the objectSummaries attribute in ObjectListing.java - // is read-only, but we need to be able to write to it, - // so we create a mock of it to work around this - private List mockObjectSummaries; - - @Override - public List getObjectSummaries() { - return mockObjectSummaries; - } - - private void setObjectSummaries(List objectSummaries) { - mockObjectSummaries = objectSummaries; + final List deletions = new ArrayList<>(); + for (DeleteObjectsRequest.KeyVersion key : request.getKeys()) { + if (blobs.remove(key.getKey()) == null) { + AmazonS3Exception exception = new AmazonS3Exception("[" + key + "] does not exist."); + exception.setStatusCode(404); + throw exception; + } else { + DeleteObjectsResult.DeletedObject deletion = new DeleteObjectsResult.DeletedObject(); + deletion.setKey(key.getKey()); + 
deletions.add(deletion); + } } + return new DeleteObjectsResult(deletions); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java index 5998540e7a8..453ef3213f0 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java @@ -37,26 +37,19 @@ import com.amazonaws.services.s3.model.UploadPartResult; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.mocksocket.MockServerSocket; import org.elasticsearch.repositories.ESBlobStoreContainerTestCase; -import org.junit.AfterClass; -import org.junit.BeforeClass; import org.mockito.ArgumentCaptor; import java.io.ByteArrayInputStream; import java.io.IOException; -import java.net.InetAddress; -import java.net.ServerSocket; import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Locale; import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.elasticsearch.repositories.s3.S3BlobStoreTests.randomMockS3BlobStore; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doNothing; @@ -67,36 +60,11 @@ import static org.mockito.Mockito.when; public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { - private static ServerSocket mockS3ServerSocket; - - private static Thread mockS3AcceptorThread; - - // Opens a MockSocket to simulate connections to S3 checking that SocketPermissions are set up correctly. - // See MockAmazonS3.simulateS3SocketConnection. - @BeforeClass - public static void openMockSocket() throws IOException { - mockS3ServerSocket = new MockServerSocket(0, 50, InetAddress.getByName("127.0.0.1")); - mockS3AcceptorThread = new Thread(() -> { - while (!mockS3ServerSocket.isClosed()) { - try { - // Accept connections from MockAmazonS3. 
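
The rewritten MockAmazonS3 above is easiest to follow as a round trip. A minimal sketch (hypothetical bucket and key names; MockAmazonS3 as redefined in this patch) might look like:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.StorageClass;

import java.io.ByteArrayInputStream;
import java.util.concurrent.ConcurrentHashMap;

class MockAmazonS3RoundTrip {
    static void demo() {
        ConcurrentHashMap<String, byte[]> blobs = new ConcurrentHashMap<>();
        // no server-side encryption; null means the defaults "private" / "STANDARD" are expected
        AmazonS3 client = new MockAmazonS3(blobs, "bucket", false, null, null);

        byte[] data = {0x1, 0x2};
        client.putObject(new PutObjectRequest("bucket", "blob", new ByteArrayInputStream(data), new ObjectMetadata())
                .withCannedAcl(CannedAccessControlList.Private) // the mock asserts an ACL was set
                .withStorageClass(StorageClass.Standard));      // and a storage class

        assert client.doesObjectExist("bucket", "blob");
        assert blobs.get("blob").length == 2; // the input stream was fully drained into the backing map
    }
}

Note the design shift this illustrates: because blobs are now byte[] snapshots rather than live InputStreams, getObject and copyObject can be called repeatedly without the mark/reset bookkeeping the old getSize helper needed, and the socket-permission simulation below becomes unnecessary.
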
- mockS3ServerSocket.accept(); - } catch (IOException e) { - } - } - }); - mockS3AcceptorThread.start(); + protected BlobStore newBlobStore() { + return randomMockS3BlobStore(); } - protected BlobStore newBlobStore() throws IOException { - MockAmazonS3 client = new MockAmazonS3(mockS3ServerSocket.getLocalPort()); - String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); - - return new S3BlobStore(Settings.EMPTY, client, bucket, false, - new ByteSizeValue(10, ByteSizeUnit.MB), "public-read-write", "standard"); - } - - public void testExecuteSingleUploadBlobSizeTooLarge() throws IOException { + public void testExecuteSingleUploadBlobSizeTooLarge() { final long blobSize = ByteSizeUnit.GB.toBytes(randomIntBetween(6, 10)); final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); @@ -106,7 +74,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { assertEquals("Upload request size [" + blobSize + "] can't be larger than 5gb", e.getMessage()); } - public void testExecuteSingleUploadBlobSizeLargerThanBufferSize() throws IOException { + public void testExecuteSingleUploadBlobSizeLargerThanBufferSize() { final S3BlobStore blobStore = mock(S3BlobStore.class); when(blobStore.bufferSizeInBytes()).thenReturn(ByteSizeUnit.MB.toBytes(1)); @@ -168,7 +136,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { } } - public void testExecuteMultipartUploadBlobSizeTooLarge() throws IOException { + public void testExecuteMultipartUploadBlobSizeTooLarge() { final long blobSize = ByteSizeUnit.TB.toBytes(randomIntBetween(6, 10)); final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); @@ -179,7 +147,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { assertEquals("Multipart upload request size [" + blobSize + "] can't be larger than 5tb", e.getMessage()); } - public void testExecuteMultipartUploadBlobSizeTooSmall() throws IOException { + public void testExecuteMultipartUploadBlobSizeTooSmall() { final long blobSize = ByteSizeUnit.MB.toBytes(randomIntBetween(1, 4)); final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); @@ -291,7 +259,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { assertEquals(expectedEtags, actualETags); } - public void testExecuteMultipartUploadAborted() throws IOException { + public void testExecuteMultipartUploadAborted() { final String bucketName = randomAlphaOfLengthBetween(1, 10); final String blobName = randomAlphaOfLengthBetween(1, 10); final BlobPath blobPath = new BlobPath(); @@ -418,12 +386,4 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { assertEquals("Expected number of parts [" + expectedParts + "] but got [" + result.v1() + "]", expectedParts, (long) result.v1()); assertEquals("Expected remaining [" + expectedRemaining + "] but got [" + result.v2() + "]", expectedRemaining, (long) result.v2()); } - - @AfterClass - public static void closeMockSocket() throws IOException, InterruptedException { - mockS3ServerSocket.close(); - mockS3AcceptorThread.join(); - mockS3AcceptorThread = null; - mockS3ServerSocket = null; - } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java 
b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java new file mode 100644 index 00000000000..e3e89c41514 --- /dev/null +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.s3; + +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.CannedAccessControlList; +import com.amazonaws.services.s3.model.StorageClass; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.util.Collection; +import java.util.Collections; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import static java.util.Collections.emptyMap; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + +public class S3BlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCase { + + private static final ConcurrentMap blobs = new ConcurrentHashMap<>(); + private static String bucket; + private static String client; + private static ByteSizeValue bufferSize; + private static boolean serverSideEncryption; + private static String cannedACL; + private static String storageClass; + + @BeforeClass + public static void setUpRepositorySettings() { + bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + client = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + bufferSize = new ByteSizeValue(randomIntBetween(5, 50), ByteSizeUnit.MB); + serverSideEncryption = randomBoolean(); + if (randomBoolean()) { + cannedACL = randomFrom(CannedAccessControlList.values()).toString(); + } + if (randomBoolean()) { + storageClass = randomValueOtherThan(StorageClass.Glacier, () -> randomFrom(StorageClass.values())).toString(); + } + } + + @AfterClass + public static void wipeRepository() { + blobs.clear(); + } + + @Override + protected void createTestRepository(final String name) { + assertAcked(client().admin().cluster().preparePutRepository(name) + .setType(S3Repository.TYPE) + .setSettings(Settings.builder() + .put(S3Repository.BUCKET_SETTING.getKey(), bucket) + .put(InternalAwsS3Service.CLIENT_NAME.getKey(), client) + 
.put(S3Repository.BUFFER_SIZE_SETTING.getKey(), bufferSize) + .put(S3Repository.SERVER_SIDE_ENCRYPTION_SETTING.getKey(), serverSideEncryption) + .put(S3Repository.CANNED_ACL_SETTING.getKey(), cannedACL) + .put(S3Repository.STORAGE_CLASS_SETTING.getKey(), storageClass))); + } + + @Override + protected Collection> nodePlugins() { + return Collections.singletonList(TestS3RepositoryPlugin.class); + } + + public static class TestS3RepositoryPlugin extends S3RepositoryPlugin { + + public TestS3RepositoryPlugin(final Settings settings) { + super(settings); + } + + @Override + public Map getRepositories(final Environment env, final NamedXContentRegistry registry) { + return Collections.singletonMap(S3Repository.TYPE, (metadata) -> + new S3Repository(metadata, env.settings(), registry, new InternalAwsS3Service(env.settings(), emptyMap()) { + @Override + public synchronized AmazonS3 client(final Settings repositorySettings) { + return new MockAmazonS3(blobs, bucket, serverSideEncryption, cannedACL, storageClass); + } + })); + } + } +} diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java index 17bea5239fe..4a23e4efa9a 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java @@ -19,18 +19,29 @@ package org.elasticsearch.repositories.s3; +import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.CannedAccessControlList; import com.amazonaws.services.s3.model.StorageClass; +import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStoreException; -import org.elasticsearch.repositories.s3.S3BlobStore; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.repositories.ESBlobStoreTestCase; -import java.io.IOException; +import java.util.Locale; +import java.util.concurrent.ConcurrentHashMap; import static org.hamcrest.Matchers.equalTo; -public class S3BlobStoreTests extends ESTestCase { - public void testInitCannedACL() throws IOException { +public class S3BlobStoreTests extends ESBlobStoreTestCase { + + @Override + protected BlobStore newBlobStore() { + return randomMockS3BlobStore(); + } + + public void testInitCannedACL() { String[] aclList = new String[]{ "private", "public-read", "public-read-write", "authenticated-read", "log-delivery-write", "bucket-owner-read", "bucket-owner-full-control"}; @@ -52,16 +63,12 @@ public class S3BlobStoreTests extends ESTestCase { } } - public void testInvalidCannedACL() throws IOException { - try { - S3BlobStore.initCannedACL("test_invalid"); - fail("CannedACL should fail"); - } catch (BlobStoreException ex) { - assertThat(ex.getMessage(), equalTo("cannedACL is not valid: [test_invalid]")); - } + public void testInvalidCannedACL() { + BlobStoreException ex = expectThrows(BlobStoreException.class, () -> S3BlobStore.initCannedACL("test_invalid")); + assertThat(ex.getMessage(), equalTo("cannedACL is not valid: [test_invalid]")); } - public void testInitStorageClass() throws IOException { + public void testInitStorageClass() { // it should default to `standard` assertThat(S3BlobStore.initStorageClass(null), equalTo(StorageClass.Standard)); 
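
The try/fail/catch blocks in this test file are being collapsed into expectThrows (inherited from LuceneTestCase via ESTestCase); the mechanical rewrite, shown side by side:

// Before: the message assertion only runs inside the catch, and a missing throw
// is only detected by the explicit fail(...).
try {
    S3BlobStore.initCannedACL("test_invalid");
    fail("CannedACL should fail");
} catch (BlobStoreException ex) {
    assertThat(ex.getMessage(), equalTo("cannedACL is not valid: [test_invalid]"));
}

// After: expectThrows fails the test when nothing is thrown and hands back the
// captured exception, so the message check reads straight through.
BlobStoreException ex = expectThrows(BlobStoreException.class, () -> S3BlobStore.initCannedACL("test_invalid"));
assertThat(ex.getMessage(), equalTo("cannedACL is not valid: [test_invalid]"));
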
assertThat(S3BlobStore.initStorageClass(""), equalTo(StorageClass.Standard)); @@ -72,25 +79,43 @@ public class S3BlobStoreTests extends ESTestCase { assertThat(S3BlobStore.initStorageClass("reduced_redundancy"), equalTo(StorageClass.ReducedRedundancy)); } - public void testCaseInsensitiveStorageClass() throws IOException { + public void testCaseInsensitiveStorageClass() { assertThat(S3BlobStore.initStorageClass("sTandaRd"), equalTo(StorageClass.Standard)); assertThat(S3BlobStore.initStorageClass("sTandaRd_Ia"), equalTo(StorageClass.StandardInfrequentAccess)); assertThat(S3BlobStore.initStorageClass("reduCED_redundancy"), equalTo(StorageClass.ReducedRedundancy)); } - public void testInvalidStorageClass() throws IOException { - try { - S3BlobStore.initStorageClass("whatever"); - } catch(BlobStoreException ex) { - assertThat(ex.getMessage(), equalTo("`whatever` is not a valid S3 Storage Class.")); - } + public void testInvalidStorageClass() { + BlobStoreException ex = expectThrows(BlobStoreException.class, () -> S3BlobStore.initStorageClass("whatever")); + assertThat(ex.getMessage(), equalTo("`whatever` is not a valid S3 Storage Class.")); } - public void testRejectGlacierStorageClass() throws IOException { - try { - S3BlobStore.initStorageClass("glacier"); - } catch(BlobStoreException ex) { - assertThat(ex.getMessage(), equalTo("Glacier storage class is not supported")); + public void testRejectGlacierStorageClass() { + BlobStoreException ex = expectThrows(BlobStoreException.class, () -> S3BlobStore.initStorageClass("glacier")); + assertThat(ex.getMessage(), equalTo("Glacier storage class is not supported")); + } + + /** + * Creates a new {@link S3BlobStore} with random settings. + *
<p>
+ * The blobstore uses a {@link MockAmazonS3} client. + */ + public static S3BlobStore randomMockS3BlobStore() { + String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + ByteSizeValue bufferSize = new ByteSizeValue(randomIntBetween(5, 100), ByteSizeUnit.MB); + boolean serverSideEncryption = randomBoolean(); + + String cannedACL = null; + if (randomBoolean()) { + cannedACL = randomFrom(CannedAccessControlList.values()).toString(); } + + String storageClass = null; + if (randomBoolean()) { + storageClass = randomValueOtherThan(StorageClass.Glacier, () -> randomFrom(StorageClass.values())).toString(); + } + + AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass); + return new S3BlobStore(Settings.EMPTY, client, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); } } diff --git a/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java b/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java index 7014b5b5e64..f3e03f006c5 100644 --- a/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java +++ b/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java @@ -100,13 +100,12 @@ public class QueryBuilderBWCIT extends ESRestTestCase { new MatchPhraseQueryBuilder("keyword_field", "value").slop(3) ); addCandidate("\"range\": { \"long_field\": {\"gte\": 1, \"lte\": 9}}", new RangeQueryBuilder("long_field").from(1).to(9)); - // bug url https://github.com/elastic/elasticsearch/issues/29376 - /*addCandidate( + addCandidate( "\"bool\": { \"must_not\": [{\"match_all\": {}}], \"must\": [{\"match_all\": {}}], " + "\"filter\": [{\"match_all\": {}}], \"should\": [{\"match_all\": {}}]}", new BoolQueryBuilder().mustNot(new MatchAllQueryBuilder()).must(new MatchAllQueryBuilder()) .filter(new MatchAllQueryBuilder()).should(new MatchAllQueryBuilder()) - );*/ + ); addCandidate( "\"dis_max\": {\"queries\": [{\"match_all\": {}},{\"match_all\": {}},{\"match_all\": {}}], \"tie_breaker\": 0.01}", new DisMaxQueryBuilder().add(new MatchAllQueryBuilder()).add(new MatchAllQueryBuilder()).add(new MatchAllQueryBuilder()) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json index 51798c92bab..5c9cebf7411 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json @@ -1,26 +1,36 @@ -{ - "rank_eval": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/search-rank-eval.html", - "methods": ["POST"], - "url": { - "path": "/_rank_eval", - "paths": ["/_rank_eval", "/{index}/_rank_eval", "/{index}/{type}/_rank_eval"], +{ + "rank_eval": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/search-rank-eval.html", + "methods": ["GET", "POST"], + "url": { + "path": "/_rank_eval", + "paths": ["/_rank_eval", "/{index}/_rank_eval"], "parts": { "index": { "type": "list", "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to search; leave empty to perform the operation on all types" } }, - "params": {} + "params": { + "ignore_unavailable": { + "type" : "boolean", + "description" : "Whether specified concrete indices 
should be ignored when unavailable (missing or closed)" + }, + "allow_no_indices": { + "type" : "boolean", + "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" + }, + "expand_wildcards": { + "type" : "enum", + "options" : ["open","closed","none","all"], + "default" : "open", + "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both." + } + } }, - "body": { - "description": "The search definition using the Query DSL and the prototype for the eval request.", - "required": true - } - } + "body": { + "description": "The ranking evaluation search definition, including search requests, document ratings and ranking metric definition.", + "required": true + } + } } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml index aff30d17de1..6ad8166a6a8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml @@ -281,7 +281,7 @@ setup: sigma: -1 - do: - catch: /parsing_exception/ + catch: /x_content_parse_exception/ search: body: aggs: diff --git a/server/licenses/lucene-analyzers-common-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-analyzers-common-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index c167b717385..00000000000 --- a/server/licenses/lucene-analyzers-common-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a731424734fd976b409f1963ba88471caccc18aa \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.3.0.jar.sha1 b/server/licenses/lucene-analyzers-common-7.3.0.jar.sha1 new file mode 100644 index 00000000000..5a50f9dd77f --- /dev/null +++ b/server/licenses/lucene-analyzers-common-7.3.0.jar.sha1 @@ -0,0 +1 @@ +4325a5cdf8d3fa23f326cd86a2297fee2bc844f5 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-backward-codecs-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index cdaec87d35b..00000000000 --- a/server/licenses/lucene-backward-codecs-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5f8ad8c3f8c404803aa81a43ac6f732e19c00935 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.3.0.jar.sha1 b/server/licenses/lucene-backward-codecs-7.3.0.jar.sha1 new file mode 100644 index 00000000000..309f301ad8c --- /dev/null +++ b/server/licenses/lucene-backward-codecs-7.3.0.jar.sha1 @@ -0,0 +1 @@ +3b618a21a924cb35ac1f27d3ca47d9ed04f43588 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-core-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index ecb3bb28e23..00000000000 --- a/server/licenses/lucene-core-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -19b1a1fff6bb077e0660e4f0666807e24dd26865 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.3.0.jar.sha1 b/server/licenses/lucene-core-7.3.0.jar.sha1 new file mode 100644 index 00000000000..e12c932b38d --- /dev/null +++ b/server/licenses/lucene-core-7.3.0.jar.sha1 @@ -0,0 +1 @@ +040e2de30c5e6bad868b144e371730200719ceb3 \ No newline at end of file diff --git 
a/server/licenses/lucene-grouping-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-grouping-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 03f9bf1a4c8..00000000000 --- a/server/licenses/lucene-grouping-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -94dd26d685ae981905b775780e6c824f723b14af \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.3.0.jar.sha1 b/server/licenses/lucene-grouping-7.3.0.jar.sha1 new file mode 100644 index 00000000000..703384a64de --- /dev/null +++ b/server/licenses/lucene-grouping-7.3.0.jar.sha1 @@ -0,0 +1 @@ +20a5c472a8be9bec7aa40472791389e875b9e1f2 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-highlighter-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 32327ca414d..00000000000 --- a/server/licenses/lucene-highlighter-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9783a0bb56fb8bbd17280d3def97a656999f6a88 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.3.0.jar.sha1 b/server/licenses/lucene-highlighter-7.3.0.jar.sha1 new file mode 100644 index 00000000000..6e38e256063 --- /dev/null +++ b/server/licenses/lucene-highlighter-7.3.0.jar.sha1 @@ -0,0 +1 @@ +1f92c7d3d9bc2765fe6195bcc4fcb160d11175cc \ No newline at end of file diff --git a/server/licenses/lucene-join-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-join-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 6b521d7de7f..00000000000 --- a/server/licenses/lucene-join-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -01eda74d798af85f846ebd74f53ec7a16e6e2ba1 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.3.0.jar.sha1 b/server/licenses/lucene-join-7.3.0.jar.sha1 new file mode 100644 index 00000000000..d7213d76a62 --- /dev/null +++ b/server/licenses/lucene-join-7.3.0.jar.sha1 @@ -0,0 +1 @@ +da4af75a7e4fe7843fbfa4b58e6a238b6b706d64 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-memory-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 6bfaf1c715f..00000000000 --- a/server/licenses/lucene-memory-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -29b8b6324722dc6dda784731e3e918de9715422c \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.3.0.jar.sha1 b/server/licenses/lucene-memory-7.3.0.jar.sha1 new file mode 100644 index 00000000000..6bb4a4d832d --- /dev/null +++ b/server/licenses/lucene-memory-7.3.0.jar.sha1 @@ -0,0 +1 @@ +fc45b02a5086ec454e6d6ae81fc2cbe7be1c0902 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-misc-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 74d01520b64..00000000000 --- a/server/licenses/lucene-misc-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e1ae49522164a721d67459e59792db6f4dff70fc \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.3.0.jar.sha1 b/server/licenses/lucene-misc-7.3.0.jar.sha1 new file mode 100644 index 00000000000..43c777150a3 --- /dev/null +++ b/server/licenses/lucene-misc-7.3.0.jar.sha1 @@ -0,0 +1 @@ +b6a2418a94b84c29c4b9fcfe4381f2cc1aa4c214 \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-queries-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 172a57bed49..00000000000 --- 
a/server/licenses/lucene-queries-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -87595367717ddc9fbf95bbf649216a5d7954d9d7 \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.3.0.jar.sha1 b/server/licenses/lucene-queries-7.3.0.jar.sha1 new file mode 100644 index 00000000000..b0ef2b4d0eb --- /dev/null +++ b/server/licenses/lucene-queries-7.3.0.jar.sha1 @@ -0,0 +1 @@ +6292a5579a6ab3423ceca60d2ea41cd86481e7c0 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-queryparser-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index ac6aec921a3..00000000000 --- a/server/licenses/lucene-queryparser-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5befbb58ef76c79fc8afebbca781b01320b8ffad \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.3.0.jar.sha1 b/server/licenses/lucene-queryparser-7.3.0.jar.sha1 new file mode 100644 index 00000000000..87a1d74498d --- /dev/null +++ b/server/licenses/lucene-queryparser-7.3.0.jar.sha1 @@ -0,0 +1 @@ +95b2563e5337377dde2eb987b3fce144be5e7a77 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-sandbox-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 412b072e09d..00000000000 --- a/server/licenses/lucene-sandbox-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3d7aa72ccec38ef902b149da36548fb227eeb58a \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.3.0.jar.sha1 b/server/licenses/lucene-sandbox-7.3.0.jar.sha1 new file mode 100644 index 00000000000..605263a2296 --- /dev/null +++ b/server/licenses/lucene-sandbox-7.3.0.jar.sha1 @@ -0,0 +1 @@ +1efd2fa7cba1e359e3fbb8b4c11cab37024b2178 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-spatial-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 5c8d749cf97..00000000000 --- a/server/licenses/lucene-spatial-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ac1755a69f14c53f7846ef7d9b405d44caf53091 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.3.0.jar.sha1 b/server/licenses/lucene-spatial-7.3.0.jar.sha1 new file mode 100644 index 00000000000..4fcd32b5d29 --- /dev/null +++ b/server/licenses/lucene-spatial-7.3.0.jar.sha1 @@ -0,0 +1 @@ +93512c2160bdc3e602141329e5945a91918b6752 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-spatial-extras-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 09e57350f1c..00000000000 --- a/server/licenses/lucene-spatial-extras-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9d2fa5db0ce9fb5a1b4e9f18d818b14e082ef5a0 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.3.0.jar.sha1 b/server/licenses/lucene-spatial-extras-7.3.0.jar.sha1 new file mode 100644 index 00000000000..0f078420cdb --- /dev/null +++ b/server/licenses/lucene-spatial-extras-7.3.0.jar.sha1 @@ -0,0 +1 @@ +47090d8ddf99f6bbb64ee8ab7a76c3cd3165b88f \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-spatial3d-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index e59ab0d054d..00000000000 --- a/server/licenses/lucene-spatial3d-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-99aefdef8178e54f93b743452c5d36bf7e8b3a2d \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.3.0.jar.sha1 b/server/licenses/lucene-spatial3d-7.3.0.jar.sha1 new file mode 100644 index 00000000000..268ed39a784 --- /dev/null +++ b/server/licenses/lucene-spatial3d-7.3.0.jar.sha1 @@ -0,0 +1 @@ +ed8f07d67445d5acde6597996461640b2d92fa08 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.3.0-snapshot-98a6b3d.jar.sha1 b/server/licenses/lucene-suggest-7.3.0-snapshot-98a6b3d.jar.sha1 deleted file mode 100644 index 805298afb19..00000000000 --- a/server/licenses/lucene-suggest-7.3.0-snapshot-98a6b3d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6257a8a1860ec5f57439c420637d5f20bab124ae \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.3.0.jar.sha1 b/server/licenses/lucene-suggest-7.3.0.jar.sha1 new file mode 100644 index 00000000000..798238ce58b --- /dev/null +++ b/server/licenses/lucene-suggest-7.3.0.jar.sha1 @@ -0,0 +1 @@ +6034ccf6b27c659ab7a2678680bae8390fbfc40a \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/action/GenericAction.java b/server/src/main/java/org/elasticsearch/action/GenericAction.java index 7b54f2f6836..6220a1b2062 100644 --- a/server/src/main/java/org/elasticsearch/action/GenericAction.java +++ b/server/src/main/java/org/elasticsearch/action/GenericAction.java @@ -57,7 +57,7 @@ public abstract class GenericAction { @Override public boolean equals(Object obj) { - return Arrays.equals(attributes, ((AttributesKey) obj).attributes); + return obj instanceof AttributesKey && Arrays.equals(attributes, ((AttributesKey) obj).attributes); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/PlainShardIterator.java b/server/src/main/java/org/elasticsearch/cluster/routing/PlainShardIterator.java index bb45ca66956..391ef8c67c8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/PlainShardIterator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/PlainShardIterator.java @@ -51,6 +51,7 @@ public class PlainShardIterator extends PlainShardsIterator implements ShardIter @Override public boolean equals(Object o) { if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; ShardIterator that = (ShardIterator) o; return shardId.equals(that.shardId()); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java index 89829426427..f2df6d3196d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java @@ -55,13 +55,13 @@ public class NodeVersionAllocationDecider extends AllocationDecider { } } else { // relocating primary, only migrate to newer host - return isVersionCompatible(allocation.routingNodes(), shardRouting.currentNodeId(), node, allocation); + return isVersionCompatibleRelocatePrimary(allocation.routingNodes(), shardRouting.currentNodeId(), node, allocation); } } else { final ShardRouting primary = allocation.routingNodes().activePrimary(shardRouting.shardId()); // check that active primary has a newer version so that peer recovery works if (primary != null) { - return isVersionCompatible(allocation.routingNodes(), primary.currentNodeId(), node, allocation); + return 
isVersionCompatibleAllocatingReplica(allocation.routingNodes(), primary.currentNodeId(), node, allocation); } else { // ReplicaAfterPrimaryActiveAllocationDecider should prevent this case from occurring return allocation.decision(Decision.YES, NAME, "no active primary shard yet"); @@ -69,30 +69,45 @@ public class NodeVersionAllocationDecider extends AllocationDecider { } } - private Decision isVersionCompatible(final RoutingNodes routingNodes, final String sourceNodeId, final RoutingNode target, - RoutingAllocation allocation) { + private Decision isVersionCompatibleRelocatePrimary(final RoutingNodes routingNodes, final String sourceNodeId, + final RoutingNode target, final RoutingAllocation allocation) { + final RoutingNode source = routingNodes.node(sourceNodeId); + if (target.node().getVersion().onOrAfter(source.node().getVersion())) { + return allocation.decision(Decision.YES, NAME, + "can relocate primary shard from a node with version [%s] to a node with equal-or-newer version [%s]", + source.node().getVersion(), target.node().getVersion()); + } else { + return allocation.decision(Decision.NO, NAME, + "cannot relocate primary shard from a node with version [%s] to a node with older version [%s]", + source.node().getVersion(), target.node().getVersion()); + } + } + + private Decision isVersionCompatibleAllocatingReplica(final RoutingNodes routingNodes, final String sourceNodeId, + final RoutingNode target, final RoutingAllocation allocation) { final RoutingNode source = routingNodes.node(sourceNodeId); if (target.node().getVersion().onOrAfter(source.node().getVersion())) { /* we can allocate if we can recover from a node that is younger or on the same version * if the primary is already running on a newer version that won't work due to possible * differences in the lucene index format etc.*/ - return allocation.decision(Decision.YES, NAME, "target node version [%s] is the same or newer than source node version [%s]", - target.node().getVersion(), source.node().getVersion()); + return allocation.decision(Decision.YES, NAME, + "can allocate replica shard to a node with version [%s] since this is equal-or-newer than the primary version [%s]", + target.node().getVersion(), source.node().getVersion()); } else { - return allocation.decision(Decision.NO, NAME, "target node version [%s] is older than the source node version [%s] and may " + - "not support codecs or postings formats for a newer Lucene version", - target.node().getVersion(), source.node().getVersion()); + return allocation.decision(Decision.NO, NAME, + "cannot allocate replica shard to a node with version [%s] since this is older than the primary version [%s]", + target.node().getVersion(), source.node().getVersion()); } } - private Decision isVersionCompatible(SnapshotRecoverySource recoverySource, final RoutingNode target, RoutingAllocation allocation) { + private Decision isVersionCompatible(SnapshotRecoverySource recoverySource, final RoutingNode target, + final RoutingAllocation allocation) { if (target.node().getVersion().onOrAfter(recoverySource.version())) { /* we can allocate if we can restore from a snapshot that is older or on the same version */ - return allocation.decision(Decision.YES, NAME, "target node version [%s] is the same or newer than snapshot version [%s]", + return allocation.decision(Decision.YES, NAME, "node version [%s] is the same or newer than snapshot version [%s]", target.node().getVersion(), recoverySource.version()); } else { - return allocation.decision(Decision.NO, NAME, "target node version 
[%s] is older than the snapshot version [%s] and may " + - "not support codecs or postings formats for a newer Lucene version", + return allocation.decision(Decision.NO, NAME, "node version [%s] is older than the snapshot version [%s]", target.node().getVersion(), recoverySource.version()); } } diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index df2e7a123a3..f960664306f 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; -import org.elasticsearch.common.io.Streams; +import org.elasticsearch.core.internal.io.Streams; import java.io.BufferedInputStream; import java.io.FileNotFoundException; @@ -128,7 +128,7 @@ public class FsBlobContainer extends AbstractBlobContainer { } final Path file = path.resolve(blobName); try (OutputStream outputStream = Files.newOutputStream(file, StandardOpenOption.CREATE_NEW)) { - Streams.copy(inputStream, outputStream, new byte[blobStore.bufferSizeInBytes()]); + Streams.copy(inputStream, outputStream); } IOUtils.fsync(file, false); IOUtils.fsync(path, true); diff --git a/server/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java b/server/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java index 332d9024e99..3b1202fe66f 100644 --- a/server/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java +++ b/server/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java @@ -21,11 +21,11 @@ package org.elasticsearch.common.compress; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.internal.io.Streams; import java.io.IOException; import java.util.Objects; diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ObjectParserHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/ObjectParserHelper.java new file mode 100644 index 00000000000..b40b9819819 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ObjectParserHelper.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.xcontent; + +import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.function.BiConsumer; + +/** + * This class provides helpers for {@link ObjectParser} that allow dealing with + * classes outside of the xcontent dependencies. + */ +public final class ObjectParserHelper { + + /** + * Helper to declare an object that will be parsed into a {@link BytesReference} + */ + public void declareRawObject(final AbstractObjectParser parser, + final BiConsumer consumer, + final ParseField field) { + final CheckedFunction bytesParser = p -> { + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.copyCurrentStructure(p); + return BytesReference.bytes(builder); + } + }; + parser.declareField(consumer, bytesParser, field, ValueType.OBJECT); + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/Index.java b/server/src/main/java/org/elasticsearch/index/Index.java index 41cb90d5ba8..ac5a2763644 100644 --- a/server/src/main/java/org/elasticsearch/index/Index.java +++ b/server/src/main/java/org/elasticsearch/index/Index.java @@ -87,7 +87,7 @@ public class Index implements Writeable, ToXContentObject { if (this == o) { return true; } - if (o == null) { + if (o == null || getClass() != o.getClass()) { return false; } Index index1 = (Index) o; diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 4ce8aae52c1..3920b730d7a 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -42,7 +42,6 @@ import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Map; @@ -358,7 +357,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder, ToXContentFragm @Override public boolean equals(Object o) { if (this == o) return true; - if (o == null) return false; + if (o == null || getClass() != o.getClass()) return false; ShardId shardId1 = (ShardId) o; return shardId == shardId1.shardId && index.equals(shardId1.index); } diff --git a/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java b/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java index 14ee8ecb9b3..53d14f32299 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java +++ b/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java @@ -106,7 +106,7 @@ public abstract class BaseTranslogReader implements Comparable= 0; i--) { @@ -684,12 +681,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC if (closed.get() == false) { current.sync(); } - } catch (Exception ex) { - try { - closeOnTragicEvent(ex); - } catch (Exception inner) { - ex.addSuppressed(inner); - } + } catch (final Exception ex) { + closeOnTragicEvent(ex); throw ex; } } @@ -724,12 +717,8 @@ public class Translog extends AbstractIndexShardComponent 
implements IndexShardC ensureOpen(); return current.syncUpTo(location.translogLocation + location.size); } - } catch (Exception ex) { - try { - closeOnTragicEvent(ex); - } catch (Exception inner) { - ex.addSuppressed(inner); - } + } catch (final Exception ex) { + closeOnTragicEvent(ex); throw ex; } return false; @@ -753,14 +742,14 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } } - private void closeOnTragicEvent(Exception ex) { + private void closeOnTragicEvent(final Exception ex) { if (current.getTragicException() != null) { try { close(); - } catch (AlreadyClosedException inner) { + } catch (final AlreadyClosedException inner) { // don't do anything in this case. The AlreadyClosedException comes from TranslogWriter and we should not add it as suppressed because // will contain the Exception ex as cause. See also https://github.com/elastic/elasticsearch/issues/15941 - } catch (Exception inner) { + } catch (final Exception inner) { assert (ex != inner.getCause()); ex.addSuppressed(inner); } @@ -1614,12 +1603,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC assert readers.isEmpty() == false || current.generation == minReferencedGen : "all readers were cleaned but the minReferenceGen [" + minReferencedGen + "] is not the current writer's gen [" + current.generation + "]"; - } catch (Exception ex) { - try { - closeOnTragicEvent(ex); - } catch (final Exception inner) { - ex.addSuppressed(inner); - } + } catch (final Exception ex) { + closeOnTragicEvent(ex); throw ex; } } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index a1e7e188014..4846fdb4e46 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -380,38 +380,31 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { @Override protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException { - if (position + targetBuffer.remaining() > getWrittenOffset()) { - synchronized (this) { - // we only flush here if it's really really needed - try to minimize the impact of the read operation - // in some cases ie. a tragic event we might still be able to read the relevant value - // which is not really important in production but some test can make most strict assumptions - // if we don't fail in this call unless absolutely necessary. - if (position + targetBuffer.remaining() > getWrittenOffset()) { - outputStream.flush(); + try { + if (position + targetBuffer.remaining() > getWrittenOffset()) { + synchronized (this) { + // we only flush here if it's really really needed - try to minimize the impact of the read operation + // in some cases ie. a tragic event we might still be able to read the relevant value + // which is not really important in production but some test can make most strict assumptions + // if we don't fail in this call unless absolutely necessary. + if (position + targetBuffer.remaining() > getWrittenOffset()) { + outputStream.flush(); + } } } + } catch (final IOException e) { + try { + closeWithTragicEvent(e); + } catch (final IOException inner) { + e.addSuppressed(inner); + } + throw e; } // we don't have to have a lock here because we only write ahead to the file, so all writes has been complete // for the requested location. 
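
The simplification above repeats at every caller of closeOnTragicEvent (sync, ensureSynced, trimUnreferencedReaders): since closeOnTragicEvent now attaches any secondary close failure to the caller's exception as a suppressed exception and never throws itself, each call site reduces to the same shape, sketched here with the new Translog.sync():

public void sync() throws IOException {
    try (ReleasableLock lock = readLock.acquire()) {
        if (closed.get() == false) {
            current.sync();
        }
    } catch (final Exception ex) {
        closeOnTragicEvent(ex); // records suppressed exceptions on ex itself, so no nested try/catch here
        throw ex;
    }
}
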
Channels.readFromFileChannelWithEofException(channel, position, targetBuffer); } - private static Checkpoint writeCheckpoint( - ChannelFactory channelFactory, - long syncPosition, - int numOperations, - long minSeqNo, - long maxSeqNo, - long globalCheckpoint, - long minTranslogGeneration, - Path translogFile, - long generation) throws IOException { - final Checkpoint checkpoint = - new Checkpoint(syncPosition, numOperations, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration); - writeCheckpoint(channelFactory, translogFile, checkpoint); - return checkpoint; - } - private static void writeCheckpoint( final ChannelFactory channelFactory, final Path translogFile, diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index bd6d75ea3d6..a0095613cdb 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -229,6 +229,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo @Override public boolean equals(Object o) { if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; Key key = (Key) o; if (readerVersion != key.readerVersion) return false; if (!entity.getCacheIdentity().equals(key.entity.getCacheIdentity())) return false; diff --git a/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index ff7c3009dcf..e7573ae9f71 100644 --- a/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -225,6 +225,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL @Override public boolean equals(Object o) { if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; Key key = (Key) o; if (!indexCache.equals(key.indexCache)) return false; if (!readerKey.equals(key.readerKey)) return false; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 2a5bab52d5b..59a1a521de7 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -38,13 +38,13 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.StopWatch; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.CancellableThreads; +import org.elasticsearch.core.internal.io.Streams; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.RecoveryEngineException; import org.elasticsearch.index.seqno.LocalCheckpointTracker; diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java 
b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 0f8e29d7f38..f1adf9273ff 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -55,7 +55,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.compress.NotXContentException; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -75,6 +74,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.internal.io.Streams; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException; diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java index a959cd0efb8..8c8139d5abd 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; @@ -39,6 +38,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.internal.io.Streams; import org.elasticsearch.gateway.CorruptStateException; import java.io.ByteArrayOutputStream; diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index d6aba28ce27..111663497d7 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.path.PathTrie; @@ -35,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; 
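
A recurring fix in this patch (GenericAction.AttributesKey, PlainShardIterator, Index, ShardId, IndicesRequestCache.Key, IndicesFieldDataCache.Key) hardens equals() against null and foreign types. The canonical shape, with hypothetical names for illustration:

@Override
public boolean equals(Object o) {
    if (this == o) return true;                                // reflexive fast path
    if (o == null || getClass() != o.getClass()) return false; // guards the cast below
    CacheKey other = (CacheKey) o;                             // safe: same concrete class
    return id.equals(other.id);
}

Without the guard, a foreign argument triggers a ClassCastException and a null argument a NullPointerException instead of returning false, violating the Object.equals contract. GenericAction uses an instanceof check instead, which likewise returns false for null.
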
+import org.elasticsearch.core.internal.io.Streams; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.usage.UsageService; @@ -51,7 +51,6 @@ import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Supplier; import java.util.function.UnaryOperator; import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java index e522392cf4b..56cf71b82cf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java @@ -125,7 +125,7 @@ public class InternalFilters extends InternalMultiBucketAggregation + ObjectParserHelper<TaskInfo, Void> parserHelper = new ObjectParserHelper<>(); + parserHelper.declareRawObject(PARSER, optionalConstructorArg(), new ParseField("status")); PARSER.declareLong(constructorArg(), new ParseField("start_time_in_millis")); PARSER.declareLong(constructorArg(), new ParseField("running_time_in_nanos")); PARSER.declareBoolean(constructorArg(), new ParseField("cancellable")); diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResult.java b/server/src/main/java/org/elasticsearch/tasks/TaskResult.java index f75a4fe7ee5..a866ad9bb2d 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskResult.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskResult.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParserHelper; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -185,8 +186,9 @@ public final class TaskResult implements Writeable, ToXContentObject { static { PARSER.declareBoolean(constructorArg(), new ParseField("completed")); PARSER.declareObject(constructorArg(), TaskInfo.PARSER, new ParseField("task")); - PARSER.declareRawObject(optionalConstructorArg(), new ParseField("error")); - PARSER.declareRawObject(optionalConstructorArg(), new ParseField("response")); + ObjectParserHelper<TaskResult, Void> parserHelper = new ObjectParserHelper<>(); + parserHelper.declareRawObject(PARSER, optionalConstructorArg(), new ParseField("error")); + parserHelper.declareRawObject(PARSER, optionalConstructorArg(), new ParseField("response")); } @Override diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java index de63994457a..6ec949a0c91 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java @@ -19,10 +19,9 @@ package org.elasticsearch.tasks; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ResourceAlreadyExistsException;
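+// moved up from its old spot below to keep the import block sorted; the exception is
+// still handled in storeResult() when two nodes race to create the task results index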
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; @@ -38,13 +37,12 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.internal.io.Streams; import java.io.ByteArrayOutputStream; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/action/GenericActionTests.java b/server/src/test/java/org/elasticsearch/action/GenericActionTests.java new file mode 100644 index 00000000000..1bbff4b2a99 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/GenericActionTests.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action; + +import org.elasticsearch.test.ESTestCase; + +public class GenericActionTests extends ESTestCase { + + public void testEquals() { + class FakeAction extends GenericAction { + protected FakeAction(String name) { + super(name); + } + + @Override + public ActionResponse newResponse() { + return null; + } + } + FakeAction fakeAction1 = new FakeAction("a"); + FakeAction fakeAction2 = new FakeAction("a"); + FakeAction fakeAction3 = new FakeAction("b"); + String s = "Some random other object"; + assertEquals(fakeAction1, fakeAction1); + assertEquals(fakeAction2, fakeAction2); + assertNotEquals(fakeAction1, null); + assertNotEquals(fakeAction1, fakeAction3); + assertNotEquals(fakeAction1, s); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java index 1811bfb89a6..fcd73a6f1dd 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; @@ -42,6 +43,7 @@ import static org.elasticsearch.index.alias.RandomAliasActionsGenerator.randomRo import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; public class AliasActionsTests extends ESTestCase { @@ -265,9 +267,9 @@ public class AliasActionsTests extends ESTestCase { } b.endObject(); try (XContentParser parser = createParser(b)) { - Exception e = expectThrows(ParsingException.class, () -> AliasActions.PARSER.apply(parser, null)); + Exception e = expectThrows(XContentParseException.class, () -> AliasActions.PARSER.apply(parser, null)); assertThat(e.getCause().getCause(), instanceOf(IllegalArgumentException.class)); - assertEquals("Only one of [index] and [indices] is supported", e.getCause().getCause().getMessage()); + assertThat(e.getCause().getCause().getMessage(), containsString("Only one of [index] and [indices] is supported")); } } @@ -285,9 +287,9 @@ public class AliasActionsTests extends ESTestCase { } b.endObject(); try (XContentParser parser = createParser(b)) { - Exception e = expectThrows(ParsingException.class, () -> AliasActions.PARSER.apply(parser, null)); + Exception e = expectThrows(XContentParseException.class, () -> AliasActions.PARSER.apply(parser, null)); assertThat(e.getCause().getCause(), instanceOf(IllegalArgumentException.class)); - assertEquals("Only one of [alias] and [aliases] is supported", e.getCause().getCause().getMessage()); + assertThat(e.getCause().getCause().getMessage(), containsString("Only one of [alias] and [aliases] is supported")); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index 
16afa92fb03..1e8d8e2a293 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.action.admin.indices.rollover; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestTests; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; @@ -33,6 +32,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.RandomCreateIndexGenerator; import org.elasticsearch.indices.IndicesModule; @@ -172,7 +172,7 @@ public class RolloverRequestTests extends ESTestCase { } builder.endObject(); BytesReference mutated = XContentTestUtils.insertRandomFields(xContentType, BytesReference.bytes(builder), null, random()); - expectThrows(ParsingException.class, () -> request.fromXContent(createParser(xContentType.xContent(), mutated))); + expectThrows(XContentParseException.class, () -> request.fromXContent(createParser(xContentType.xContent(), mutated))); } public void testSameConditionCanOnlyBeAddedOnce() { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java index 8a62e14ba57..c1861572d83 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java @@ -118,4 +118,19 @@ public class AllocationIdTests extends ESTestCase { AllocationId parsedAllocationId = AllocationId.fromXContent(createParser(JsonXContent.jsonXContent, bytes)); assertEquals(allocationId, parsedAllocationId); } + + public void testEquals() { + AllocationId allocationId1 = AllocationId.newInitializing(); + AllocationId allocationId2 = AllocationId.newInitializing(allocationId1.getId()); + AllocationId allocationId3 = AllocationId.newInitializing("not a UUID"); + String s = "Some random other object"; + assertEquals(allocationId1, allocationId1); + assertEquals(allocationId1, allocationId2); + assertNotEquals(allocationId1, s); + assertNotEquals(allocationId1, null); + assertNotEquals(allocationId1, allocationId3); + + allocationId2 = AllocationId.newRelocation(allocationId1); + assertNotEquals(allocationId1, allocationId2); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/IndexShardRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/IndexShardRoutingTableTests.java new file mode 100644 index 00000000000..7823970ff46 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/routing/IndexShardRoutingTableTests.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing; + +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; + +public class IndexShardRoutingTableTests extends ESTestCase { + public void testEqualsAttributesKey() { + String[] attr1 = {"a"}; + String[] attr2 = {"b"}; + IndexShardRoutingTable.AttributesKey attributesKey1 = new IndexShardRoutingTable.AttributesKey(attr1); + IndexShardRoutingTable.AttributesKey attributesKey2 = new IndexShardRoutingTable.AttributesKey(attr1); + IndexShardRoutingTable.AttributesKey attributesKey3 = new IndexShardRoutingTable.AttributesKey(attr2); + String s = "Some random other object"; + assertEquals(attributesKey1, attributesKey1); + assertEquals(attributesKey1, attributesKey2); + assertNotEquals(attributesKey1, null); + assertNotEquals(attributesKey1, s); + assertNotEquals(attributesKey1, attributesKey3); + } + + public void testEquals() { + Index index = new Index("a", "b"); + ShardId shardId = new ShardId(index, 1); + ShardId shardId2 = new ShardId(index, 2); + IndexShardRoutingTable table1 = new IndexShardRoutingTable(shardId, new ArrayList<>()); + IndexShardRoutingTable table2 = new IndexShardRoutingTable(shardId, new ArrayList<>()); + IndexShardRoutingTable table3 = new IndexShardRoutingTable(shardId2, new ArrayList<>()); + String s = "Some other random object"; + assertEquals(table1, table1); + assertEquals(table1, table2); + assertNotEquals(table1, null); + assertNotEquals(table1, s); + assertNotEquals(table1, table3); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/PlainShardIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/PlainShardIteratorTests.java new file mode 100644 index 00000000000..c92da8e0a8f --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/routing/PlainShardIteratorTests.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.cluster.routing; + +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; + +public class PlainShardIteratorTests extends ESTestCase { + + public void testEquals() { + Index index = new Index("a", "b"); + ShardId shardId = new ShardId(index, 1); + ShardId shardId2 = new ShardId(index, 2); + PlainShardIterator iterator1 = new PlainShardIterator(shardId, new ArrayList<>()); + PlainShardIterator iterator2 = new PlainShardIterator(shardId, new ArrayList<>()); + PlainShardIterator iterator3 = new PlainShardIterator(shardId2, new ArrayList<>()); + String s = "Some other random object"; + assertEquals(iterator1, iterator1); + assertEquals(iterator1, iterator2); + assertNotEquals(iterator1, null); + assertNotEquals(iterator1, s); + assertNotEquals(iterator1, iterator3); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 077466906b7..88766e7943e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -33,15 +33,19 @@ import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource; +import org.elasticsearch.cluster.routing.RoutingChangesObserver; +import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider; import org.elasticsearch.common.UUIDs; @@ -68,6 +72,7 @@ import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.Is.is; public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { @@ -428,4 +433,82 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { } } + public void testMessages() { + + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) + .build(); + + RoutingTable initialRoutingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + 
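// one node on the current version and one on the previous version are enough to
// drive NodeVersionAllocationDecider through every YES/NO explanation asserted below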
RoutingNode newNode = new RoutingNode("newNode", newNode("newNode", Version.CURRENT)); + RoutingNode oldNode = new RoutingNode("oldNode", newNode("oldNode", VersionUtils.getPreviousVersion())); + + final ClusterName clusterName = ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY); + ClusterState clusterState = ClusterState.builder(clusterName).metaData(metaData).routingTable(initialRoutingTable) + .nodes(DiscoveryNodes.builder().add(newNode.node()).add(oldNode.node())).build(); + + final ShardId shardId = clusterState.routingTable().index("test").shard(0).getShardId(); + final ShardRouting primaryShard = clusterState.routingTable().shardRoutingTable(shardId).primaryShard(); + final ShardRouting replicaShard = clusterState.routingTable().shardRoutingTable(shardId).replicaShards().get(0); + + RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState.getRoutingNodes(), clusterState, null, 0); + routingAllocation.debugDecision(true); + + final NodeVersionAllocationDecider allocationDecider = new NodeVersionAllocationDecider(Settings.EMPTY); + Decision decision = allocationDecider.canAllocate(primaryShard, newNode, routingAllocation); + assertThat(decision.type(), is(Decision.Type.YES)); + assertThat(decision.getExplanation(), is("the primary shard is new or already existed on the node")); + + decision = allocationDecider.canAllocate(ShardRoutingHelper.initialize(primaryShard, "oldNode"), newNode, routingAllocation); + assertThat(decision.type(), is(Decision.Type.YES)); + assertThat(decision.getExplanation(), is("can relocate primary shard from a node with version [" + + oldNode.node().getVersion() + "] to a node with equal-or-newer version [" + newNode.node().getVersion() + "]")); + + decision = allocationDecider.canAllocate(ShardRoutingHelper.initialize(primaryShard, "newNode"), oldNode, routingAllocation); + assertThat(decision.type(), is(Decision.Type.NO)); + assertThat(decision.getExplanation(), is("cannot relocate primary shard from a node with version [" + + newNode.node().getVersion() + "] to a node with older version [" + oldNode.node().getVersion() + "]")); + + final SnapshotRecoverySource newVersionSnapshot = new SnapshotRecoverySource( + new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())), newNode.node().getVersion(), "test"); + final SnapshotRecoverySource oldVersionSnapshot = new SnapshotRecoverySource( + new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())), oldNode.node().getVersion(), "test"); + + decision = allocationDecider.canAllocate(ShardRoutingHelper.newWithRestoreSource(primaryShard, newVersionSnapshot), + oldNode, routingAllocation); + assertThat(decision.type(), is(Decision.Type.NO)); + assertThat(decision.getExplanation(), is("node version [" + + oldNode.node().getVersion() + "] is older than the snapshot version [" + newNode.node().getVersion() + "]")); + + decision = allocationDecider.canAllocate(ShardRoutingHelper.newWithRestoreSource(primaryShard, oldVersionSnapshot), + newNode, routingAllocation); + assertThat(decision.type(), is(Decision.Type.YES)); + assertThat(decision.getExplanation(), is("node version [" + + newNode.node().getVersion() + "] is the same or newer than snapshot version [" + oldNode.node().getVersion() + "]")); + + final RoutingChangesObserver routingChangesObserver = new RoutingChangesObserver.AbstractRoutingChangesObserver(); + final RoutingNodes routingNodes = new RoutingNodes(clusterState, false); + final ShardRouting startedPrimary = routingNodes.startShard(logger, 
routingNodes.initializeShard(primaryShard, "newNode", null, 0, + routingChangesObserver), routingChangesObserver); + routingAllocation = new RoutingAllocation(null, routingNodes, clusterState, null, 0); + routingAllocation.debugDecision(true); + + decision = allocationDecider.canAllocate(replicaShard, oldNode, routingAllocation); + assertThat(decision.type(), is(Decision.Type.NO)); + assertThat(decision.getExplanation(), is("cannot allocate replica shard to a node with version [" + + oldNode.node().getVersion() + "] since this is older than the primary version [" + newNode.node().getVersion() + "]")); + + routingNodes.startShard(logger, routingNodes.relocateShard(startedPrimary, "oldNode", 0, routingChangesObserver).v2(), + routingChangesObserver); + routingAllocation = new RoutingAllocation(null, routingNodes, clusterState, null, 0); + routingAllocation.debugDecision(true); + + decision = allocationDecider.canAllocate(replicaShard, newNode, routingAllocation); + assertThat(decision.type(), is(Decision.Type.YES)); + assertThat(decision.getExplanation(), is("can allocate replica shard to a node with version [" + + newNode.node().getVersion() + "] since this is equal-or-newer than the primary version [" + oldNode.node().getVersion() + "]")); + } } diff --git a/server/src/test/java/org/elasticsearch/common/io/StreamsTests.java b/server/src/test/java/org/elasticsearch/common/io/StreamsTests.java index 76b52c08a85..ee1933e3a10 100644 --- a/server/src/test/java/org/elasticsearch/common/io/StreamsTests.java +++ b/server/src/test/java/org/elasticsearch/common/io/StreamsTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.StringReader; @@ -32,7 +31,6 @@ import java.io.StringWriter; import java.nio.charset.StandardCharsets; import java.util.Arrays; -import static org.elasticsearch.common.io.Streams.copy; import static org.elasticsearch.common.io.Streams.copyToString; import static org.hamcrest.Matchers.equalTo; @@ -40,20 +38,11 @@ import static org.hamcrest.Matchers.equalTo; * Unit tests for {@link org.elasticsearch.common.io.Streams}. 
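 * The copy(InputStream, OutputStream) overload now lives in
 * org.elasticsearch.core.internal.io.Streams, which is why its test is dropped below.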
*/ public class StreamsTests extends ESTestCase { - public void testCopyFromInputStream() throws IOException { - byte[] content = "content".getBytes(StandardCharsets.UTF_8); - ByteArrayInputStream in = new ByteArrayInputStream(content); - ByteArrayOutputStream out = new ByteArrayOutputStream(content.length); - long count = copy(in, out); - - assertThat(count, equalTo((long) content.length)); - assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true)); - } public void testCopyFromByteArray() throws IOException { byte[] content = "content".getBytes(StandardCharsets.UTF_8); ByteArrayOutputStream out = new ByteArrayOutputStream(content.length); - copy(content, out); + Streams.copy(content, out); assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true)); } @@ -61,7 +50,7 @@ public class StreamsTests extends ESTestCase { String content = "content"; StringReader in = new StringReader(content); StringWriter out = new StringWriter(); - int count = copy(in, out); + int count = Streams.copy(in, out); assertThat(content.length(), equalTo(count)); assertThat(out.toString(), equalTo(content)); } @@ -69,7 +58,7 @@ public class StreamsTests extends ESTestCase { public void testCopyFromString() throws IOException { String content = "content"; StringWriter out = new StringWriter(); - copy(content, out); + Streams.copy(content, out); assertThat(out.toString(), equalTo(content)); } diff --git a/server/src/test/java/org/elasticsearch/index/IndexTests.java b/server/src/test/java/org/elasticsearch/index/IndexTests.java index fda181614ff..f1360071745 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexTests.java @@ -60,4 +60,18 @@ public class IndexTests extends ESTestCase { parser.nextToken(); // the beginning of the parser assertThat(Index.fromXContent(parser), equalTo(original)); } + + public void testEquals() { + Index index1 = new Index("a", "a"); + Index index2 = new Index("a", "a"); + Index index3 = new Index("a", "b"); + Index index4 = new Index("b", "a"); + String s = "Some random other object"; + assertEquals(index1, index1); + assertEquals(index1, index2); + assertNotEquals(index1, null); + assertNotEquals(index1, s); + assertNotEquals(index1, index3); + assertNotEquals(index1, index4); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java index e440fc02772..a3c3aa3627c 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java @@ -33,6 +33,7 @@ import java.io.IOException; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; public class IdsQueryBuilderTests extends AbstractQueryTestCase { @@ -94,7 +95,7 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase public void testIdsQueryWithInvalidValues() throws Exception { String query = "{ \"ids\": { \"values\": [[1]] } }"; ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(query)); - assertEquals("[ids] failed to parse field [values]", e.getMessage()); + assertThat(e.getMessage(), containsString("[ids] failed to parse field [values]")); } public void testFromJson() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java 
b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index aba7836a5a3..aafc66b3985 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -66,7 +66,9 @@ import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; +import java.util.Map; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; @@ -172,6 +174,206 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStringQueryBuilder> { + @Override + protected QueryStringQueryBuilder mutateInstance(QueryStringQueryBuilder instance) throws IOException { + String query = instance.queryString(); + String defaultField = instance.defaultField(); + Map<String, Float> fields = instance.fields(); + Operator operator = instance.defaultOperator(); + Fuzziness fuzziness = instance.fuzziness(); + String analyzer = instance.analyzer(); + String quoteAnalyzer = instance.quoteAnalyzer(); + Boolean allowLeadingWildCard = instance.allowLeadingWildcard(); + Boolean analyzeWildcard = instance.analyzeWildcard(); + int maxDeterminizedStates = instance.maxDeterminizedStates(); + boolean enablePositionIncrements = instance.enablePositionIncrements(); + boolean escape = instance.escape(); + int phraseSlop = instance.phraseSlop(); + int fuzzyMaxExpansions = instance.fuzzyMaxExpansions(); + int fuzzyPrefixLength = instance.fuzzyPrefixLength(); + String fuzzyRewrite = instance.fuzzyRewrite(); + String rewrite = instance.rewrite(); + String quoteFieldSuffix = instance.quoteFieldSuffix(); + Float tieBreaker = instance.tieBreaker(); + String minimumShouldMatch = instance.minimumShouldMatch(); + String timeZone = instance.timeZone() == null ? null : instance.timeZone().getID(); + boolean autoGenerateSynonymsPhraseQuery = instance.autoGenerateSynonymsPhraseQuery(); + boolean fuzzyTranspositions = instance.fuzzyTranspositions(); + + switch (between(0, 23)) { + case 0: + query = query + " foo"; + break; + case 1: + if (defaultField == null) { + defaultField = randomAlphaOfLengthBetween(1, 10); + } else { + defaultField = defaultField + randomAlphaOfLength(5); + } + break; + case 2: + fields = new HashMap<>(fields); + fields.put(randomAlphaOfLength(10), 1.0f); + break; + case 3: + operator = randomValueOtherThan(operator, () -> randomFrom(Operator.values())); + break; + case 4: + fuzziness = randomValueOtherThan(fuzziness, () -> randomFrom(Fuzziness.AUTO, Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO)); + break; + case 5: + if (analyzer == null) { + analyzer = randomAnalyzer(); + } else { + analyzer = null; + } + break; + case 6: + if (quoteAnalyzer == null) { + quoteAnalyzer = randomAnalyzer(); + } else { + quoteAnalyzer = null; + } + break; + case 7: + if (allowLeadingWildCard == null) { + allowLeadingWildCard = randomBoolean(); + } else { + allowLeadingWildCard = randomBoolean() ? null : (allowLeadingWildCard == false); + } + break; + case 8: + if (analyzeWildcard == null) { + analyzeWildcard = randomBoolean(); + } else { + analyzeWildcard = randomBoolean() ?
null : (analyzeWildcard == false); + } + break; + case 9: + maxDeterminizedStates += 5; + break; + case 10: + enablePositionIncrements = (enablePositionIncrements == false); + break; + case 11: + escape = (escape == false); + break; + case 12: + phraseSlop += 5; + break; + case 13: + fuzzyMaxExpansions += 5; + break; + case 14: + fuzzyPrefixLength += 5; + break; + case 15: + if (fuzzyRewrite == null) { + fuzzyRewrite = getRandomRewriteMethod(); + } else { + fuzzyRewrite = null; + } + break; + case 16: + if (rewrite == null) { + rewrite = getRandomRewriteMethod(); + } else { + rewrite = null; + } + break; + case 17: + if (quoteFieldSuffix == null) { + quoteFieldSuffix = randomAlphaOfLengthBetween(1, 3); + } else { + quoteFieldSuffix = quoteFieldSuffix + randomAlphaOfLength(1); + } + break; + case 18: + if (tieBreaker == null) { + tieBreaker = randomFloat(); + } else { + tieBreaker += 0.05f; + } + break; + case 19: + if (minimumShouldMatch == null) { + minimumShouldMatch = randomMinimumShouldMatch(); + } else { + minimumShouldMatch = null; + } + break; + case 20: + if (timeZone == null) { + timeZone = randomDateTimeZone().getID(); + } else { + if (randomBoolean()) { + timeZone = null; + } else { + timeZone = randomValueOtherThan(timeZone, () -> randomDateTimeZone().getID()); + } + } + break; + case 21: + autoGenerateSynonymsPhraseQuery = (autoGenerateSynonymsPhraseQuery == false); + break; + case 22: + fuzzyTranspositions = (fuzzyTranspositions == false); + break; + case 23: + return changeNameOrBoost(instance); + default: + throw new AssertionError("Illegal randomisation branch"); + } + + QueryStringQueryBuilder newInstance = new QueryStringQueryBuilder(query); + if (defaultField != null) { + newInstance.defaultField(defaultField); + } + newInstance.fields(fields); + newInstance.defaultOperator(operator); + newInstance.fuzziness(fuzziness); + if (analyzer != null) { + newInstance.analyzer(analyzer); + } + if (quoteAnalyzer != null) { + newInstance.quoteAnalyzer(quoteAnalyzer); + } + if (allowLeadingWildCard != null) { + newInstance.allowLeadingWildcard(allowLeadingWildCard); + } + if (analyzeWildcard != null) { + newInstance.analyzeWildcard(analyzeWildcard); + } + newInstance.maxDeterminizedStates(maxDeterminizedStates); + newInstance.enablePositionIncrements(enablePositionIncrements); + newInstance.escape(escape); + newInstance.phraseSlop(phraseSlop); + newInstance.fuzzyMaxExpansions(fuzzyMaxExpansions); + newInstance.fuzzyPrefixLength(fuzzyPrefixLength); + if (fuzzyRewrite != null) { + newInstance.fuzzyRewrite(fuzzyRewrite); + } + if (rewrite != null) { + newInstance.rewrite(rewrite); + } + if (quoteFieldSuffix != null) { + newInstance.quoteFieldSuffix(quoteFieldSuffix); + } + if (tieBreaker != null) { + newInstance.tieBreaker(tieBreaker); + } + if (minimumShouldMatch != null) { + newInstance.minimumShouldMatch(minimumShouldMatch); + } + if (timeZone != null) { + newInstance.timeZone(timeZone); + } + newInstance.autoGenerateSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery); + newInstance.fuzzyTranspositions(fuzzyTranspositions); + + return newInstance; + } + @Override protected void doAssertLuceneQuery(QueryStringQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { @@ -182,6 +384,16 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase new QueryStringQueryBuilder((String) null)); } diff --git a/server/src/test/java/org/elasticsearch/index/search/SimpleQueryStringQueryParserTests.java 
b/server/src/test/java/org/elasticsearch/index/search/SimpleQueryStringQueryParserTests.java new file mode 100644 index 00000000000..b89d7d056c0 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/search/SimpleQueryStringQueryParserTests.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.search; + +import org.elasticsearch.test.ESTestCase; + +public class SimpleQueryStringQueryParserTests extends ESTestCase { + + public void testEqualsSettings() { + SimpleQueryStringQueryParser.Settings settings1 = new SimpleQueryStringQueryParser.Settings(); + SimpleQueryStringQueryParser.Settings settings2 = new SimpleQueryStringQueryParser.Settings(); + String s = "Some random other object"; + assertEquals(settings1, settings1); + assertEquals(settings1, settings2); + assertNotEquals(settings1, null); + assertNotEquals(settings1, s); + + settings2.lenient(!settings1.lenient()); + assertNotEquals(settings1, settings2); + + settings2 = new SimpleQueryStringQueryParser.Settings(); + settings2.analyzeWildcard(!settings1.analyzeWildcard()); + assertNotEquals(settings1, settings2); + + settings2 = new SimpleQueryStringQueryParser.Settings(); + settings2.quoteFieldSuffix("a"); + assertNotEquals(settings1, settings2); + + settings2 = new SimpleQueryStringQueryParser.Settings(); + settings2.autoGenerateSynonymsPhraseQuery(!settings1.autoGenerateSynonymsPhraseQuery()); + assertNotEquals(settings1, settings2); + + settings2 = new SimpleQueryStringQueryParser.Settings(); + settings2.fuzzyPrefixLength(settings1.fuzzyPrefixLength() + 1); + assertNotEquals(settings1, settings2); + + settings2 = new SimpleQueryStringQueryParser.Settings(); + settings2.fuzzyMaxExpansions(settings1.fuzzyMaxExpansions() + 1); + assertNotEquals(settings1, settings2); + + settings2 = new SimpleQueryStringQueryParser.Settings(); + settings2.fuzzyTranspositions(!settings1.fuzzyTranspositions()); + assertNotEquals(settings1, settings2); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardIdTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardIdTests.java index d7f6d147604..93895d4e43a 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardIdTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardIdTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.shard; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; public class ShardIdTests extends ESTestCase { @@ -51,4 +52,20 @@ public class ShardIdTests extends ESTestCase { ex = expectThrows(IllegalArgumentException.class, () -> ShardId.fromString(badId3)); } + + public void testEquals() { + Index index1 = new Index("a", "a"); + 
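// ShardId equality requires both the same Index (name and UUID) and the same shard number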
Index index2 = new Index("a", "b"); + ShardId shardId1 = new ShardId(index1, 0); + ShardId shardId2 = new ShardId(index1, 0); + ShardId shardId3 = new ShardId(index2, 0); + ShardId shardId4 = new ShardId(index1, 1); + String s = "Some random other object"; + assertEquals(shardId1, shardId1); + assertEquals(shardId1, shardId2); + assertNotEquals(shardId1, null); + assertNotEquals(shardId1, s); + assertNotEquals(shardId1, shardId3); + assertNotEquals(shardId1, shardId4); + } } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index 9a2a5d1eacd..8059c8a1039 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -29,6 +29,8 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -44,6 +46,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; @@ -421,6 +424,79 @@ public class IndicesRequestCacheTests extends ESTestCase { assertEquals(0, cache.numRegisteredCloseListeners()); } + public void testEqualsKey() { + AtomicBoolean trueBoolean = new AtomicBoolean(true); + AtomicBoolean falseBoolean = new AtomicBoolean(false); + IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), 1L, new TestBytesReference(1)); + IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), 1L, new TestBytesReference(1)); + IndicesRequestCache.Key key3 = new IndicesRequestCache.Key(new TestEntity(null, falseBoolean), 1L, new TestBytesReference(1)); + IndicesRequestCache.Key key4 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), 2L, new TestBytesReference(1)); + IndicesRequestCache.Key key5 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), 1L, new TestBytesReference(2)); + String s = "Some other random object"; + assertEquals(key1, key1); + assertEquals(key1, key2); + assertNotEquals(key1, null); + assertNotEquals(key1, s); + assertNotEquals(key1, key3); + assertNotEquals(key1, key4); + assertNotEquals(key1, key5); + } + + private class TestBytesReference extends BytesReference { + + int dummyValue; + TestBytesReference(int dummyValue) { + this.dummyValue = dummyValue; + } + + @Override + public boolean equals(Object other) { + return other instanceof TestBytesReference && this.dummyValue == ((TestBytesReference) other).dummyValue; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + dummyValue; + return result; + } + + @Override + public byte get(int index) { + return 0; + } + + @Override + public int length() { + return 0; + } + + @Override + public BytesReference slice(int from, int length) { + return null; + } + + @Override + public BytesRef toBytesRef() { + return null; + } + + @Override + public long ramBytesUsed() { + return 0; + } + + @Override + public Collection getChildResources() { + return null; + 
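// the remaining overrides are inert stubs: only equals/hashCode on dummyValue matter
// for exercising IndicesRequestCache.Key equality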
} + + @Override + public boolean isFragment() { + return false; + } + } + private class TestEntity extends AbstractIndexShardCacheEntity { private final AtomicBoolean standInForIndexShard; private final ShardRequestCache shardRequestCache; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java index acb6b0f0992..08ae503102e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java @@ -19,7 +19,8 @@ package org.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; @@ -78,8 +79,9 @@ public class DateRangeTests extends BaseAggregationTestCase DateRangeAggregationBuilder.parse("aggregationName", parser)); - assertThat(ex.getDetailedMessage(), containsString("badField")); + XContentParseException ex = expectThrows(XContentParseException.class, + () -> DateRangeAggregationBuilder.parse("aggregationName", parser)); + assertThat(ExceptionsHelper.detailedMessage(ex), containsString("badField")); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java index 9549c22019b..fcdbc81c0c6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java @@ -19,10 +19,11 @@ package org.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; @@ -78,8 +79,9 @@ public class GeoDistanceRangeTests extends BaseAggregationTestCase GeoDistanceAggregationBuilder.parse("aggregationName", parser)); - assertThat(ex.getDetailedMessage(), containsString("badField")); + XContentParseException ex = expectThrows(XContentParseException.class, + () -> GeoDistanceAggregationBuilder.parse("aggregationName", parser)); + assertThat(ExceptionsHelper.detailedMessage(ex), containsString("badField")); } /** diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java index b502645a24c..2cf03b96093 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java @@ -19,7 +19,8 @@ package org.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.ExceptionsHelper; +import 
org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; @@ -74,8 +75,9 @@ public class RangeTests extends BaseAggregationTestCase "]\n" + "}"; XContentParser parser = createParser(JsonXContent.jsonXContent, rangeAggregation); - ParsingException ex = expectThrows(ParsingException.class, () -> RangeAggregationBuilder.parse("aggregationName", parser)); - assertThat(ex.getDetailedMessage(), containsString("badField")); + XContentParseException ex = expectThrows(XContentParseException.class, + () -> RangeAggregationBuilder.parse("aggregationName", parser)); + assertThat(ExceptionsHelper.detailedMessage(ex), containsString("badField")); } /** diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParserTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParserTests.java index 7f46cb9e551..e431bf19ff3 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParserTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParserTests.java @@ -18,8 +18,9 @@ */ package org.elasticsearch.search.aggregations.bucket.geogrid; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; @@ -73,8 +74,9 @@ public class GeoHashGridParserTests extends ESTestCase { "{\"field\":\"my_loc\", \"precision\": \"10kg\", \"size\": \"500\", \"shard_size\": \"550\"}"); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); - ParsingException ex = expectThrows(ParsingException.class, () -> GeoGridAggregationBuilder.parse("geohash_grid", stParser)); - assertEquals("[geohash_grid] failed to parse field [precision]", ex.getMessage()); + XContentParseException ex = expectThrows(XContentParseException.class, + () -> GeoGridAggregationBuilder.parse("geohash_grid", stParser)); + assertThat(ex.getMessage(), containsString("[geohash_grid] failed to parse field [precision]")); assertThat(ex.getCause(), instanceOf(NumberFormatException.class)); assertEquals("For input string: \"10kg\"", ex.getCause().getMessage()); } @@ -84,8 +86,9 @@ public class GeoHashGridParserTests extends ESTestCase { "{\"field\":\"my_loc\", \"precision\": \"1cm\", \"size\": \"500\", \"shard_size\": \"550\"}"); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); - ParsingException ex = expectThrows(ParsingException.class, () -> GeoGridAggregationBuilder.parse("geohash_grid", stParser)); - assertEquals("[geohash_grid] failed to parse field [precision]", ex.getMessage()); + XContentParseException ex = expectThrows(XContentParseException.class, + () -> GeoGridAggregationBuilder.parse("geohash_grid", stParser)); + assertThat(ex.getMessage(), containsString("[geohash_grid] failed to parse field [precision]")); assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class)); assertEquals("precision too high [1cm]", ex.getCause().getMessage()); } @@ -94,8 +97,10 @@ public class 
GeoHashGridParserTests extends ESTestCase { XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"my_loc\", \"precision\":false}"); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); - Exception e = expectThrows(ParsingException.class, () -> GeoGridAggregationBuilder.parse("geohash_grid", stParser)); - assertThat(e.getMessage(), containsString("[geohash_grid] precision doesn't support values of type: VALUE_BOOLEAN")); + XContentParseException e = expectThrows(XContentParseException.class, + () -> GeoGridAggregationBuilder.parse("geohash_grid", stParser)); + assertThat(ExceptionsHelper.detailedMessage(e), + containsString("[geohash_grid] precision doesn't support values of type: VALUE_BOOLEAN")); } public void testParseErrorOnPrecisionOutOfRange() throws Exception { @@ -105,9 +110,9 @@ public class GeoHashGridParserTests extends ESTestCase { try { GeoGridAggregationBuilder.parse("geohash_grid", stParser); fail(); - } catch (ParsingException ex) { + } catch (XContentParseException ex) { assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class)); assertEquals("Invalid geohash aggregation precision of 13. Must be between 1 and 12.", ex.getCause().getMessage()); } } -} \ No newline at end of file +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index b8c9825d9b5..02909d673be 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ParseFieldRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.Index; @@ -270,7 +271,7 @@ public class SignificanceHeuristicTests extends ESTestCase { stParser.nextToken(); SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry).parse("testagg", stParser); fail(); - } catch (ParsingException e) { + } catch (XContentParseException e) { assertThat(e.getCause().getMessage(), containsString(expectedError)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java index a492e19496f..7410ce0c3e3 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java @@ -19,7 +19,9 @@ package org.elasticsearch.search.aggregations.metrics; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; @@ -27,6 +29,8 @@ import 
org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggr import java.io.IOException; +import static org.hamcrest.Matchers.containsString; + public class PercentilesTests extends BaseAggregationTestCase { @Override @@ -85,12 +89,8 @@ public class PercentilesTests extends BaseAggregationTestCase PercentilesAggregationBuilder.parse("myPercentiles", parser)); - assertEquals( - "ParsingException[[percentiles] failed to parse field [hdr]]; " - + "nested: IllegalStateException[Only one percentiles method should be declared.];; " - + "java.lang.IllegalStateException: Only one percentiles method should be declared.", - e.getDetailedMessage()); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("[percentiles] failed to parse field [hdr]")); } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index c002d08e6f7..5d06fd4cd40 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -69,6 +70,7 @@ import java.util.function.Function; import static java.util.Collections.emptyList; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class HighlightBuilderTests extends ESTestCase { @@ -163,15 +165,15 @@ public class HighlightBuilderTests extends ESTestCase { } { - ParsingException e = expectParseThrows(ParsingException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"fields\" : {\n" + " \"body\" : {\n" + " \"bad_fieldname\" : [ \"field1\" , \"field2\" ]\n" + " }\n" + " }\n" + "}\n"); - assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); - assertEquals("[fields] failed to parse field [body]", e.getCause().getMessage()); + assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]")); + assertThat(e.getCause().getMessage(), containsString("[fields] failed to parse field [body]")); assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); } } @@ -193,15 +195,15 @@ public class HighlightBuilderTests extends ESTestCase { } { - ParsingException e = expectParseThrows(ParsingException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"fields\" : {\n" + " \"body\" : {\n" + " \"bad_fieldname\" : \"value\"\n" + " }\n" + " }\n" + "}\n"); - assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); - assertEquals("[fields] failed to parse field [body]", e.getCause().getMessage()); + assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]")); + assertThat(e.getCause().getMessage(), 
containsString("[fields] failed to parse field [body]")); assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); } } @@ -218,49 +220,50 @@ public class HighlightBuilderTests extends ESTestCase { } { - ParsingException e = expectParseThrows(ParsingException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"fields\" : {\n" + " \"body\" : {\n" + " \"bad_fieldname\" : { \"field\" : \"value\" }\n" + " }\n" + " }\n" + "}\n"); - assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); - assertEquals("[fields] failed to parse field [body]", e.getCause().getMessage()); + assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]")); + assertThat(e.getCause().getMessage(), containsString("[fields] failed to parse field [body]")); assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); } } public void testStringInFieldsArray() throws IOException { - ParsingException e = expectParseThrows(ParsingException.class, "{\"fields\" : [ \"junk\" ]}"); - assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); - assertEquals( - "[fields] can be a single object with any number of fields or an array where each entry is an object with a single field", - e.getCause().getMessage()); + XContentParseException e = expectParseThrows(XContentParseException.class, "{\"fields\" : [ \"junk\" ]}"); + assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]")); + assertThat(e.getCause().getMessage(), + containsString("[fields] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); } public void testNoFieldsInObjectInFieldsArray() throws IOException { - ParsingException e = expectParseThrows(ParsingException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"fields\" : [ {\n" + " }] \n" + "}\n"); - assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); - assertEquals( - "[fields] can be a single object with any number of fields or an array where each entry is an object with a single field", - e.getCause().getMessage()); + assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]")); + assertThat(e.getCause().getMessage(), + containsString("[fields] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); } public void testTwoFieldsInObjectInFieldsArray() throws IOException { - ParsingException e = expectParseThrows(ParsingException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"fields\" : [ {\n" + " \"body\" : {},\n" + " \"nope\" : {}\n" + " }] \n" + "}\n"); - assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); - assertEquals( - "[fields] can be a single object with any number of fields or an array where each entry is an object with a single field", - e.getCause().getMessage()); } + assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]")); + assertThat(e.getCause().getMessage(), + containsString("[fields] can be a single object with any number of fields " + + "or an array where each entry is an object with a single field")); + } /** * test that build() outputs a {@link SearchContextHighlight} that is has similar 
parameters @@ -405,10 +408,10 @@ public class HighlightBuilderTests extends ESTestCase { assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlightBuilder.DEFAULT_POST_TAGS, highlightBuilder.postTags()); - ParsingException e = expectParseThrows(ParsingException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"tags_schema\" : \"somthing_else\"\n" + "}\n"); - assertEquals("[highlight] failed to parse field [tags_schema]", e.getMessage()); + assertThat(e.getMessage(), containsString("[highlight] failed to parse field [tags_schema]")); assertEquals("Unknown tag schema [somthing_else]", e.getCause().getMessage()); } @@ -436,20 +439,20 @@ public class HighlightBuilderTests extends ESTestCase { } public void testPreTagsWithoutPostTags() throws IOException { - ParsingException e = expectParseThrows(ParsingException.class, "{\n" + + ParsingException err = expectParseThrows(ParsingException.class, "{\n" + " \"pre_tags\" : [\"\"]\n" + "}\n"); - assertEquals("pre_tags are set but post_tags are not set", e.getMessage()); + assertEquals("pre_tags are set but post_tags are not set", err.getMessage()); - e = expectParseThrows(ParsingException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"fields\" : {\n" + " \"body\" : {\n" + " \"pre_tags\" : [\"\"]\n" + " }\n" + " }\n" + "}\n"); - assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); - assertEquals("[fields] failed to parse field [body]", e.getCause().getMessage()); + assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]")); + assertThat(e.getCause().getMessage(), containsString("[fields] failed to parse field [body]")); assertEquals("pre_tags are set but post_tags are not set", e.getCause().getCause().getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 3b1002a6f68..fa0531262bb 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -42,12 +42,12 @@ import org.elasticsearch.common.geo.builders.LineStringBuilder; import org.elasticsearch.common.geo.builders.MultiPolygonBuilder; import org.elasticsearch.common.geo.builders.PointBuilder; import org.elasticsearch.common.geo.builders.PolygonBuilder; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.internal.io.Streams; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 9a9797734b6..75ac542d985 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import 
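A note on the pattern above: the parsing paths moved to ObjectParser now surface failures as XContentParseException, whose message carries a "[line:column]" location prefix, so exact-equality assertions on the message had to become containsString matchers. A minimal, self-contained illustration of why (the "[3:17]" location is a made-up value; only hamcrest is assumed on the classpath):

    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.containsString;

    public class ContainsStringExample {
        public static void main(String[] args) {
            // Shape of the message now reported by the parser: a "[line:column] "
            // location prefix ahead of the old error text.
            String actual = "[3:17] [highlight] failed to parse field [fields]";

            // Old style -- breaks as soon as a location prefix appears:
            // assertEquals("[highlight] failed to parse field [fields]", actual);

            // New style -- match only the stable fragment:
            assertThat(actual, containsString("[highlight] failed to parse field [fields]"));
        }
    }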
diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
index 3b1002a6f68..fa0531262bb 100644
--- a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
+++ b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
@@ -42,12 +42,12 @@ import org.elasticsearch.common.geo.builders.LineStringBuilder;
 import org.elasticsearch.common.geo.builders.MultiPolygonBuilder;
 import org.elasticsearch.common.geo.builders.PointBuilder;
 import org.elasticsearch.common.geo.builders.PolygonBuilder;
-import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.core.internal.io.Streams;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.search.SearchHit;
diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java
index 9a9797734b6..75ac542d985 100644
--- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java
@@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParseException;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -55,6 +56,7 @@ import java.io.IOException;
 
 import static java.util.Collections.emptyList;
 import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
+import static org.hamcrest.Matchers.containsString;
 
 public class QueryRescorerBuilderTests extends ESTestCase {
 
@@ -262,8 +264,8 @@ public class QueryRescorerBuilderTests extends ESTestCase {
             "}\n";
         {
             XContentParser parser = createParser(rescoreElement);
-            Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser));
-            assertEquals("[query] failed to parse field [rescore_query]", e.getMessage());
+            Exception e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser));
+            assertThat(e.getMessage(), containsString("[query] failed to parse field [rescore_query]"));
         }
 
         rescoreElement = "{\n" +
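The rescorer change follows from the parsing being built on ObjectParser, which wraps any field-level failure in an XContentParseException whose cause is the original error. A sketch of that wrapping under stated assumptions: the Settings bean and its "size" field are hypothetical stand-ins, while the ObjectParser/createParser calls mirror the test code in this patch.

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.ObjectParser;
    import org.elasticsearch.common.xcontent.XContentParseException;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    public class ObjectParserFailureExample {
        // Hypothetical value class, just to give the parser something to fill in.
        static class Settings {
            int size;
        }

        private static final ObjectParser<Settings, Void> PARSER =
                new ObjectParser<>("settings", Settings::new);
        static {
            PARSER.declareInt((s, v) -> s.size = v, new ParseField("size"));
        }

        public static void main(String[] args) throws Exception {
            try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                    NamedXContentRegistry.EMPTY,
                    DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                    "{ \"size\" : \"xxl\" }")) {          // bad value: size expects an int
                PARSER.apply(parser, null);
            } catch (XContentParseException e) {
                // roughly "[1:12] [settings] failed to parse field [size]" (location varies)
                System.out.println(e.getMessage());
                // the underlying failure survives as the cause, which is why the
                // tests above inspect e.getCause() as well as e.getMessage()
                System.out.println(e.getCause());
            }
        }
    }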
diff --git a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java
index 9a28740d727..ed83011c266 100644
--- a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java
@@ -25,6 +25,7 @@ import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.xcontent.XContentParseException;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;
@@ -50,6 +51,7 @@ import java.util.HashSet;
 import java.util.Set;
 
 import static org.elasticsearch.search.sort.NestedSortBuilderTests.createRandomNestedSort;
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.instanceOf;
 
 public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuilder> {
@@ -246,8 +248,8 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuilder> {
-        Exception e = expectThrows(ParsingException.class, () -> ScriptSortBuilder.fromXContent(parser, null));
-        assertEquals("[_script] script doesn't support values of type: START_ARRAY", e.getMessage());
+        Exception e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null));
+        assertThat(e.getMessage(), containsString("[_script] script doesn't support values of type: START_ARRAY"));
     }
 
     /**
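For the `_script` sort case the parser rejects a whole token category rather than a single bad value. A hedged sketch of that kind of token-type guard follows; the JSON payload and message wording are modeled on the assertion above, and this is an illustration of the technique, not ScriptSortBuilder's actual implementation:

    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParseException;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    public class TokenCheckExample {
        public static void main(String[] args) throws Exception {
            try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                    NamedXContentRegistry.EMPTY,
                    DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                    "{ \"script\" : [ \"not\", \"an\", \"object\" ] }")) {
                parser.nextToken();                                  // START_OBJECT
                parser.nextToken();                                  // FIELD_NAME "script"
                XContentParser.Token token = parser.nextToken();     // START_ARRAY
                if (token == XContentParser.Token.START_ARRAY) {
                    // reject the token category outright, recording where it happened
                    throw new XContentParseException(parser.getTokenLocation(),
                            "[_script] script doesn't support values of type: " + token);
                }
            } catch (XContentParseException e) {
                System.out.println(e.getMessage());  // printed with a "[line:col] " prefix
            }
        }
    }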
diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
index 9c62bb28483..b9da305e132 100644
--- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
@@ -28,10 +28,12 @@ import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.suggest.document.ContextSuggestField;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParseException;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -403,8 +405,8 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
         XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
-        Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser));
-        assertEquals("category context must be an object, string, number or boolean", e.getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser));
+        assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean"));
     }
 
     public void testQueryContextParsingArray() throws Exception {
@@ -460,8 +462,8 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
         XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
-        Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser));
-        assertEquals("category context must be an object, string, number or boolean", e.getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser));
+        assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean"));
     }
 
     public void testQueryContextParsingObject() throws Exception {
@@ -518,8 +520,8 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
         XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
-        Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser));
-        assertEquals("category context must be a string, number or boolean", e.getMessage());
+        Exception e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser));
+        assertThat(e.getMessage(), containsString("category context must be a string, number or boolean"));
     }
 
     public void testQueryContextParsingObjectArray() throws Exception {
@@ -619,8 +621,8 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
         XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
-        Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser));
-        assertEquals("category context must be a string, number or boolean", e.getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser));
+        assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be a string, number or boolean"));
     }
 
@@ -676,8 +678,8 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
         XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
-        Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser));
-        assertEquals("category context must be an object, string, number or boolean", e.getMessage());
+        XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser));
+        assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean"));
     }
 
     public void testUnknownQueryContextParsing() throws Exception {
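Several of these assertions also switch from e.getMessage() to ExceptionsHelper.detailedMessage(e), because the interesting text may now live on a nested cause rather than on the outer XContentParseException. detailedMessage concatenates the messages down the cause chain, so a containsString check matches at whatever depth the text ends up. A small self-contained illustration; the exception types are chosen arbitrarily to mimic the outer-wrapper-plus-cause shape:

    import org.elasticsearch.ExceptionsHelper;

    public class DetailedMessageExample {
        public static void main(String[] args) {
            // Nested failure like ObjectParser produces: outer context, inner detail.
            Exception inner = new IllegalArgumentException(
                    "category context must be an object, string, number or boolean");
            Exception outer = new RuntimeException("[1:2] failed to parse", inner);

            // Prints roughly "[1:2] failed to parse; nested: IllegalArgumentException[...]",
            // so containsString(...) succeeds regardless of which level carries the text.
            System.out.println(ExceptionsHelper.detailedMessage(outer));
        }
    }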
diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java
index 33b638286b4..ebfac5f58ef 100644
--- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java
@@ -24,12 +24,12 @@ import org.apache.lucene.search.spell.JaroWinklerDistance;
 import org.apache.lucene.search.spell.LevensteinDistance;
 import org.apache.lucene.search.spell.LuceneLevenshteinDistance;
 import org.apache.lucene.search.spell.NGramDistance;
-import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParseException;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -43,6 +43,7 @@ import java.util.List;
 import java.util.function.Supplier;
 
 import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.core.IsInstanceOf.instanceOf;
@@ -175,12 +176,12 @@ public class DirectCandidateGeneratorTests extends ESTestCase {
         // test bad value for field (e.g. size expects an int)
         directGenerator = "{ \"size\" : \"xxl\" }";
-        assertIllegalXContent(directGenerator, ParsingException.class,
+        assertIllegalXContent(directGenerator, XContentParseException.class,
                 "[direct_generator] failed to parse field [size]");
 
         // test unexpected token
         directGenerator = "{ \"size\" : [ \"xxl\" ] }";
-        assertIllegalXContent(directGenerator, ParsingException.class,
+        assertIllegalXContent(directGenerator, XContentParseException.class,
                 "[direct_generator] size doesn't support values of type: START_ARRAY");
     }
 
@@ -188,7 +189,7 @@
             throws IOException {
         XContentParser parser = createParser(JsonXContent.jsonXContent, directGenerator);
         Exception e = expectThrows(exceptionClass, () -> DirectCandidateGeneratorBuilder.PARSER.apply(parser, null));
-        assertEquals(exceptionMsg, e.getMessage());
+        assertThat(e.getMessage(), containsString(exceptionMsg));
     }
 
     /**
diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java
index 8431c8fa69f..777918a7d5e 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java
@@ -96,7 +96,7 @@ public class SnapshotShardsServiceIT extends AbstractSnapshotIntegTestCase {
                 .values().stream().map(status -> status.asCopy().getStage()).collect(Collectors.toList());
             assertThat(stages, hasSize(shards));
             assertThat(stages, everyItem(equalTo(IndexShardSnapshotStatus.Stage.DONE)));
-        });
+        }, 30L, TimeUnit.SECONDS);
 
         logger.info("--> stop disrupting cluster");
         networkDisruption.stopDisrupting();
@@ -110,6 +110,6 @@ public class SnapshotShardsServiceIT extends AbstractSnapshotIntegTestCase {
             logger.info("Snapshot status [{}], successfulShards [{}]", snapshotInfo.state(), snapshotInfo.successfulShards());
             assertThat(snapshotInfo.state(), equalTo(SnapshotState.SUCCESS));
             assertThat(snapshotInfo.successfulShards(), equalTo(shards));
-        }, 10, TimeUnit.SECONDS);
+        }, 30L, TimeUnit.SECONDS);
     }
 }
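The snapshot test's waits grow to an explicit 30 seconds: the first call previously relied on assertBusy's implicit default, the second passed 10 seconds. For readers unfamiliar with the helper, the stand-in below sketches its retry semantics under stated assumptions; the real ESTestCase.assertBusy also grows its sleep interval between attempts, which this fixed-rate version omits:

    import java.util.concurrent.TimeUnit;

    public class BusyAssertExample {
        // Simplified stand-in for ESTestCase.assertBusy: re-run the assertion block
        // until it stops throwing or the deadline passes, then surface the failure.
        static void assertBusy(Runnable assertion, long maxWait, TimeUnit unit) throws InterruptedException {
            long deadline = System.nanoTime() + unit.toNanos(maxWait);
            while (true) {
                try {
                    assertion.run();
                    return;                     // assertions finally hold
                } catch (AssertionError e) {
                    if (System.nanoTime() > deadline) {
                        throw e;                // timed out: rethrow the last failure
                    }
                    Thread.sleep(100);          // fixed poll; the real helper backs off
                }
            }
        }

        public static void main(String[] args) throws InterruptedException {
            long start = System.currentTimeMillis();
            assertBusy(() -> {
                if (System.currentTimeMillis() - start < 1_000) {
                    throw new AssertionError("cluster not converged yet");
                }
            }, 30L, TimeUnit.SECONDS);
            System.out.println("converged");
        }
    }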
diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
index 04ac1d6cda0..1c02f960143 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
@@ -20,6 +20,7 @@ package org.elasticsearch.test;
 
 import com.fasterxml.jackson.core.io.JsonStringEncoder;
+
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
@@ -55,11 +56,11 @@ import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.common.xcontent.DeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentGenerator;
 import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParseException;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -338,7 +339,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
                 if (expectedException) {
                     fail("some parsing exception expected for query: " + testQuery);
                 }
-            } catch (ParsingException | ElasticsearchParseException e) {
+            } catch (ParsingException | ElasticsearchParseException | XContentParseException e) {
                 // different kinds of exception wordings depending on location
                 // of mutation, so no simple asserts possible here
                 if (expectedException == false) {
@@ -742,10 +743,14 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
         for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) {
             // TODO we only change name and boost, we should extend by any sub-test supplying a "mutate" method that randomly changes one
             // aspect of the object under test
-            checkEqualsAndHashCode(createTestQueryBuilder(), this::copyQuery, this::changeNameOrBoost);
+            checkEqualsAndHashCode(createTestQueryBuilder(), this::copyQuery, this::mutateInstance);
         }
     }
 
+    public QB mutateInstance(QB instance) throws IOException {
+        return changeNameOrBoost(instance);
+    }
+
     /**
      * Generic test that checks that the Strings.toString() method
      * renders the XContent correctly.
@@ -761,7 +766,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
         }
     }
 
-    private QB changeNameOrBoost(QB original) throws IOException {
+    protected QB changeNameOrBoost(QB original) throws IOException {
         QB secondQuery = copyQuery(original);
         if (randomBoolean()) {
             secondQuery.queryName(secondQuery.queryName() == null ? randomAlphaOfLengthBetween(1, 30) : secondQuery.queryName()
diff --git a/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java b/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java
index 1d0eaa7ce51..767b74e4472 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java
@@ -20,8 +20,8 @@ package org.elasticsearch.test;
 
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.core.internal.io.Streams;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -36,7 +36,7 @@ public class StreamsUtils {
         if (is == null) {
             throw new FileNotFoundException("Resource [" + path + "] not found in classpath with class loader [" + classLoader + "]");
         }
-        return Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
+        return org.elasticsearch.common.io.Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
     }
 
     public static String copyToStringFromClasspath(String path) throws IOException {
@@ -44,7 +44,7 @@ public class StreamsUtils {
         if (is == null) {
             throw new FileNotFoundException("Resource [" + path + "] not found in classpath");
         }
-        return Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
+        return org.elasticsearch.common.io.Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
     }
 
     public static byte[] copyToBytesFromClasspath(String path) throws IOException {
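The fully qualified calls in StreamsUtils are forced by the import swap: org.elasticsearch.core.internal.io.Streams and org.elasticsearch.common.io.Streams share a simple name, and Java permits only one import per simple name, so the other class must be spelled out at the call site. A minimal sketch of the two coexisting; the specific method choices (Streams.copy for the relocated class, copyToString for the old helper) are illustrative assumptions based on how this patch uses them:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    import org.elasticsearch.core.internal.io.Streams;   // wins the simple name

    public class QualifiedNameExample {
        public static void main(String[] args) throws Exception {
            byte[] data = "hello".getBytes(StandardCharsets.UTF_8);

            // The imported Streams handles raw byte copying...
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            Streams.copy(new ByteArrayInputStream(data), out);

            // ...while the older helper keeps its reader-to-string utility and is
            // reached by its fully qualified name, exactly as in the diff above.
            String text = org.elasticsearch.common.io.Streams.copyToString(
                    new InputStreamReader(new ByteArrayInputStream(data), StandardCharsets.UTF_8));
            System.out.println(out.size() + " bytes, text=" + text);
        }
    }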